def testWriteSessionStartAndCompletion(self):
  """Tests the WriteSessionStart and WriteSessionCompletion functions."""
  session = sessions.Session()

  # A session-type writer accepts session start/completion while open.
  writer = fake_writer.FakeStorageWriter()
  writer.Open()

  writer.WriteSessionStart(session)
  writer.WriteSessionCompletion(session)

  writer.Close()

  # After Close both calls must fail with IOError.
  with self.assertRaises(IOError):
    writer.WriteSessionStart(session)

  with self.assertRaises(IOError):
    writer.WriteSessionCompletion(session)

  # A task-type writer rejects session start/completion even while open.
  writer = fake_writer.FakeStorageWriter(
      storage_type=definitions.STORAGE_TYPE_TASK)
  writer.Open()

  with self.assertRaises(IOError):
    writer.WriteSessionStart(session)

  with self.assertRaises(IOError):
    writer.WriteSessionCompletion(session)

  writer.Close()
def testWriteTaskStartAndCompletion(self):
  """Tests the WriteTaskStart and WriteTaskCompletion functions."""
  session = sessions.Session()
  task = tasks.Task(session_identifier=session.identifier)

  # A task-type writer accepts task start/completion while open.
  writer = fake_writer.FakeStorageWriter(
      storage_type=definitions.STORAGE_TYPE_TASK)
  writer.Open()

  writer.WriteTaskStart(task)
  writer.WriteTaskCompletion(task)

  writer.Close()

  # After Close both calls must fail with IOError.
  with self.assertRaises(IOError):
    writer.WriteTaskStart(task)

  with self.assertRaises(IOError):
    writer.WriteTaskCompletion(task)

  # A session-type writer rejects task start/completion even while open.
  writer = fake_writer.FakeStorageWriter()
  writer.Open()

  with self.assertRaises(IOError):
    writer.WriteTaskStart(task)

  with self.assertRaises(IOError):
    writer.WriteTaskCompletion(task)

  writer.Close()
def testProcessPathSpecCompressedArchive(self):
  """Tests the ProcessPathSpec function on a compressed archive file.

  Processes syslog.tgz twice: once without and once with "process archive
  files" mode, and checks the resulting number of extracted events.
  """
  knowledge_base_values = {'year': 2016}
  session = sessions.Session()

  source_path = self._GetTestFilePath(['syslog.tgz'])
  # Skip instead of fail when the test data is not available, consistent
  # with the other ProcessPathSpec tests.
  self._SkipIfPathNotExists(source_path)

  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_GZIP, parent=path_spec)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TAR, location='/syslog',
      parent=path_spec)

  storage_writer = fake_writer.FakeStorageWriter(session)

  self._TestProcessPathSpec(
      storage_writer, path_spec,
      knowledge_base_values=knowledge_base_values)

  self.assertEqual(storage_writer.number_of_events, 13)

  # Process an archive file with "process archive files" mode.
  path_spec = self._GetTestFilePathSpec(['syslog.tgz'])

  storage_writer = fake_writer.FakeStorageWriter(session)

  self._TestProcessPathSpec(
      storage_writer, path_spec,
      knowledge_base_values=knowledge_base_values, process_archives=True)

  # 4 additional events are extracted from the files inside the archive.
  self.assertEqual(storage_writer.number_of_events, 17)
def testProcessPathSpec(self):
  """Tests the ProcessPathSpec function on an archive file."""
  knowledge_base_values = {'year': 2016}
  session = sessions.Session()

  test_file_path = self._GetTestFilePath(['syslog.tar'])
  self._SkipIfPathNotExists(test_file_path)

  # Process the file inside the archive directly.
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TAR, location='/syslog',
      parent=path_spec)

  storage_writer = fake_writer.FakeStorageWriter(session)

  expected_event_counters = {
      'fs:stat': 1,
      'syslog:cron:task_run': 3,
      'syslog:line': 9}

  self._TestProcessPathSpec(
      storage_writer, path_spec, expected_event_counters,
      knowledge_base_values=knowledge_base_values)

  # Process an archive file without "process archive files" mode.
  path_spec = self._GetTestFilePathSpec(['syslog.tar'])

  storage_writer = fake_writer.FakeStorageWriter(session)

  # Typically there are 3 filestat events, but there can be 4 on platforms
  # that support os.stat_result st_birthtime.
  expected_event_counters = {'fs:stat': [3, 4]}

  self._TestProcessPathSpec(
      storage_writer, path_spec, expected_event_counters,
      knowledge_base_values=knowledge_base_values)

  # Process an archive file with "process archive files" mode.
  path_spec = self._GetTestFilePathSpec(['syslog.tar'])

  storage_writer = fake_writer.FakeStorageWriter(session)

  # Typically there are 3 filestat events, but there can be 4 on platforms
  # that support os.stat_result st_birthtime. There is 1 additional filestat
  # event from the .tar file.
  expected_event_counters = {
      'fs:stat': [4, 5],
      'syslog:cron:task_run': 3,
      'syslog:line': 9}

  self._TestProcessPathSpec(
      storage_writer, path_spec, expected_event_counters,
      knowledge_base_values=knowledge_base_values, process_archives=True)
def _AnalyzeEvents(self, event_objects, plugin, knowledge_base_values=None):
  """Analyzes events using the analysis plugin.

  Args:
    event_objects (list[EventObject]): events to analyze.
    plugin (AnalysisPlugin): plugin.
    knowledge_base_values (Optional[dict[str, str]]): knowledge base values.

  Returns:
    FakeStorageWriter: storage writer, still open, containing the events
        and the analysis report compiled by the plugin. The caller is
        responsible for closing it.
  """
  knowledge_base_object = self._SetUpKnowledgeBase(
      knowledge_base_values=knowledge_base_values)

  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter(session)
  storage_writer.Open()

  for event in event_objects:
    storage_writer.AddEvent(event)

  mediator = analysis_mediator.AnalysisMediator(
      storage_writer, knowledge_base_object)

  # First pass: feed every event to the plugin, then compile a single
  # report from the accumulated state.
  for event in event_objects:
    plugin.ExamineEvent(mediator, event)

  analysis_report = plugin.CompileReport(mediator)
  storage_writer.AddAnalysisReport(analysis_report)

  return storage_writer
def testExtractionWorkerHashing(self):
  """Test that the worker sets up and runs hashing code correctly."""
  extraction_worker = worker.EventExtractionWorker()

  extraction_worker._SetHashers('md5')
  self.assertIn('hashing', extraction_worker.GetAnalyzerNames())

  knowledge_base_values = {'year': 2016}
  session = sessions.Session()

  path_spec = self._GetTestFilePathSpec(['empty_file'])
  writer = fake_writer.FakeStorageWriter(session)

  # Typically there are 3 filestat events, but there can be 4 on platforms
  # that support os.stat_result st_birthtime.
  expected_event_counters = {'fs:stat': [3, 4]}

  self._TestProcessPathSpec(
      writer, path_spec, expected_event_counters,
      extraction_worker=extraction_worker,
      knowledge_base_values=knowledge_base_values)

  writer.Open()

  # MD5 of zero bytes of data.
  expected_md5 = 'd41d8cd98f00b204e9800998ecf8427e'

  for event in writer.GetSortedEvents():
    event_data = self._GetEventDataOfEvent(writer, event)
    event_data_stream = self._GetEventDataStreamOfEventData(
        writer, event_data)
    self.assertEqual(event_data_stream.md5_hash, expected_md5)

  writer.Close()
def testPreprocessSources(self):
  """Tests the PreprocessSources function."""
  # All required test data must be present before running.
  test_file_path = self._GetTestFilePath(['SOFTWARE'])
  self._SkipIfPathNotExists(test_file_path)

  test_file_path = self._GetTestFilePath(['SYSTEM'])
  self._SkipIfPathNotExists(test_file_path)

  test_artifacts_path = shared_test_lib.GetTestFilePath(['artifacts'])
  self._SkipIfPathNotExists(test_artifacts_path)

  registry = artifacts_registry.ArtifactDefinitionsRegistry()
  reader = artifacts_reader.YamlArtifactsReader()
  registry.ReadFromDirectory(reader, test_artifacts_path)

  test_engine = TestEngine()

  source_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_FAKE, location='/')

  session = test_engine.CreateSession()
  writer = fake_writer.FakeStorageWriter()
  writer.Open()

  test_engine.PreprocessSources(
      registry, [source_path_spec], session, writer)

  operating_system = test_engine.knowledge_base.GetValue('operating_system')
  self.assertEqual(operating_system, 'Windows NT')

  # A None path specification should not raise.
  test_engine.PreprocessSources(registry, [None], session, writer)
def testCollect(self):
  """Tests the Collect function."""
  # Build a fake file system containing the SOFTWARE and SYSTEM
  # Windows Registry files.
  file_system_builder = fake_file_system_builder.FakeFileSystemBuilder()

  test_file_path = shared_test_lib.GetTestFilePath(['SOFTWARE'])
  file_system_builder.AddFileReadData(
      '/Windows/System32/config/SOFTWARE', test_file_path)

  test_file_path = shared_test_lib.GetTestFilePath(['SYSTEM'])
  file_system_builder.AddFileReadData(
      '/Windows/System32/config/SYSTEM', test_file_path)

  session = sessions.Session()
  test_knowledge_base = knowledge_base.KnowledgeBase()
  writer = fake_writer.FakeStorageWriter()

  test_mediator = mediator.PreprocessMediator(
      session, writer, test_knowledge_base)

  mount_point = fake_path_spec.FakePathSpec(location='/')
  searcher = file_system_searcher.FileSystemSearcher(
      file_system_builder.file_system, mount_point)

  plugin = generic.DetermineOperatingSystemPlugin()

  writer.Open()
  try:
    plugin.Collect(
        test_mediator, None, searcher, file_system_builder.file_system)
  finally:
    writer.Close()

  operating_system = test_mediator.knowledge_base.GetValue(
      'operating_system')
  self.assertEqual(operating_system, 'Windows NT')
def _ParseESEDBFileWithPlugin(
    self, path_segments, plugin, knowledge_base_values=None):
  """Parses a file as an ESE database file and returns an event generator.

  Args:
    path_segments (list[str]): path segments inside the test data directory.
    plugin (ESEDBPlugin): ESE database plugin.
    knowledge_base_values (Optional[dict[str, object]]): knowledge base
        values.

  Returns:
    FakeStorageWriter: storage writer, still open, containing the events
        produced by the plugin.
  """
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter(session)
  storage_writer.Open()

  file_entry = self._GetTestFileEntry(path_segments)
  parser_mediator = self._CreateParserMediator(
      storage_writer, file_entry=file_entry,
      knowledge_base_values=knowledge_base_values)

  file_object = file_entry.GetFileObject()
  try:
    esedb_file = pyesedb.file()
    esedb_file.open_file_object(file_object)
    cache = esedb.ESEDBCache()
    plugin.Process(parser_mediator, cache=cache, database=esedb_file)
    esedb_file.close()
  finally:
    # Ensure the file object is released even when the plugin raises.
    file_object.close()

  return storage_writer
def testExtractionWorkerYara(self):
  """Tests that the worker applies Yara matching code correctly."""
  extraction_worker = worker.EventExtractionWorker()

  rule_path = self._GetTestFilePath(['yara.rules'])
  with open(rule_path, 'r') as rule_file:
    rule_string = rule_file.read()

  extraction_worker._SetYaraRules(rule_string)
  self.assertIn('yara', extraction_worker.GetAnalyzerNames())

  knowledge_base_values = {'year': 2016}
  session = sessions.Session()

  path_spec = self._GetTestFilePathSpec(['test_pe.exe'])
  writer = fake_writer.FakeStorageWriter(session)

  self._TestProcessPathSpec(
      writer, path_spec, extraction_worker=extraction_worker,
      knowledge_base_values=knowledge_base_values)

  writer.Open()

  # Every extracted event is expected to carry the Yara match.
  expected_yara_match = 'PEfileBasic,PEfile'
  for event in writer.GetSortedEvents():
    yara_match = getattr(event, 'yara_match', None)
    self.assertEqual(yara_match, expected_yara_match)

  writer.Close()
def testExtractionWorkerHashing(self):
  """Test that the worker sets up and runs hashing code correctly."""
  # NOTE(review): reads the hash from an event attribute, unlike the
  # event-data-stream based variant of this test elsewhere in the suite.
  extraction_worker = worker.EventExtractionWorker()

  extraction_worker._SetHashers('md5')
  self.assertIn('hashing', extraction_worker.GetAnalyzerNames())

  knowledge_base_values = {'year': 2016}
  session = sessions.Session()

  path_spec = self._GetTestFilePathSpec(['empty_file'])
  writer = fake_writer.FakeStorageWriter(session)

  self._TestProcessPathSpec(
      writer, path_spec, extraction_worker=extraction_worker,
      knowledge_base_values=knowledge_base_values)

  writer.Open()

  # MD5 of zero bytes of data.
  expected_md5 = 'd41d8cd98f00b204e9800998ecf8427e'
  for event in writer.GetSortedEvents():
    md5_hash = getattr(event, 'md5_hash', None)
    self.assertEqual(md5_hash, expected_md5)

  writer.Close()
def testAddEventTag(self):
  """Tests the AddEventTag function."""
  session = sessions.Session()
  writer = fake_writer.FakeStorageWriter(session)
  writer.Open()

  # Write the test events plus their data streams and event data so the
  # tags have something to reference.
  test_events = []
  for event, event_data, event_data_stream in (
      containers_test_lib.CreateEventsFromValues(self._TEST_EVENTS)):
    writer.AddEventDataStream(event_data_stream)

    event_data.SetEventDataStreamIdentifier(
        event_data_stream.GetIdentifier())
    writer.AddEventData(event_data)

    event.SetEventDataIdentifier(event_data.GetIdentifier())
    writer.AddEvent(event)

    test_events.append(event)

  event_tag = None
  test_event_tags = self._CreateTestEventTags(test_events)
  for event_tag in test_event_tags:
    writer.AddEventTag(event_tag)

  writer.Close()

  # Test writing an event tag twice.
  with self.assertRaises(IOError):
    writer.AddEventTag(event_tag)
def testGetValuesFromKey(self):
  """Tests the _GetValuesFromKey function."""
  session = sessions.Session()
  writer = fake_writer.FakeStorageWriter()
  writer.Open()

  parser_mediator = self._CreateParserMediator(session, writer)

  # Build a fake Registry key with one binary MRU-list value and one
  # string value referenced by it.
  registry_key = dfwinreg_fake.FakeWinRegistryKey(
      'Explorer', key_path='HKEY_LOCAL_MACHINE\\Software\\Windows\\MRU')

  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'MRUList', data=b'a\x00',
      data_type=dfwinreg_definitions.REG_BINARY)
  registry_key.AddValue(registry_value)

  registry_value = dfwinreg_fake.FakeWinRegistryValue(
      'a', data=b'o\x00n\x00e\x00',
      data_type=dfwinreg_definitions.REG_SZ)
  registry_key.AddValue(registry_value)

  plugin = interface.WindowsRegistryPlugin()

  expected_value_dict = {
      'a': '[REG_SZ] one',
      'MRUList': '[REG_BINARY] (2 bytes)'}

  values_dict = plugin._GetValuesFromKey(parser_mediator, registry_key)
  self.assertEqual(
      sorted(values_dict.items()), sorted(expected_value_dict.items()))
def _ParseFileWithPlugin(
    self, path_segments, plugin_name, knowledge_base_values=None,
    timezone='UTC'):
  """Parses a syslog file with a specific plugin.

  Args:
    path_segments (list[str]): path segments inside the test data directory.
    plugin_name (str): name of the plugin.
    knowledge_base_values (Optional[dict]): knowledge base values.
    timezone (Optional[str]): time zone.

  Returns:
    FakeStorageWriter: storage writer, still open, containing the parsed
        events.

  Raises:
    SkipTest: if the path inside the test data directory does not exist and
        the test should be skipped.
  """
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter()
  storage_writer.Open()

  file_entry = self._GetTestFileEntry(path_segments)
  parser_mediator = self._CreateParserMediator(
      session, storage_writer, file_entry=file_entry,
      knowledge_base_values=knowledge_base_values, timezone=timezone)

  # Restrict the syslog parser to the single requested plugin.
  parser = syslog.SyslogParser()
  parser.EnablePlugins([plugin_name])

  file_object = file_entry.GetFileObject()
  parser.Parse(parser_mediator, file_object)

  return storage_writer
def _ParseFile(self, path_segments, parser, knowledge_base_object):
  """Parses a file using the parser.

  Args:
    path_segments (list[str]): path segments inside the test data directory.
    parser (BaseParser): parser.
    knowledge_base_object (KnowledgeBase): knowledge base.

  Returns:
    FakeStorageWriter: storage writer, still open, containing the parsed
        events.
  """
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter(session)
  storage_writer.Open()

  mediator = parsers_mediator.ParserMediator(
      storage_writer, knowledge_base_object)

  file_entry = self._GetTestFileEntry(path_segments)
  mediator.SetFileEntry(file_entry)

  if isinstance(parser, parsers_interface.FileEntryParser):
    parser.Parse(mediator)

  elif isinstance(parser, parsers_interface.FileObjectParser):
    file_object = file_entry.GetFileObject()
    try:
      parser.Parse(mediator, file_object)
    finally:
      file_object.close()

  else:
    # Use !s (str conversion) instead of :s — the :s format spec raises
    # TypeError when applied to a type object, masking the real failure.
    self.fail('Got unexpected parser type: {0!s}'.format(type(parser)))

  return storage_writer
def testProduceEventWithEventData(self):
  """Tests the ProduceEventWithEventData method."""
  knowledge_base_object = knowledge_base.KnowledgeBase()
  parser_mediator = mediator.ParserMediator(knowledge_base_object)

  writer = fake_writer.FakeStorageWriter()
  parser_mediator.SetStorageWriter(writer)
  writer.Open()

  event_data_stream = events.EventDataStream()
  parser_mediator.ProduceEventDataStream(event_data_stream)

  # An event with a timestamp is accepted and stored.
  date_time = fake_time.FakeTime()
  event_with_timestamp = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_WRITTEN)
  event_with_timestamp.parser = 'test_parser'

  event_data = events.EventData()
  event_data.parser = 'test_parser'

  parser_mediator.ProduceEventWithEventData(
      event_with_timestamp, event_data)

  self.assertEqual(writer.number_of_events, 1)
  self.assertEqual(writer.number_of_extraction_warnings, 0)
  self.assertEqual(writer.number_of_recovery_warnings, 0)

  # An event without a timestamp is rejected.
  event_without_timestamp = events.EventObject()
  event_without_timestamp.parser = 'test_parser'

  with self.assertRaises(errors.InvalidEvent):
    parser_mediator.ProduceEventWithEventData(
        event_without_timestamp, event_data)
def _ParseFileWithPlugin(
    self, path_segments, plugin_name, knowledge_base_values=None):
  """Parses a syslog file with a specific plugin.

  Args:
    path_segments (list[str]): path segments inside the test data directory.
    plugin_name (str): name of the plugin.
    knowledge_base_values (Optional[dict]): knowledge base values.

  Returns:
    FakeStorageWriter: storage writer, still open, containing the parsed
        events.
  """
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter(session)
  storage_writer.Open()

  file_entry = self._GetTestFileEntry(path_segments)
  parser_mediator = self._CreateParserMediator(
      storage_writer, file_entry=file_entry,
      knowledge_base_values=knowledge_base_values)

  # Restrict the syslog parser to the single requested plugin.
  parser = syslog.SyslogParser()
  parser.EnablePlugins([plugin_name])

  file_object = file_entry.GetFileObject()
  try:
    parser.Parse(parser_mediator, file_object)
  finally:
    # Release the file object even when parsing raises.
    file_object.close()

  return storage_writer
def _ParsePlistWithPlugin(
    self, plugin, plist_name, top_level_object,
    knowledge_base_values=None):
  """Parses a plist using the plugin object.

  Args:
    plugin (PlistPlugin): a plist plugin.
    plist_name (str): name of the plist to parse.
    top_level_object (dict[str, object]): plist top-level key.
    knowledge_base_values (Optional[dict[str, object]]): knowledge base
        values.

  Returns:
    FakeStorageWriter: a storage writer, still open, containing the events
        produced by the plugin.
  """
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter()
  storage_writer.Open()

  parser_mediator = self._CreateParserMediator(
      session, storage_writer,
      knowledge_base_values=knowledge_base_values)

  plugin.Process(
      parser_mediator, plist_name=plist_name, top_level=top_level_object)

  return storage_writer
def testSignalAbort(self):
  """Tests the SignalAbort function."""
  session = sessions.Session()
  writer = fake_writer.FakeStorageWriter(session)
  parsers_mediator = self._CreateParserMediator(writer)

  # Should not raise.
  parsers_mediator.SignalAbort()
def testProcessSources(self):
  """Tests the ProcessSources function."""
  test_artifacts_path = self._GetTestFilePath(['artifacts'])
  self._SkipIfPathNotExists(test_artifacts_path)

  test_file_path = self._GetTestFilePath(['ímynd.dd'])
  self._SkipIfPathNotExists(test_file_path)

  registry = artifacts_registry.ArtifactDefinitionsRegistry()
  reader = artifacts_reader.YamlArtifactsReader()
  registry.ReadFromDirectory(reader, test_artifacts_path)

  test_engine = single_process.SingleProcessEngine()
  resolver_context = context.Context()
  session = sessions.Session()

  # A TSK path specification on top of the raw storage media image.
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  source_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location='/',
      parent=os_path_spec)

  test_engine.PreprocessSources(registry, [source_path_spec])

  writer = fake_writer.FakeStorageWriter(session)

  configuration = configurations.ProcessingConfiguration()
  configuration.parser_filter_expression = 'filestat'

  test_engine.ProcessSources(
      [source_path_spec], writer, resolver_context, configuration)

  self.assertEqual(writer.number_of_events, 15)
def testSetFileEntry(self):
  """Tests the SetFileEntry function."""
  session = sessions.Session()
  writer = fake_writer.FakeStorageWriter(session)
  parsers_mediator = self._CreateParserMediator(writer)

  # Clearing the file entry should not raise.
  parsers_mediator.SetFileEntry(None)
def testGetLatestYearFromFileEntry(self):
  """Tests the _GetLatestYearFromFileEntry function."""
  session = sessions.Session()
  writer = fake_writer.FakeStorageWriter(session)
  parsers_mediator = self._CreateParserMediator(writer)

  # Without a file entry set there is no year to derive.
  latest_year = parsers_mediator._GetLatestYearFromFileEntry()
  self.assertIsNone(latest_year)
def testGetFilename(self):
  """Tests the GetFilename function."""
  session = sessions.Session()
  writer = fake_writer.FakeStorageWriter(session)
  parsers_mediator = self._CreateParserMediator(writer)

  # Without a file entry set there is no filename.
  filename = parsers_mediator.GetFilename()
  self.assertIsNone(filename)
def testProcessSources(self):
  """Tests the ProcessSources function."""
  test_artifacts_path = self._GetTestFilePath(['artifacts'])
  self._SkipIfPathNotExists(test_artifacts_path)

  test_file_path = self._GetTestFilePath(['ímynd.dd'])
  self._SkipIfPathNotExists(test_file_path)

  test_engine = extraction_engine.SingleProcessEngine()
  resolver_context = context.Context()

  # A TSK path specification on top of the raw storage media image.
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  source_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location='/',
      parent=os_path_spec)

  source_configuration = artifacts.SourceConfigurationArtifact(
      path_spec=source_path_spec)

  session = sessions.Session()

  configuration = configurations.ProcessingConfiguration()
  configuration.parser_filter_expression = 'filestat'

  storage_writer = fake_writer.FakeStorageWriter()
  storage_writer.Open()

  try:
    test_engine.PreprocessSources(
        test_artifacts_path, None, [source_path_spec], session,
        storage_writer)

    processing_status = test_engine.ProcessSources(
        [source_configuration], storage_writer, resolver_context,
        configuration)

    # Collect the per-parser event counts written by the engine.
    parsers_counter = collections.Counter({
        parser_count.name: parser_count.number_of_events
        for parser_count in storage_writer.GetAttributeContainers(
            'parser_count')})

  finally:
    storage_writer.Close()

  self.assertFalse(processing_status.aborted)

  self.assertEqual(storage_writer.number_of_events, 15)
  self.assertEqual(storage_writer.number_of_extraction_warnings, 0)
  self.assertEqual(storage_writer.number_of_recovery_warnings, 0)

  expected_parsers_counter = collections.Counter({
      'filestat': 15,
      'total': 15})
  self.assertEqual(parsers_counter, expected_parsers_counter)
def testGetDisplayName(self):
  """Tests the GetDisplayName function."""
  session = sessions.Session()
  writer = fake_writer.FakeStorageWriter(session)
  parsers_mediator = self._CreateParserMediator(writer)

  # A file entry is required.
  with self.assertRaises(ValueError):
    parsers_mediator.GetDisplayName(file_entry=None)

  test_file_path = self._GetTestFilePath(['syslog.gz'])
  self._SkipIfPathNotExists(test_file_path)

  # Plain OS file entry.
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(os_path_spec)

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)

  expected_display_name = 'OS:{0:s}'.format(test_file_path)
  self.assertEqual(display_name, expected_display_name)

  # GZIP-compressed file entry.
  gzip_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_GZIP, parent=os_path_spec)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(gzip_path_spec)

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)

  expected_display_name = 'GZIP:{0:s}'.format(test_file_path)
  self.assertEqual(display_name, expected_display_name)

  # File entry inside a VSS store of a QCOW image.
  test_file_path = self._GetTestFilePath(['vsstest.qcow2'])
  self._SkipIfPathNotExists(test_file_path)

  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  qcow_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_QCOW, parent=os_path_spec)
  vshadow_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_VSHADOW, location='/vss2',
      store_index=1, parent=qcow_path_spec)
  tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, inode=35,
      location='/syslog.gz', parent=vshadow_path_spec)

  file_entry = path_spec_resolver.Resolver.OpenFileEntry(tsk_path_spec)

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)

  expected_display_name = 'VSS2:TSK:/syslog.gz'
  self.assertEqual(display_name, expected_display_name)

  # With a text-prepend configuration the prefix appears in the name.
  configuration = configurations.EventExtractionConfiguration()
  configuration.text_prepend = 'C:'

  parsers_mediator.SetEventExtractionConfiguration(configuration)

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)

  expected_display_name = 'VSS2:TSK:C:/syslog.gz'
  self.assertEqual(display_name, expected_display_name)
def _CreateTestStorageWriter(self):
  """Creates a storage writer for testing purposes.

  Returns:
    StorageWriter: an opened fake storage writer.
  """
  writer = fake_writer.FakeStorageWriter()
  writer.Open()
  return writer
def _CreateStorageWriter(self):
  """Creates a storage writer object.

  Returns:
    FakeStorageWriter: an opened fake storage writer.
  """
  writer = fake_writer.FakeStorageWriter()
  writer.Open()
  return writer
def testGetLatestYear(self):
  """Tests the GetLatestYear function."""
  session = sessions.Session()
  writer = fake_writer.FakeStorageWriter(session)
  parsers_mediator = self._CreateParserMediator(writer)

  # Without a file entry the mediator falls back to the current year.
  expected_latest_year = timelib.GetCurrentYear()
  self.assertEqual(parsers_mediator.GetLatestYear(), expected_latest_year)
def testSignalAbort(self):
  """Tests the SignalAbort function."""
  knowledge_base_object = knowledge_base.KnowledgeBase()
  parser_mediator = mediator.ParserMediator(knowledge_base_object)

  writer = fake_writer.FakeStorageWriter()
  parser_mediator.SetStorageWriter(writer)

  # Should not raise.
  parser_mediator.SignalAbort()
def testSetFileEntry(self):
  """Tests the SetFileEntry function."""
  knowledge_base_object = knowledge_base.KnowledgeBase()
  parser_mediator = mediator.ParserMediator(knowledge_base_object)

  writer = fake_writer.FakeStorageWriter()
  parser_mediator.SetStorageWriter(writer)

  # Clearing the file entry should not raise.
  parser_mediator.SetFileEntry(None)