def testWriteSessionStartAndCompletion(self):
  """Tests the WriteSessionStart and WriteSessionCompletion functions."""
  session = sessions.Session()

  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()
  writer.WriteSessionStart()
  writer.WriteSessionCompletion()
  writer.Close()

  # Session metadata cannot be written once the writer is closed.
  with self.assertRaises(IOError):
    writer.WriteSessionStart()

  with self.assertRaises(IOError):
    writer.WriteSessionCompletion()

  # Session metadata is not supported by task-based storage.
  writer = fake_storage.FakeStorageWriter(
      session, storage_type=definitions.STORAGE_TYPE_TASK)
  writer.Open()

  with self.assertRaises(IOError):
    writer.WriteSessionStart()

  with self.assertRaises(IOError):
    writer.WriteSessionCompletion()

  writer.Close()
def testWriteTaskStartAndCompletion(self):
  """Tests the WriteTaskStart and WriteTaskCompletion functions."""
  session = sessions.Session()
  task = tasks.Task(session_identifier=session.identifier)

  writer = fake_storage.FakeStorageWriter(
      session, storage_type=definitions.STORAGE_TYPE_TASK, task=task)
  writer.Open()
  writer.WriteTaskStart()
  writer.WriteTaskCompletion()
  writer.Close()

  # Task metadata cannot be written once the writer is closed.
  with self.assertRaises(IOError):
    writer.WriteTaskStart()

  with self.assertRaises(IOError):
    writer.WriteTaskCompletion()

  # Task metadata is not supported by session-based storage.
  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()

  with self.assertRaises(IOError):
    writer.WriteTaskStart()

  with self.assertRaises(IOError):
    writer.WriteTaskCompletion()

  writer.Close()
def testProcessPathSpecCompressedArchive(self):
  """Tests the ProcessPathSpec function on a compressed archive file."""
  knowledge_base_values = {'year': 2016}
  session = sessions.Session()

  # Build the path specification chain by hand: OS file -> gzip layer ->
  # a single member inside the tar archive.
  source_path = self._GetTestFilePath(['syslog.tgz'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  gzip_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_GZIP, parent=os_path_spec)
  tar_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TAR, location='/syslog',
      parent=gzip_path_spec)

  writer = fake_storage.FakeStorageWriter(session)
  self._TestProcessPathSpec(
      writer, tar_path_spec, knowledge_base_values=knowledge_base_values)

  self.assertEqual(writer.number_of_events, 13)

  # Process an archive file with "process archive files" mode.
  archive_path_spec = self._GetTestFilePathSpec(['syslog.tgz'])

  writer = fake_storage.FakeStorageWriter(session)
  self._TestProcessPathSpec(
      writer, archive_path_spec,
      knowledge_base_values=knowledge_base_values, process_archives=True)

  self.assertEqual(writer.number_of_events, 17)
def testProcessSources(self):
  """Tests the ProcessSources function."""
  test_engine = single_process.SingleProcessEngine()
  resolver_context = context.Context()
  session = sessions.Session()

  source_path = self._GetTestFilePath([u'ímynd.dd'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  source_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/',
      parent=os_path_spec)

  test_engine.PreprocessSources([source_path_spec])

  storage_writer = fake_storage.FakeStorageWriter(session)

  configuration = configurations.ProcessingConfiguration()
  configuration.parser_filter_expression = u'filestat'

  test_engine.ProcessSources(
      [source_path_spec], storage_writer, resolver_context, configuration)

  # Use the number_of_events property for consistency with the other
  # ProcessSources and ProcessPathSpec tests, instead of
  # len(storage_writer.events).
  self.assertEqual(storage_writer.number_of_events, 15)
def testInitialize(self):
  """Tests the __init__ function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  knowledge_base = self._SetUpKnowledgeBase()

  # Construction should succeed without raising.
  mediator.AnalysisMediator(writer, knowledge_base)
def testProcessSources(self):
  """Tests the ProcessSources function."""
  registry = artifacts_registry.ArtifactDefinitionsRegistry()
  reader = artifacts_reader.YamlArtifactsReader()

  artifacts_path = shared_test_lib.GetTestFilePath([u'artifacts'])
  registry.ReadFromDirectory(reader, artifacts_path)

  test_engine = single_process.SingleProcessEngine()
  resolver_context = context.Context()
  session = sessions.Session()

  source_path = self._GetTestFilePath([u'ímynd.dd'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  source_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/',
      parent=os_path_spec)

  test_engine.PreprocessSources(registry, [source_path_spec])

  writer = fake_storage.FakeStorageWriter(session)

  configuration = configurations.ProcessingConfiguration()
  configuration.parser_filter_expression = u'filestat'

  test_engine.ProcessSources(
      [source_path_spec], writer, resolver_context, configuration)

  self.assertEqual(writer.number_of_events, 15)
def _ParseFileWithPlugin(
    self, path_segments, plugin_name, knowledge_base_values=None):
  """Parses a syslog file with a specific plugin.

  Args:
    path_segments (list[str]): path segments inside the test data directory.
    plugin_name (str): name of the plugin.
    knowledge_base_values (Optional[dict]): knowledge base values.

  Returns:
    FakeStorageWriter: storage writer.
  """
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()

  # Restrict the syslog parser to the single plugin under test.
  parser = syslog.SyslogParser()
  parser.EnablePlugins([plugin_name])

  file_entry = self._GetTestFileEntry(path_segments)
  parser_mediator = self._CreateParserMediator(
      writer, file_entry=file_entry,
      knowledge_base_values=knowledge_base_values)

  file_object = file_entry.GetFileObject()
  try:
    parser.Parse(parser_mediator, file_object)
  finally:
    file_object.close()

  return writer
def testExtractionWorkerYara(self):
  """Tests that the worker applies Yara matching code correctly."""
  extraction_worker = worker.EventExtractionWorker()

  rule_path = self._GetTestFilePath(['yara.rules'])
  with open(rule_path, 'r') as rule_file:
    rule_string = rule_file.read()

  extraction_worker._SetYaraRules(rule_string)
  self.assertIn('yara', extraction_worker.GetAnalyzerNames())

  knowledge_base_values = {'year': 2016}
  session = sessions.Session()

  path_spec = self._GetTestFilePathSpec(['test_pe.exe'])
  writer = fake_storage.FakeStorageWriter(session)
  self._TestProcessPathSpec(
      writer, path_spec, extraction_worker=extraction_worker,
      knowledge_base_values=knowledge_base_values)

  writer.Open()

  # Every extracted event should carry the combined Yara rule matches.
  expected_yara_match = 'PEfileBasic,PEfile'
  for event in writer.GetSortedEvents():
    yara_match = getattr(event, 'yara_match', None)
    self.assertEqual(yara_match, expected_yara_match)

  writer.Close()
def testSignalAbort(self):
  """Tests the SignalAbort function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  test_mediator = self._CreateParserMediator(writer)

  # Should complete without raising.
  test_mediator.SignalAbort()
def _AnalyzeEvents(self, event_objects, plugin, knowledge_base_values=None):
  """Analyzes events using the analysis plugin.

  Args:
    event_objects (list[EventObject]): events to analyze.
    plugin (AnalysisPlugin): plugin.
    knowledge_base_values (Optional[dict[str, str]]): knowledge base
        values.

  Returns:
    FakeStorageWriter: storage writer.
  """
  knowledge_base = self._SetUpKnowledgeBase(
      knowledge_base_values=knowledge_base_values)

  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()

  test_mediator = analysis_mediator.AnalysisMediator(
      writer, knowledge_base)

  # Run every event through the plugin before compiling the report.
  for event in event_objects:
    plugin.ExamineEvent(test_mediator, event)

  analysis_report = plugin.CompileReport(test_mediator)
  writer.AddAnalysisReport(analysis_report)

  return writer
def testSetFileEntry(self):
  """Tests the SetFileEntry function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  test_mediator = self._CreateParserMediator(writer)

  # Clearing the file entry should be accepted without raising.
  test_mediator.SetFileEntry(None)
def _ParseFile(self, path_segments, parser, knowledge_base_object):
  """Parses a file using the parser.

  Args:
    path_segments (list[str]): path segments inside the test data directory.
    parser (BaseParser): parser.
    knowledge_base_object (KnowledgeBase): knowledge base.

  Returns:
    FakeStorageWriter: storage writer.
  """
  session = sessions.Session()
  storage_writer = fake_storage.FakeStorageWriter(session)
  storage_writer.Open()

  mediator = parsers_mediator.ParserMediator(
      storage_writer, knowledge_base_object)

  file_entry = self._GetTestFileEntry(path_segments)
  mediator.SetFileEntry(file_entry)

  if isinstance(parser, parsers_interface.FileEntryParser):
    parser.Parse(mediator)

  elif isinstance(parser, parsers_interface.FileObjectParser):
    file_object = file_entry.GetFileObject()
    try:
      parser.Parse(mediator, file_object)
    finally:
      file_object.close()

  else:
    # Use the "!s" conversion: the "s" presentation type raises TypeError
    # for non-string arguments such as a type object, which would mask
    # the intended failure message.
    self.fail('Got unexpected parser type: {0!s}'.format(type(parser)))

  return storage_writer
def _ParsePlistWithPlugin(
    self, plugin, plist_name, top_level_object, knowledge_base_values=None):
  """Parses a plist using the plugin object.

  Args:
    plugin (PlistPlugin): a plist plugin.
    plist_name (str): name of the plist to parse.
    top_level_object (dict[str, object]): plist top-level key.
    knowledge_base_values (Optional[dict[str, object]]): knowledge base
        values.

  Returns:
    FakeStorageWriter: a storage writer.
  """
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()

  parser_mediator = self._CreateParserMediator(
      writer, knowledge_base_values=knowledge_base_values)

  plugin.Process(
      parser_mediator, plist_name=plist_name, top_level=top_level_object)

  return writer
def testExtractionWorkerHashing(self):
  """Test that the worker sets up and runs hashing code correctly."""
  extraction_worker = worker.EventExtractionWorker()

  extraction_worker._SetHashers('md5')
  self.assertIn('hashing', extraction_worker.GetAnalyzerNames())

  knowledge_base_values = {'year': 2016}
  session = sessions.Session()

  path_spec = self._GetTestFilePathSpec(['empty_file'])
  writer = fake_storage.FakeStorageWriter(session)
  self._TestProcessPathSpec(
      writer, path_spec, extraction_worker=extraction_worker,
      knowledge_base_values=knowledge_base_values)

  writer.Open()

  # Every event for the empty file should carry the well-known MD5 of
  # zero bytes of input.
  empty_file_md5 = 'd41d8cd98f00b204e9800998ecf8427e'
  for event in writer.GetSortedEvents():
    md5_hash = getattr(event, 'md5_hash', None)
    self.assertEqual(md5_hash, empty_file_md5)

  writer.Close()
def _ParsePlistWithPlugin(
    self, plugin, plist_name, top_level_object, knowledge_base_values=None):
  """Parses a plist using the plugin object.

  Args:
    plugin (PlistPlugin): a plist plugin.
    plist_name (str): name of the plist to parse.
    top_level_object (dict[str, object]): plist top-level object.
    knowledge_base_values (Optional[dict]): knowledge base values.

  Returns:
    FakeStorageWriter: a storage writer.
  """
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()

  parser_mediator = self._CreateParserMediator(
      writer, knowledge_base_values=knowledge_base_values)

  plugin.Process(
      parser_mediator, plist_name=plist_name, top_level=top_level_object)

  return writer
def testGetDisplayName(self):
  """Tests the GetDisplayName function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  test_mediator = self._CreateParserMediator(
      writer, knowledge_base_values=None)

  # A file entry is required to determine a display name.
  with self.assertRaises(ValueError):
    test_mediator.GetDisplayName(file_entry=None)

  test_path = self._GetTestFilePath([u'syslog.gz'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(os_path_spec)

  expected_display_name = u'OS:{0:s}'.format(test_path)
  display_name = test_mediator.GetDisplayName(file_entry=file_entry)
  self.assertEqual(display_name, expected_display_name)

  gzip_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_GZIP, parent=os_path_spec)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(gzip_path_spec)

  expected_display_name = u'GZIP:{0:s}'.format(test_path)
  display_name = test_mediator.GetDisplayName(file_entry=file_entry)
  self.assertEqual(display_name, expected_display_name)

  # A file inside the second volume shadow snapshot of a QCOW test image.
  test_path = self._GetTestFilePath([u'vsstest.qcow2'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)
  qcow_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_QCOW, parent=os_path_spec)
  vshadow_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_VSHADOW, location=u'/vss2',
      store_index=1, parent=qcow_path_spec)
  tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, inode=35,
      location=u'/syslog.gz', parent=vshadow_path_spec)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(tsk_path_spec)

  expected_display_name = u'VSS2:TSK:/syslog.gz'
  display_name = test_mediator.GetDisplayName(file_entry=file_entry)
  self.assertEqual(display_name, expected_display_name)

  # A configured text prepend should appear in the display name.
  configuration = configurations.EventExtractionConfiguration()
  configuration.text_prepend = u'C:'

  test_mediator.SetEventExtractionConfiguration(configuration)

  expected_display_name = u'VSS2:TSK:C:/syslog.gz'
  display_name = test_mediator.GetDisplayName(file_entry=file_entry)
  self.assertEqual(display_name, expected_display_name)
def testGetLatestYearFromFileEntry(self):
  """Tests the _GetLatestYearFromFileEntry function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  test_mediator = self._CreateParserMediator(writer)

  # Without a file entry no year can be determined.
  latest_year = test_mediator._GetLatestYearFromFileEntry()
  self.assertIsNone(latest_year)
def testGetFilename(self):
  """Tests the GetFilename function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  test_mediator = self._CreateParserMediator(writer)

  # No file entry has been set, so no filename is available.
  filename = test_mediator.GetFilename()
  self.assertIsNone(filename)
def testGetLatestYear(self):
  """Tests the GetLatestYear function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  test_mediator = self._CreateParserMediator(writer)

  # Without a file entry the mediator falls back to the current year.
  expected_latest_year = timelib.GetCurrentYear()
  latest_year = test_mediator.GetLatestYear()
  self.assertEqual(latest_year, expected_latest_year)
def testSignalAbort(self):
  """Tests the SignalAbort function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  knowledge_base = self._SetUpKnowledgeBase()

  analysis_mediator = mediator.AnalysisMediator(writer, knowledge_base)

  # Should complete without raising.
  analysis_mediator.SignalAbort()
def _CreateStorageWriter(self):
  """Creates a storage writer object.

  Returns:
    FakeStorageWriter: storage writer.
  """
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()
  return writer
def testProcessPathSpecCompressedFileBZIP2(self):
  """Tests the ProcessPathSpec function on a bzip2 compressed file."""
  knowledge_base_values = {u'year': 2016}
  session = sessions.Session()

  path_spec = self._GetTestFilePathSpec([u'syslog.bz2'])
  writer = fake_storage.FakeStorageWriter(session)
  self._TestProcessPathSpec(
      writer, path_spec, knowledge_base_values=knowledge_base_values)

  self.assertEqual(writer.number_of_events, 15)
def _CreateStorageWriter(self, session):
  """Creates a storage writer object.

  Args:
    session (Session): session.

  Returns:
    FakeStorageWriter: storage writer.
  """
  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()
  return writer
def testProcessPathSpecFile(self):
  """Tests the ProcessPathSpec function on a file."""
  knowledge_base_values = {'year': 2016}
  session = sessions.Session()

  path_spec = self._GetTestFilePathSpec(['syslog'])
  writer = fake_storage.FakeStorageWriter(session)
  self._TestProcessPathSpec(
      writer, path_spec, knowledge_base_values=knowledge_base_values)

  self.assertEqual(writer.number_of_events, 19)
def testGetDisplayNameForPathSpec(self):
  """Tests the GetDisplayNameForPathSpec function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  test_mediator = self._CreateParserMediator(writer)

  test_path = self._GetTestFilePath([u'syslog.gz'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)

  # The display name of an OS path spec is its type prefix plus location.
  expected_display_name = u'OS:{0:s}'.format(test_path)
  display_name = test_mediator.GetDisplayNameForPathSpec(os_path_spec)
  self.assertEqual(display_name, expected_display_name)
def testAddEventSource(self):
  """Tests the AddEventSource function."""
  session = sessions.Session()
  event_source = event_sources.EventSource()

  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()
  writer.AddEventSource(event_source)
  writer.Close()

  # Adding an event source to a closed writer is not allowed.
  with self.assertRaises(IOError):
    writer.AddEventSource(event_source)
def testAddError(self):
  """Tests the AddError function."""
  session = sessions.Session()
  extraction_error = errors.ExtractionError(
      message='Test extraction error')

  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()
  writer.AddError(extraction_error)
  writer.Close()

  # Adding an error to a closed writer is not allowed.
  with self.assertRaises(IOError):
    writer.AddError(extraction_error)
def testAddAnalysisReport(self):
  """Tests the AddAnalysisReport function."""
  session = sessions.Session()
  analysis_report = reports.AnalysisReport(
      plugin_name='test', text='test report')

  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()
  writer.AddAnalysisReport(analysis_report)
  writer.Close()

  # Adding a report to a closed writer is not allowed.
  with self.assertRaises(IOError):
    writer.AddAnalysisReport(analysis_report)
def testMergeFromStorage(self):
  """Tests the MergeFromStorage function."""
  session = sessions.Session()
  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()

  # Merge the contents of two ZIP-based storage files in sequence.
  test_file = self._GetTestFilePath([u'psort_test.json.plaso'])
  reader = zip_file.ZIPStorageFileReader(test_file)
  writer.MergeFromStorage(reader)

  test_file = self._GetTestFilePath([u'pinfo_test.json.plaso'])
  reader = zip_file.ZIPStorageFileReader(test_file)
  writer.MergeFromStorage(reader)

  writer.Close()
def testOpenClose(self):
  """Tests the Open and Close functions."""
  session = sessions.Session()

  # A session storage writer can be opened and closed repeatedly.
  writer = fake_storage.FakeStorageWriter(session)
  writer.Open()
  writer.Close()

  writer.Open()
  writer.Close()

  # The same holds for a task storage writer.
  writer = fake_storage.FakeStorageWriter(
      session, storage_type=definitions.STORAGE_TYPE_TASK)
  writer.Open()
  writer.Close()

  writer.Open()

  # Opening an already open writer is not allowed.
  with self.assertRaises(IOError):
    writer.Open()

  writer.Close()

  # Closing an already closed writer is not allowed.
  with self.assertRaises(IOError):
    writer.Close()