def testGetDisplayName(self):
  """Tests the GetDisplayName function."""
  event_queue = single_process.SingleProcessQueue()
  parse_error_queue = single_process.SingleProcessQueue()
  parsers_mediator = self._GetParserMediator(
      event_queue, parse_error_queue, knowledge_base_values=None)

  # A display name cannot be determined without a file entry.
  with self.assertRaises(ValueError):
    _ = parsers_mediator.GetDisplayName(file_entry=None)

  test_path = self._GetTestFilePath([u'syslog.gz'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=test_path)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(os_path_spec)

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)
  expected_display_name = u'OS:{0:s}'.format(test_path)
  self.assertEqual(display_name, expected_display_name)

  # A gzip path specification layered on top of the OS path specification
  # should yield a GZIP-prefixed display name.
  gzip_path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_GZIP, parent=os_path_spec)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(gzip_path_spec)

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)
  expected_display_name = u'GZIP:{0:s}'.format(test_path)
  self.assertEqual(display_name, expected_display_name)
def _ParsePlistWithPlugin(
    self, plugin_object, plist_name, top_level_object,
    knowledge_base_values=None):
  """Parses a plist using the plugin object.

  Args:
    plugin_object: the plugin object.
    plist_name: the name of the plist to parse.
    top_level_object: the top-level plist object.
    knowledge_base_values: optional dict containing the knowledge base
                           values.

  Returns:
    An event object queue consumer object (instance of
    TestItemQueueConsumer).
  """
  event_queue = single_process.SingleProcessQueue()
  event_queue_consumer = test_lib.TestItemQueueConsumer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()

  parser_mediator = self._GetParserMediator(
      event_queue, parse_error_queue,
      knowledge_base_values=knowledge_base_values)

  plugin_object.Process(
      parser_mediator, plist_name=plist_name, top_level=top_level_object)

  return event_queue_consumer
def testExtractionWorkerHashing(self):
  """Test that the worker sets up and runs hashing code correctly."""
  collection_queue = single_process.SingleProcessQueue()
  storage_queue = single_process.SingleProcessQueue()
  parse_error_queue = single_process.SingleProcessQueue()

  event_queue_producer = single_process.SingleProcessItemQueueProducer(
      storage_queue)
  parse_error_queue_producer = single_process.SingleProcessItemQueueProducer(
      parse_error_queue)

  knowledge_base_object = knowledge_base.KnowledgeBase()
  parser_mediator = parsers_mediator.ParserMediator(
      event_queue_producer, parse_error_queue_producer,
      knowledge_base_object)

  resolver_context = context.Context()
  extraction_worker = worker.BaseEventExtractionWorker(
      0, collection_queue, event_queue_producer, parse_error_queue_producer,
      parser_mediator, resolver_context=resolver_context)

  # We're going to check that the worker set up its internal state correctly.
  # pylint: disable=protected-access
  extraction_worker.SetHashers(hasher_names_string=u'md5')
  self.assertEqual(1, len(extraction_worker._hasher_names))

  extraction_worker.InitializeParserObjects()
def _ParseFile(self, parser_object, path, knowledge_base_object):
  """Parses a file using the parser object.

  Args:
    parser_object: the parser object.
    path: the path of the file to parse.
    knowledge_base_object: the knowledge base object (instance of
                           KnowledgeBase).

  Returns:
    An event object queue object (instance of Queue).
  """
  event_queue = single_process.SingleProcessQueue()
  event_queue_producer = queue.ItemQueueProducer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()

  # NOTE(review): sibling helpers wrap the parse error queue in an
  # ItemQueueProducer before handing it to ParserMediator; here the bare
  # queue is passed — confirm the mediator accepts a plain queue object.
  parser_mediator = parsers_mediator.ParserMediator(
      event_queue_producer, parse_error_queue, knowledge_base_object)

  path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=path)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)

  parser_mediator.SetFileEntry(file_entry)
  parser_object.Parse(parser_mediator)
  event_queue.SignalEndOfInput()

  return event_queue
def _ParseFileByPathSpec(
    self, parser_object, path_spec, knowledge_base_values=None):
  """Parses a file using the parser object.

  Args:
    parser_object: the parser object.
    path_spec: the path specification of the file to parse.
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestItemQueueConsumer).
  """
  event_queue = single_process.SingleProcessQueue()
  event_queue_consumer = TestItemQueueConsumer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()

  parser_mediator = self._GetParserMediator(
      event_queue, parse_error_queue,
      knowledge_base_values=knowledge_base_values)

  file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
  parser_mediator.SetFileEntry(file_entry)

  # AppendToParserChain needs to be run after SetFileEntry.
  parser_mediator.AppendToParserChain(parser_object)
  parser_object.Parse(parser_mediator)

  return event_queue_consumer
def _ParseEseDbFileWithPlugin(
    self, path_segments, plugin_object, knowledge_base_values=None):
  """Parses a file as an ESE database file and returns an event generator.

  Args:
    path_segments: the path segments of the ESE database test file.
    plugin_object: the plugin object that is used to extract an event
                   generator.
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestEventObjectQueueConsumer).
  """
  event_queue = single_process.SingleProcessQueue()
  event_queue_consumer = test_lib.TestEventObjectQueueConsumer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()

  parser_mediator = self._GetParserMediator(
      event_queue, parse_error_queue,
      knowledge_base_values=knowledge_base_values)

  esedb_file = self._OpenEseDbFile(path_segments)
  cache = esedb.EseDbCache()
  plugin_object.Process(parser_mediator, database=esedb_file, cache=cache)

  return event_queue_consumer
def testGetDisplayName(self):
  """Tests the GetDisplayName function."""
  event_queue = single_process.SingleProcessQueue()
  parse_error_queue = single_process.SingleProcessQueue()
  parsers_mediator = self._GetParserMediator(
      event_queue, parse_error_queue, knowledge_base_values=None)

  # A display name cannot be determined without a file entry.
  with self.assertRaises(ValueError):
    _ = parsers_mediator.GetDisplayName(file_entry=None)

  test_path = self._GetTestFilePath([u'syslog.gz'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(os_path_spec)

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)
  expected_display_name = u'OS:{0:s}'.format(test_path)
  self.assertEqual(display_name, expected_display_name)

  gzip_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_GZIP, parent=os_path_spec)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(gzip_path_spec)

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)
  expected_display_name = u'GZIP:{0:s}'.format(test_path)
  self.assertEqual(display_name, expected_display_name)

  # A file entry inside a volume shadow snapshot (VSS) within a QCOW image
  # should yield a VSS-prefixed display name.
  test_path = self._GetTestFilePath([u'vsstest.qcow2'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)
  qcow_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_QCOW, parent=os_path_spec)
  vshadow_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_VSHADOW, location=u'/vss2',
      store_index=1, parent=qcow_path_spec)
  tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, inode=35,
      location=u'/syslog.gz', parent=vshadow_path_spec)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(tsk_path_spec)

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)
  expected_display_name = u'VSS2:TSK:/syslog.gz'
  self.assertEqual(display_name, expected_display_name)

  # A text prepend value should be inserted into the display name.
  parsers_mediator.SetTextPrepend(u'C:')

  display_name = parsers_mediator.GetDisplayName(file_entry=file_entry)
  expected_display_name = u'VSS2:TSK:C:/syslog.gz'
  self.assertEqual(display_name, expected_display_name)
def _ParseKeyWithPlugin(self, plugin_object, winreg_key,
    knowledge_base_values=None, file_entry=None, parser_chain=None):
  """Parses a key within a Windows Registry file using the plugin object.

  Args:
    plugin_object: the plugin object.
    winreg_key: the Windows Registry Key.
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.
    file_entry: optional file entry object (instance of dfvfs.FileEntry).
                The default is None.
    parser_chain: optional string containing the parsing chain up to this
                  point. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestEventObjectQueueConsumer).
  """
  self.assertNotEqual(winreg_key, None)

  event_queue = single_process.SingleProcessQueue()
  event_queue_consumer = test_lib.TestEventObjectQueueConsumer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()

  parser_mediator = self._GetParserMediator(
      event_queue, parse_error_queue,
      knowledge_base_values=knowledge_base_values)
  parser_mediator.SetFileEntry(file_entry)

  # Most tests aren't explicitly checking for parser chain values,
  # or setting them, so we'll just append the plugin name if no explicit
  # parser chain argument is supplied.
  # pylint: disable=protected-access
  if parser_chain is None:
    # AppendToParserChain needs to be run after SetFileEntry.
    parser_mediator.AppendToParserChain(plugin_object)
  else:
    # In the rare case that a test is checking for a particular chain, we
    # provide a way set it directly. There's no public API for this,
    # as access to the parser chain should be very infrequent.
    parser_mediator._parser_chain_components = parser_chain.split(u'/')

  plugin_object.Process(parser_mediator, key=winreg_key)

  return event_queue_consumer
def _ParseDatabaseFileWithPlugin(
    self, plugin_object, path, cache=None, knowledge_base_values=None):
  """Parses a file as a SQLite database with a specific plugin.

  Args:
    plugin_object: the plugin object that is used to extract an event
                   generator.
    path: the path to the SQLite database file.
    cache: a cache object (instance of SQLiteCache).
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestItemQueueConsumer).
  """
  event_queue = single_process.SingleProcessQueue()
  event_queue_consumer = test_lib.TestItemQueueConsumer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()

  parser_mediator = self._GetParserMediator(
      event_queue, parse_error_queue,
      knowledge_base_values=knowledge_base_values)

  path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=path)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)

  parser_mediator.SetFileEntry(file_entry)
  # AppendToParserChain needs to be run after SetFileEntry.
  parser_mediator.AppendToParserChain(plugin_object)

  database = sqlite.SQLiteDatabase(file_entry.name)

  # The file object is only needed to read the database into memory;
  # close it as soon as Open has completed.
  file_object = file_entry.GetFileObject()
  try:
    database.Open(file_object)
  finally:
    file_object.close()

  try:
    plugin_object.Process(parser_mediator, cache=cache, database=database)
  finally:
    database.Close()

  return event_queue_consumer
def testRun(self):
  """Tests the Run function."""
  test_file = self._GetTestFilePath([u'storage.json.plaso'])
  storage_object = storage_zip_file.StorageFile(test_file, read_only=True)

  test_path_spec_queue = single_process.SingleProcessQueue()
  test_collector = engine.PathSpecQueueProducer(
      test_path_spec_queue, storage_object)
  test_collector.Run()

  test_collector_queue_consumer = TestPathSpecQueueConsumer(
      test_path_spec_queue)
  test_collector_queue_consumer.ConsumeItems()

  self.assertEqual(test_collector_queue_consumer.number_of_path_specs, 2)

  expected_path_specs = [
      u'type: OS, location: /tmp/test/test_data/syslog\n',
      u'type: OS, location: /tmp/test/test_data/syslog\n']

  # Compare the comparable string representations of the path
  # specifications, independent of ordering.
  path_specs = [
      path_spec_object.comparable
      for path_spec_object in test_collector_queue_consumer.path_specs]

  self.assertEqual(sorted(path_specs), sorted(expected_path_specs))
def testViperLookup(self):
  """Tests for the Viper analysis plugin."""
  event_queue = single_process.SingleProcessQueue()
  knowledge_base = self._SetUpKnowledgeBase()

  # Fill the incoming queue with events.
  test_queue_producer = queue.ItemQueueProducer(event_queue)
  events = [
      self._CreateTestEventObject(test_event)
      for test_event in self.TEST_EVENTS]
  test_queue_producer.ProduceItems(events)

  # Set up the plugin.
  analysis_plugin = viper.ViperAnalysisPlugin(event_queue)
  analysis_plugin.SetProtocol(u'http')
  analysis_plugin.SetHost(u'localhost')

  # Run the analysis plugin.
  analysis_report_queue_consumer = self._RunAnalysisPlugin(
      analysis_plugin, knowledge_base)
  analysis_reports = self._GetAnalysisReportsFromQueue(
      analysis_report_queue_consumer)

  self.assertEqual(len(analysis_reports), 1)

  report = analysis_reports[0]
  tags = report.GetTags()
  self.assertEqual(len(tags), 1)

  tag = tags[0]
  self.assertEqual(tag.event_uuid, u'8')

  expected_string = (
      u'File is present in Viper. Projects: \"default\" Tags \"'
      u'rat, darkcomet\"')
  self.assertEqual(tag.tags[0], expected_string)
def _Extract(self, source_path_specs, destination_path,
    remove_duplicates=True):
  """Extracts files.

  Args:
    source_path_specs: list of path specifications (instances of
                       dfvfs.PathSpec) to process.
    destination_path: the path where the extracted files should be stored.
    remove_duplicates: optional boolean value to indicate if files with
                       duplicate content should be removed. The default is
                       True.
  """
  # Make sure the destination directory exists before extracting.
  if not os.path.isdir(destination_path):
    os.makedirs(destination_path)

  input_queue = single_process.SingleProcessQueue()

  image_collector = collector.Collector(input_queue)
  image_collector.Collect(source_path_specs)

  file_saver = FileSaver(skip_duplicates=remove_duplicates)
  input_queue_consumer = ImageExtractorQueueConsumer(
      input_queue, file_saver, destination_path, self._filter_collection)
  input_queue_consumer.ConsumeItems()
def _RunAnalysisPlugin(self, analysis_plugin, knowledge_base_object,
    output_format=u'text'):
  """Analyzes an event object queue using the plugin object.

  Args:
    analysis_plugin: the analysis plugin object (instance of
                     AnalysisPlugin).
    knowledge_base_object: the knowledge base object (instance of
                           KnowledgeBase).
    output_format: optional output format. The default is 'text'.

  Returns:
    An event object queue object (instance of Queue).
  """
  analysis_report_queue = single_process.SingleProcessQueue()
  analysis_report_queue_consumer = TestAnalysisReportQueueConsumer(
      analysis_report_queue)
  analysis_report_queue_producer = queue.ItemQueueProducer(
      analysis_report_queue)

  analysis_mediator = mediator.AnalysisMediator(
      analysis_report_queue_producer, knowledge_base_object,
      output_format=output_format)

  analysis_plugin.RunPlugin(analysis_mediator)
  analysis_report_queue.SignalEndOfInput()

  return analysis_report_queue_consumer
def testUniqueDomainExtraction(self):
  """Tests for the unique domains plugin."""
  event_queue = single_process.SingleProcessQueue()
  knowledge_base = self._SetUpKnowledgeBase()

  # Fill the incoming queue with events.
  test_queue_producer = queue.ItemQueueProducer(event_queue)
  events = [
      self._CreateTestEventObject(test_event)
      for test_event in self.TEST_EVENTS]
  test_queue_producer.ProduceItems(events)

  # Set up the plugin.
  analysis_plugin = unique_domains_visited.UniqueDomainsVisitedPlugin(
      event_queue)

  analysis_report_queue_consumer = self._RunAnalysisPlugin(
      analysis_plugin, knowledge_base)
  analysis_reports = self._GetAnalysisReportsFromQueue(
      analysis_report_queue_consumer)

  self.assertEqual(len(analysis_reports), 1)

  # Every domain in the test events should appear in the report text.
  report_text = analysis_reports[0].GetString()
  for event_object in self.TEST_EVENTS:
    self.assertIn(event_object.get(u'domain', u''), report_text)
def _Extract(self, destination_path, remove_duplicates=True):
  """Extracts files.

  Args:
    destination_path: the path where the extracted files should be stored.
    remove_duplicates: optional boolean value to indicate if files with
                       duplicate content should be removed. The default is
                       True.
  """
  # Make sure the destination directory exists before extracting.
  if not os.path.isdir(destination_path):
    os.makedirs(destination_path)

  input_queue = single_process.SingleProcessQueue()

  # TODO: add support to handle multiple partitions.
  self._source_path_spec = self.GetSourcePathSpec()

  image_collector = collector.Collector(
      input_queue, self._source_path, self._source_path_spec)
  image_collector.Collect()

  file_saver = FileSaver(skip_duplicates=remove_duplicates)
  input_queue_consumer = ImageExtractorQueueConsumer(
      input_queue, file_saver, destination_path, self._filter_collection)
  input_queue_consumer.ConsumeItems()
def testFileSystemCollection(self):
  """Test collection on the file system."""
  test_files = [
      self._GetTestFilePath([u'syslog.tgz']),
      self._GetTestFilePath([u'syslog.zip']),
      self._GetTestFilePath([u'syslog.bz2']),
      self._GetTestFilePath([u'wtmp.1'])]

  with shared_test_lib.TempDirectory() as dirname:
    # Stage the test files in a temporary directory and collect from it.
    for a_file in test_files:
      shutil.copy(a_file, dirname)

    path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_OS, location=dirname)

    test_path_spec_queue = single_process.SingleProcessQueue()
    resolver_context = context.Context()
    test_collector = collector.Collector(
        test_path_spec_queue, resolver_context=resolver_context)
    test_collector.Collect([path_spec])

    test_collector_queue_consumer = TestCollectorQueueConsumer(
        test_path_spec_queue)
    test_collector_queue_consumer.ConsumeItems()

    self.assertEqual(test_collector_queue_consumer.number_of_path_specs, 4)
def testVirusTotalLookup(self):
  """Tests for the VirusTotal analysis plugin."""
  event_queue = single_process.SingleProcessQueue()
  knowledge_base = self._SetUpKnowledgeBase()

  # Fill the incoming queue with events.
  test_queue_producer = queue.ItemQueueProducer(event_queue)
  events = [
      self._CreateTestEventObject(test_event)
      for test_event in self.TEST_EVENTS]
  test_queue_producer.ProduceItems(events)

  analysis_plugin = virustotal.VirusTotalAnalysisPlugin(event_queue)
  analysis_plugin.SetAPIKey(self.FAKE_API_KEY)

  # Run the analysis plugin.
  analysis_report_queue_consumer = self._RunAnalysisPlugin(
      analysis_plugin, knowledge_base)
  analysis_reports = self._GetAnalysisReportsFromQueue(
      analysis_report_queue_consumer)

  self.assertEqual(len(analysis_reports), 1)

  report = analysis_reports[0]
  tags = report.GetTags()
  self.assertEqual(len(tags), 1)

  tag = tags[0]
  self.assertEqual(tag.event_uuid, u'8')
  self.assertEqual(tag.tags[0], u'VirusTotal Detections 10')
def ParseFile(file_entry):
  """Parse a file given a file entry or path and return a list of results.

  Args:
    file_entry: either a file entry object (instance of dfvfs.FileEntry) or
                a string containing a path (absolute or relative) to a
                local file.

  Returns:
    A list of event objects (instances of EventObject) that were extracted
    from the file (or an empty list if no events were extracted).
  """
  if not file_entry:
    # Return an empty list instead of None so the return value always
    # matches the documented contract and callers can iterate it safely.
    return []

  if isinstance(file_entry, basestring):
    file_entry = OpenOSFile(file_entry)

  # Set up the engine.
  # TODO: refactor and add queue limit.
  collection_queue = single_process.SingleProcessQueue()
  storage_queue = single_process.SingleProcessQueue()
  parse_error_queue = single_process.SingleProcessQueue()
  engine_object = engine.BaseEngine(
      collection_queue, storage_queue, parse_error_queue)

  # Create a worker.
  worker_object = engine_object.CreateExtractionWorker(0)
  # TODO: add support for parser_filter_string.
  worker_object.InitializeParserObjects()
  worker_object.ParseFileEntry(file_entry)

  collection_queue.SignalEndOfInput()
  engine_object.SignalEndOfInputStorageQueue()

  # Drain the storage queue until it is empty or the end-of-input marker
  # is reached.
  results = []
  while True:
    try:
      item = storage_queue.PopItem()
    except errors.QueueEmpty:
      break

    if isinstance(item, queue.QueueEndOfInput):
      break

    results.append(item)

  return results
def testFileSystemWithFilterCollection(self):
  """Test collection on the file system with a filter."""
  dirname = u'.'
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=dirname)

  # Write the filter expressions to a temporary file.
  filter_name = ''
  with tempfile.NamedTemporaryFile(delete=False) as temp_file:
    filter_name = temp_file.name
    temp_file.write('/test_data/testdir/filter_.+.txt\n')
    temp_file.write('/test_data/.+evtx\n')
    temp_file.write('/AUTHORS\n')
    temp_file.write('/does_not_exist/some_file_[0-9]+txt\n')

  test_collection_queue = single_process.SingleProcessQueue()
  resolver_context = context.Context()
  test_collector = collector.Collector(
      test_collection_queue, dirname, path_spec,
      resolver_context=resolver_context)

  find_specs = engine_utils.BuildFindSpecsFromFile(filter_name)
  test_collector.SetFilter(find_specs)

  test_collector.Collect()

  test_collector_queue_consumer = TestCollectorQueueConsumer(
      test_collection_queue)
  test_collector_queue_consumer.ConsumeItems()

  # Clean up the temporary filter file; best-effort only.
  try:
    os.remove(filter_name)
  except (OSError, IOError) as exception:
    logging.warning(
        (u'Unable to remove temporary file: {0:s} with error: {1:s}'
        ).format(filter_name, exception))

  # Two files with test_data/testdir/filter_*.txt, AUTHORS
  # and test_data/System.evtx.
  self.assertEqual(test_collector_queue_consumer.number_of_path_specs, 4)

  paths = test_collector_queue_consumer.GetFilePaths()
  current_directory = os.getcwd()

  expected_path = os.path.join(
      current_directory, u'test_data', u'testdir', u'filter_1.txt')
  self.assertTrue(expected_path in paths)

  expected_path = os.path.join(
      current_directory, u'test_data', u'testdir', u'filter_2.txt')
  self.assertFalse(expected_path in paths)

  expected_path = os.path.join(
      current_directory, u'test_data', u'testdir', u'filter_3.txt')
  self.assertTrue(expected_path in paths)

  expected_path = os.path.join(current_directory, u'AUTHORS')
  self.assertTrue(expected_path in paths)
def testImageWithFilterCollection(self):
  """Test collection on a storage media image file with a filter."""
  test_file = self._GetTestFilePath([u'ímynd.dd'])

  volume_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/',
      parent=volume_path_spec)

  # Write the filter expressions to a temporary file.
  filter_name = ''
  with tempfile.NamedTemporaryFile(delete=False) as temp_file:
    filter_name = temp_file.name
    temp_file.write('/a_directory/.+zip\n')
    temp_file.write('/a_directory/another.+\n')
    temp_file.write('/passwords.txt\n')

  test_collection_queue = single_process.SingleProcessQueue()
  resolver_context = context.Context()
  test_collector = collector.Collector(
      test_collection_queue, test_file, path_spec,
      resolver_context=resolver_context)

  find_specs = engine_utils.BuildFindSpecsFromFile(filter_name)
  test_collector.SetFilter(find_specs)

  test_collector.Collect()

  test_collector_queue_consumer = TestCollectorQueueConsumer(
      test_collection_queue)
  test_collector_queue_consumer.ConsumeItems()

  # Clean up the temporary filter file; best-effort only.
  try:
    os.remove(filter_name)
  except (OSError, IOError) as exception:
    logging.warning(
        (u'Unable to remove temporary file: {0:s} with error: {1:s}'
        ).format(filter_name, exception))

  self.assertEqual(test_collector_queue_consumer.number_of_path_specs, 2)

  paths = test_collector_queue_consumer.GetFilePaths()

  # path_specs[0]
  # type: TSK
  # file_path: '/a_directory/another_file'
  # container_path: 'test_data/ímynd.dd'
  # image_offset: 0
  self.assertEqual(paths[0], u'/a_directory/another_file')

  # path_specs[1]
  # type: TSK
  # file_path: '/passwords.txt'
  # container_path: 'test_data/ímynd.dd'
  # image_offset: 0
  self.assertEqual(paths[1], u'/passwords.txt')
def setUp(self):
  """Makes preparations before running an individual test."""
  knowledge_base = self._SetUpKnowledgeBase()

  analysis_report_queue = single_process.SingleProcessQueue()
  analysis_report_queue_producer = queue.ItemQueueProducer(
      analysis_report_queue)

  self._analysis_mediator = mediator.AnalysisMediator(
      analysis_report_queue_producer, knowledge_base)
def CreateParserMediator(self, event_queue=None):
  """Create a parser mediator object.

  Args:
    event_queue: an optional event queue object (instance of Queue). If no
                 queue is provided a new one is created.

  Returns:
    A parser mediator object (instance of parsers_mediator.ParserMediator).
  """
  if event_queue is None:
    event_queue = single_process.SingleProcessQueue()
  event_queue_producer = queue.ItemQueueProducer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()
  parse_error_queue_producer = queue.ItemQueueProducer(parse_error_queue)

  return parsers_mediator.ParserMediator(
      event_queue_producer, parse_error_queue_producer,
      self.knowledge_base_object)
def testParseTagFile(self):
  """Test that the tagging plugin can parse a tag definition file."""
  event_queue = single_process.SingleProcessQueue()
  analysis_plugin = tagging.TaggingPlugin(event_queue)

  # pylint: disable=protected-access
  tags = analysis_plugin._ParseTaggingFile(
      self._GetTestFilePath([self._TEST_TAG_FILE_NAME]))

  self.assertEqual(len(tags), 2)
  self.assertIn(u'application_execution', tags.keys())
  self.assertIn(u'file_downloaded', tags.keys())
def setUp(self):
  """Sets up the needed objects used throughout the test."""
  knowledge_base = self._SetUpKnowledgeBase()

  analysis_report_queue = single_process.SingleProcessQueue()
  analysis_report_queue_producer = queue.ItemQueueProducer(
      analysis_report_queue)

  self._analysis_mediator = mediator.AnalysisMediator(
      analysis_report_queue_producer, knowledge_base)
def _ParseOleCfFileWithPlugin(
    self, path, plugin_object, knowledge_base_values=None):
  """Parses a file as an OLE compound file and returns an event generator.

  Args:
    path: the path to the OLE CF test file.
    plugin_object: the plugin object that is used to extract an event
                   generator.
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestEventObjectQueueConsumer).
  """
  event_queue = single_process.SingleProcessQueue()
  event_queue_consumer = test_lib.TestEventObjectQueueConsumer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()

  parser_mediator = self._GetParserMediator(
      event_queue, parse_error_queue,
      knowledge_base_values=knowledge_base_values)

  olecf_file = self._OpenOleCfFile(path)

  file_entry = self._GetTestFileEntryFromPath([path])
  parser_mediator.SetFileEntry(file_entry)

  # Get a list of all root items from the OLE CF file.
  root_item = olecf_file.root_item
  item_names = [item.name for item in root_item.sub_items]

  plugin_object.Process(
      parser_mediator, root_item=root_item, item_names=item_names)

  return event_queue_consumer
def BuildParserMediator(self, event_queue=None):
  """Build the parser object.

  Args:
    event_queue: an event queue object (instance of Queue). This is
                 optional and if a queue is not provided a default one
                 will be provided.

  Returns:
    A parser mediator object (instance of parsers_mediator.ParserMediator).
  """
  if event_queue is None:
    event_queue = single_process.SingleProcessQueue()
  event_queue_producer = queue.ItemQueueProducer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()
  parse_error_queue_producer = queue.ItemQueueProducer(parse_error_queue)

  return parsers_mediator.ParserMediator(
      event_queue_producer, parse_error_queue_producer,
      PregCache.knowledge_base_object)
def testInvalidTagParsing(self):
  """Test parsing of definition files that contain invalid directives."""
  event_queue = single_process.SingleProcessQueue()
  analysis_plugin = tagging.TaggingPlugin(event_queue)

  # pylint: disable=protected-access
  tags = analysis_plugin._ParseTaggingFile(
      self._GetTestFilePath([self._INVALID_TEST_TAG_FILE_NAME]))

  self.assertEqual(len(tags), 3)

  # A tag with only invalid directives ends up with no expressions.
  self.assertTrue(u'Invalid Tag' in tags)
  self.assertEqual(len(tags[u'Invalid Tag']), 0)

  # A tag with one valid and one invalid directive keeps the valid one.
  self.assertTrue(u'Partially Valid Tag' in tags)
  self.assertEqual(len(tags[u'Partially Valid Tag']), 1)
def testPushPopItem(self):
  """Tests the PushItem and PopItem functions."""
  test_queue = single_process.SingleProcessQueue()

  for item in self._ITEMS:
    test_queue.PushItem(item)

  test_queue_consumer = test_lib.TestQueueConsumer(test_queue)
  test_queue_consumer.ConsumeItems()

  # Every pushed item should have been consumed.
  expected_number_of_items = len(self._ITEMS)
  self.assertEqual(
      test_queue_consumer.number_of_items, expected_number_of_items)
def _ParseFileByPathSpec(self, parser_object, path_spec,
    knowledge_base_values=None):
  """Parses a file using the parser object.

  Args:
    parser_object: the parser object.
    path_spec: the path specification of the file to parse.
    knowledge_base_values: optional dict containing the knowledge base
                           values.

  Returns:
    An event object queue consumer object (instance of
    TestItemQueueConsumer).
  """
  event_queue = single_process.SingleProcessQueue()
  event_queue_consumer = TestItemQueueConsumer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()

  parser_mediator = self._GetParserMediator(
      event_queue, parse_error_queue,
      knowledge_base_values=knowledge_base_values)

  file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
  parser_mediator.SetFileEntry(file_entry)

  # Dispatch on the parser interface: file-entry based parsers receive
  # only the mediator, file-object based parsers also receive an open
  # file-like object that must be closed afterwards.
  if isinstance(parser_object, interface.FileEntryParser):
    parser_object.Parse(parser_mediator)

  elif isinstance(parser_object, interface.FileObjectParser):
    file_object = file_entry.GetFileObject()
    try:
      parser_object.Parse(parser_mediator, file_object)
    finally:
      file_object.close()

  return event_queue_consumer
def _ParseFileWithPlugin(
    self, plugin_name, path, knowledge_base_values=None):
  """Parses a syslog file with a specific plugin.

  Args:
    plugin_name: a string containing the name of the plugin.
    path: a string containing the path to the syslog file.
    knowledge_base_values: optional dictionary containing the knowledge
                           base values.

  Returns:
    An event object queue consumer object (instance of ItemQueueConsumer).
  """
  event_queue = single_process.SingleProcessQueue()
  event_queue_consumer = test_lib.TestItemQueueConsumer(event_queue)

  parse_error_queue = single_process.SingleProcessQueue()

  parser_mediator = self._GetParserMediator(
      event_queue, parse_error_queue,
      knowledge_base_values=knowledge_base_values)

  path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=path)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
  parser_mediator.SetFileEntry(file_entry)

  # Only enable the requested plugin so the test exercises it in
  # isolation.
  parser_object = syslog.SyslogParser()
  parser_object.EnablePlugins([plugin_name])

  file_object = file_entry.GetFileObject()
  try:
    parser_object.Parse(parser_mediator, file_object)
  finally:
    file_object.close()

  return event_queue_consumer