def _ParseKeyWithPlugin(
    self, plugin_object, winreg_key, knowledge_base_values=None):
  """Runs a Windows Registry plugin against a single Registry key.

  Args:
    plugin_object: the plugin object.
    winreg_key: the Windows Registry Key.
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestEventObjectQueueConsumer).
  """
  self.assertNotEquals(winreg_key, None)

  produced_events = queue.SingleThreadedQueue()
  produced_errors = queue.SingleThreadedQueue()
  consumer = test_lib.TestEventObjectQueueConsumer(produced_events)

  context_object = self._GetParserContext(
      produced_events, produced_errors,
      knowledge_base_values=knowledge_base_values)
  plugin_object.Process(context_object, key=winreg_key)

  return consumer
def _ParsePlistWithPlugin(
    self, plugin_object, plist_name, top_level_object,
    knowledge_base_values=None):
  """Runs a plist plugin against an already loaded top-level plist object.

  Args:
    plugin_object: the plugin object.
    plist_name: the name of the plist to parse.
    top_level_object: the top-level plist object.
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestEventObjectQueueConsumer).
  """
  produced_events = queue.SingleThreadedQueue()
  produced_errors = queue.SingleThreadedQueue()
  consumer = test_lib.TestEventObjectQueueConsumer(produced_events)

  context_object = self._GetParserContext(
      produced_events, produced_errors,
      knowledge_base_values=knowledge_base_values)
  plugin_object.Process(
      context_object, plist_name=plist_name, top_level=top_level_object)

  return consumer
def testFileSystemCollection(self):
  """Test collection on the file system."""
  source_files = [
      self._GetTestFilePath(['syslog.tgz']),
      self._GetTestFilePath(['syslog.zip']),
      self._GetTestFilePath(['syslog.bz2']),
      self._GetTestFilePath(['wtmp.1'])]

  with TempDirectory() as dirname:
    # Stage copies of the test files in the temporary directory so the
    # collector has a directory to walk.
    for source_file in source_files:
      shutil.copy(source_file, dirname)

    path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_OS, location=dirname)

    collection_queue = queue.SingleThreadedQueue()
    storage_queue = queue.SingleThreadedQueue()
    resolver_context = context.Context()
    file_system_collector = collector.Collector(
        collection_queue, storage_queue, dirname, path_spec,
        resolver_context=resolver_context)
    file_system_collector.Collect()

    queue_consumer = TestCollectorQueueConsumer(collection_queue)
    queue_consumer.ConsumePathSpecs()

    # One path spec is expected per staged file.
    self.assertEquals(queue_consumer.number_of_path_specs, 4)
def _ParseFile(self, parser_object, path, knowledge_base_object):
  """Parses the file at the given path using the supplied parser object.

  Args:
    parser_object: the parser object.
    path: the path of the file to parse.
    knowledge_base_object: the knowledge base object (instance of
                           KnowledgeBase).

  Returns:
    An event object queue object (instance of Queue).
  """
  produced_events = queue.SingleThreadedQueue()
  produced_errors = queue.SingleThreadedQueue()
  producer = queue.EventObjectQueueProducer(produced_events)
  context_object = parsers_context.ParserContext(
      producer, produced_errors, knowledge_base_object)

  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=path)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(os_path_spec)

  parser_object.Parse(context_object, file_entry)
  # Mark the queue complete so consumers know no more events will arrive.
  produced_events.SignalEndOfInput()

  return produced_events
def _ParseEseDbFileWithPlugin(
    self, path, plugin_object, knowledge_base_values=None):
  """Runs an ESE database plugin against the database file at path.

  Args:
    path: The path to the ESE database test file.
    plugin_object: The plugin object that is used to extract an event
                   generator.
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestEventObjectQueueConsumer).
  """
  produced_events = queue.SingleThreadedQueue()
  produced_errors = queue.SingleThreadedQueue()
  consumer = test_lib.TestEventObjectQueueConsumer(produced_events)

  context_object = self._GetParserContext(
      produced_events, produced_errors,
      knowledge_base_values=knowledge_base_values)

  esedb_file = self._OpenEseDbFile(path)
  plugin_object.Process(context_object, database=esedb_file)

  return consumer
def _ParseFileByPathSpec(
    self, parser_object, path_spec, knowledge_base_values=None):
  """Parses the file identified by a path specification.

  Args:
    parser_object: the parser object.
    path_spec: the path specification of the file to parse.
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestEventObjectQueueConsumer).
  """
  produced_events = queue.SingleThreadedQueue()
  produced_errors = queue.SingleThreadedQueue()
  consumer = TestEventObjectQueueConsumer(produced_events)

  context_object = self._GetParserContext(
      produced_events, produced_errors,
      knowledge_base_values=knowledge_base_values)

  file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
  parser_object.Parse(context_object, file_entry)

  return consumer
def testFileSystemWithFilterCollection(self):
  """Test collection on the file system with a filter."""
  dirname = u'.'
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=dirname)

  # Write the find-spec filter file; delete=False so it survives the
  # with-block and can be handed to BuildFindSpecsFromFile below.
  filter_name = ''
  with tempfile.NamedTemporaryFile(delete=False) as temp_file:
    filter_name = temp_file.name
    temp_file.write('/test_data/testdir/filter_.+.txt\n')
    temp_file.write('/test_data/.+evtx\n')
    temp_file.write('/AUTHORS\n')
    # Deliberately non-matching pattern: should contribute no path specs.
    temp_file.write('/does_not_exist/some_file_[0-9]+txt\n')

  test_collection_queue = queue.SingleThreadedQueue()
  test_store = queue.SingleThreadedQueue()
  resolver_context = context.Context()
  test_collector = collector.Collector(
      test_collection_queue, test_store, dirname, path_spec,
      resolver_context=resolver_context)

  find_specs = engine_utils.BuildFindSpecsFromFile(filter_name)
  test_collector.SetFilter(find_specs)

  test_collector.Collect()

  test_collector_queue_consumer = TestCollectorQueueConsumer(
      test_collection_queue)
  test_collector_queue_consumer.ConsumePathSpecs()

  # Best-effort cleanup of the temporary filter file; a failure to remove
  # it should not fail the test.
  try:
    os.remove(filter_name)
  except (OSError, IOError) as exception:
    logging.warning((
        u'Unable to remove temporary file: {0:s} with error: {1:s}').format(
            filter_name, exception))

  # Two files with test_data/testdir/filter_*.txt, AUTHORS
  # and test_data/System.evtx.
  self.assertEquals(test_collector_queue_consumer.number_of_path_specs, 4)

  paths = test_collector_queue_consumer.GetFilePaths()

  current_directory = os.getcwd()

  expected_path = os.path.join(
      current_directory, 'test_data', 'testdir', 'filter_1.txt')
  self.assertTrue(expected_path in paths)

  # filter_2.txt does not exist in the test data, so it must not show up.
  expected_path = os.path.join(
      current_directory, 'test_data', 'testdir', 'filter_2.txt')
  self.assertFalse(expected_path in paths)

  expected_path = os.path.join(
      current_directory, 'test_data', 'testdir', 'filter_3.txt')
  self.assertTrue(expected_path in paths)

  expected_path = os.path.join(
      current_directory, 'AUTHORS')
  self.assertTrue(expected_path in paths)
def ParseFile(file_entry):
  """Parse a file given a file entry and yield results."""
  if not file_entry:
    return

  # Wire up the queues and preprocessing data the worker needs.
  extraction_queue = queue.SingleThreadedQueue()
  event_queue = queue.SingleThreadedQueue()
  event_queue_producer = queue.EventObjectQueueProducer(event_queue)
  preprocess_object = event.PreprocessObject()
  parser_objects = putils.FindAllParsers(preprocess_object)

  extraction_worker = worker.EventExtractionWorker(
      'my_worker', extraction_queue, event_queue_producer, preprocess_object,
      parser_objects)

  # Extract events from the file and close off both queues.
  extraction_worker.ParseFile(file_entry)
  event_queue.SignalEndOfInput()
  extraction_queue.SignalEndOfInput()

  # Drain the event queue until it is exhausted or the end-of-input
  # marker is reached.
  while True:
    try:
      queued_item = event_queue.PopItem()
    except errors.QueueEmpty:
      return
    if isinstance(queued_item, queue.QueueEndOfInput):
      return
    yield queued_item
def testImageWithFilterCollection(self):
  """Test collection on a storage media image file with a filter."""
  test_file = self._GetTestFilePath(['image.dd'])
  volume_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/',
      parent=volume_path_spec)

  # Write the find-spec filter file; delete=False so it survives the
  # with-block and can be handed to BuildFindSpecsFromFile below.
  filter_name = ''
  with tempfile.NamedTemporaryFile(delete=False) as temp_file:
    filter_name = temp_file.name
    temp_file.write('/a_directory/.+zip\n')
    temp_file.write('/a_directory/another.+\n')
    temp_file.write('/passwords.txt\n')

  test_collection_queue = queue.SingleThreadedQueue()
  test_storage_queue = queue.SingleThreadedQueue()
  test_storage_queue_producer = queue.EventObjectQueueProducer(
      test_storage_queue)
  resolver_context = context.Context()
  test_collector = collector.Collector(
      test_collection_queue, test_storage_queue_producer, test_file,
      path_spec, resolver_context=resolver_context)

  find_specs = engine_utils.BuildFindSpecsFromFile(filter_name)
  test_collector.SetFilter(find_specs)

  test_collector.Collect()

  test_collector_queue_consumer = TestCollectorQueueConsumer(
      test_collection_queue)
  test_collector_queue_consumer.ConsumePathSpecs()

  # Best-effort cleanup of the temporary filter file; a failure to remove
  # it should not fail the test.
  try:
    os.remove(filter_name)
  except (OSError, IOError) as exception:
    logging.warning((
        u'Unable to remove temporary file: {0:s} with error: {1:s}').format(
            filter_name, exception))

  self.assertEquals(test_collector_queue_consumer.number_of_path_specs, 2)

  paths = test_collector_queue_consumer.GetFilePaths()

  # path_specs[0]
  # type: TSK
  # file_path: '/a_directory/another_file'
  # container_path: 'test_data/image.dd'
  # image_offset: 0
  self.assertEquals(paths[0], u'/a_directory/another_file')

  # path_specs[1]
  # type: TSK
  # file_path: '/passwords.txt'
  # container_path: 'test_data/image.dd'
  # image_offset: 0
  self.assertEquals(paths[1], u'/passwords.txt')
def _ExtractWithExtensions(self, extensions, destination_path):
  """Extracts files using extensions.

  Args:
    extensions: a list of extensions.
    destination_path: the path where the extracted files should be stored.
  """
  logging.info(
      u'Finding files with extensions: {0:s}'.format(extensions))

  # Make sure the output directory exists before extraction starts.
  if not os.path.isdir(destination_path):
    os.makedirs(destination_path)

  path_spec_queue = queue.SingleThreadedQueue()

  # TODO: add support to handle multiple partitions.
  self._source_path_spec = self.GetSourcePathSpec()

  source_collector = collector.Collector(
      path_spec_queue, self._source_path, self._source_path_spec)
  source_collector.Collect()

  FileSaver.calc_md5 = self._remove_duplicates

  extractor_consumer = ImageExtractorQueueConsumer(
      path_spec_queue, extensions, destination_path)
  extractor_consumer.ConsumePathSpecs()
def testEngine(self):
  """Test the engine functionality."""
  collection_queue = queue.SingleThreadedQueue()
  storage_queue = queue.SingleThreadedQueue()
  resolver_context = context.Context()

  engine_object = engine.Engine(collection_queue, storage_queue)
  self.assertNotEquals(engine_object, None)

  # Build a TSK path specification on top of the OS-level image path.
  image_path = os.path.join(self._TEST_DATA_PATH, u'image.dd')
  image_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=image_path)
  image_tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/',
      parent=image_os_path_spec)

  engine_object.SetSource(
      image_tsk_path_spec, resolver_context=resolver_context)

  # A storage media image is neither a directory nor a single file.
  self.assertFalse(engine_object.SourceIsDirectory())
  self.assertFalse(engine_object.SourceIsFile())
  self.assertTrue(engine_object.SourceIsStorageMediaImage())

  searcher = engine_object.GetSourceFileSystemSearcher(
      resolver_context=resolver_context)
  self.assertNotEquals(searcher, None)
  self.assertIsInstance(searcher, file_system_searcher.FileSystemSearcher)

  preprocess_object = event.PreprocessObject()
  engine_object.PreprocessSource(preprocess_object, 'Windows')

  collector_object = engine_object.CreateCollector(
      False, vss_stores=None, filter_find_specs=None,
      resolver_context=resolver_context)
  self.assertNotEquals(collector_object, None)
  self.assertIsInstance(collector_object, collector.Collector)

  extraction_worker = engine_object.CreateExtractionWorker(
      0, None, preprocess_object)
  self.assertNotEquals(extraction_worker, None)
  self.assertIsInstance(extraction_worker, worker.EventExtractionWorker)
def ParseFile(file_entry):
  """Parse a file given a file entry or path and return a list of results.

  Args:
    file_entry: Either a file entry object (instance of dfvfs.FileEntry)
                or a string containing a path (absolute or relative) to a
                local file.

  Returns:
    A list of event object (instance of EventObject) that were extracted from
    the file (or an empty list if no events were extracted).
  """
  if not file_entry:
    # Return the documented empty list instead of the implicit None so
    # callers can always iterate over the result.
    return []

  if isinstance(file_entry, basestring):
    file_entry = OpenOSFile(file_entry)

  # Set up the engine.
  collection_queue = queue.SingleThreadedQueue()
  storage_queue = queue.SingleThreadedQueue()
  parse_error_queue = queue.SingleThreadedQueue()
  engine_object = engine.Engine(
      collection_queue, storage_queue, parse_error_queue)

  # Create a worker.
  worker_object = engine_object.CreateExtractionWorker('0')
  # TODO: add support for parser_filter_string.
  worker_object.InitalizeParserObjects()
  worker_object.ParseFileEntry(file_entry)

  # Close both queues so the drain loop below terminates.
  collection_queue.SignalEndOfInput()
  engine_object.SignalEndOfInputStorageQueue()

  results = []
  while True:
    try:
      item = storage_queue.PopItem()
    except errors.QueueEmpty:
      break
    if isinstance(item, queue.QueueEndOfInput):
      break
    results.append(item)
  return results
def setUp(self):
  """Sets up the needed objects used throughout the test."""
  knowledge_base = self._SetUpKnowledgeBase()

  report_queue = queue.SingleThreadedQueue()
  report_queue_producer = queue.AnalysisReportQueueProducer(report_queue)

  self._analysis_context = context.AnalysisContext(
      report_queue_producer, knowledge_base)
def testImageCollection(self):
  """Test collection on a storage media image file.

  The test image contains two files:
  + logs/hidden.zip
  + logs/sys.tgz

  The hidden.zip file contains one file, syslog, which is the same for
  sys.tgz. The end results should therefore be:
  + logs/hidden.zip (unchanged)
  + logs/hidden.zip:syslog (the text file extracted out)
  + logs/sys.tgz (unchanged)
  + logs/sys.tgz (read as a GZIP file, so not compressed)
  + logs/sys.tgz:syslog.gz (A GZIP file from the TAR container)
  + logs/sys.tgz:syslog.gz:syslog (the extracted syslog file)

  This means that the collection script should collect 6 files in total.
  """
  image_path = self._GetTestFilePath(['syslog_image.dd'])
  image_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=image_path)
  image_tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/',
      parent=image_os_path_spec)

  collection_queue = queue.SingleThreadedQueue()
  storage_queue = queue.SingleThreadedQueue()
  storage_queue_producer = queue.EventObjectQueueProducer(storage_queue)
  resolver_context = context.Context()

  image_collector = collector.Collector(
      collection_queue, storage_queue_producer, image_path,
      image_tsk_path_spec, resolver_context=resolver_context)
  image_collector.Collect()

  queue_consumer = TestCollectorQueueConsumer(collection_queue)
  queue_consumer.ConsumePathSpecs()

  self.assertEquals(queue_consumer.number_of_path_specs, 2)
def testAnalyzeFile(self):
  """Read a storage file that contains URL data and analyze it."""
  input_queue = queue.SingleThreadedQueue()
  report_queue = queue.SingleThreadedQueue()
  plugin_object = browser_search.AnalyzeBrowserSearchPlugin(
      self._pre_obj, input_queue, report_queue)

  history_path = self._GetTestFilePath(['History'])
  event_generator = self._ParseFile(self._parser, history_path)

  # Feed the parsed events into the plugin's incoming queue.
  event_producer = queue.AnalysisPluginProducer(input_queue)
  event_producer.ProduceEventObjects(event_generator)
  event_producer.SignalEndOfInput()

  plugin_object.RunPlugin()
  report_queue.SignalEndOfInput()

  report_consumer = test_lib.TestAnalysisPluginConsumer(report_queue)
  report_consumer.ConsumeAnalysisReports()

  self.assertEquals(report_consumer.number_of_analysis_reports, 1)

  analysis_report = report_consumer.analysis_reports[0]

  # Due to the behavior of the join one additional empty string at the end
  # is needed to create the last empty line.
  expected_text = u'\n'.join([
      u' == ENGINE: GoogleSearch ==',
      u'1 really really funny cats',
      u'1 java plugin',
      u'1 funnycats.exe',
      u'1 funny cats',
      u'',
      u''])
  self.assertEquals(analysis_report.text, expected_text)
  self.assertEquals(analysis_report.plugin_name, 'browser_search')

  expected_keys = set([u'GoogleSearch'])
  self.assertEquals(set(analysis_report.report_dict.keys()), expected_keys)
def _ParseDatabaseFileWithPlugin(
    self, plugin_object, path, cache=None, knowledge_base_values=None):
  """Runs a SQLite database plugin against the database file at path.

  Args:
    plugin_object: The plugin object that is used to extract an event
                   generator.
    path: The path to the SQLite database file.
    cache: A cache object (instance of SQLiteCache).
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestEventObjectQueueConsumer).
  """
  produced_events = queue.SingleThreadedQueue()
  produced_errors = queue.SingleThreadedQueue()
  consumer = test_lib.TestEventObjectQueueConsumer(produced_events)

  context_object = self._GetParserContext(
      produced_events, produced_errors,
      knowledge_base_values=knowledge_base_values)

  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=path)
  file_entry = path_spec_resolver.Resolver.OpenFileEntry(os_path_spec)

  # The context manager takes care of closing the database afterwards.
  with sqlite.SQLiteDatabase(file_entry) as database:
    plugin_object.Process(context_object, cache=cache, database=database)

  return consumer
def _ParseOleCfFileWithPlugin(
    self, path, plugin_object, knowledge_base_values=None):
  """Runs an OLE compound file plugin against the file at path.

  Args:
    path: The path to the OLE CF test file.
    plugin_object: The plugin object that is used to extract an event
                   generator.
    knowledge_base_values: optional dict containing the knowledge base
                           values. The default is None.

  Returns:
    An event object queue consumer object (instance of
    TestEventObjectQueueConsumer).
  """
  produced_events = queue.SingleThreadedQueue()
  produced_errors = queue.SingleThreadedQueue()
  consumer = test_lib.TestEventObjectQueueConsumer(produced_events)

  context_object = self._GetParserContext(
      produced_events, produced_errors,
      knowledge_base_values=knowledge_base_values)

  olecf_file = self._OpenOleCfFile(path)

  # The plugin needs the root item and the names of all its sub items.
  root_item = olecf_file.root_item
  item_names = [sub_item.name for sub_item in root_item.sub_items]

  plugin_object.Process(
      context_object, root_item=root_item, item_names=item_names)

  return consumer
def testPushPopItem(self):
  """Tests the PushItem and PopItem functions."""
  item_queue = queue.SingleThreadedQueue()

  for queued_item in self._ITEMS:
    item_queue.PushItem(queued_item)

  # Every pushed item should be reflected in the queue length.
  self.assertEquals(len(item_queue), len(self._ITEMS))

  item_queue.SignalEndOfInput()
  queue_consumer = TestQueueConsumer(item_queue)
  queue_consumer.ConsumeItems()

  self.assertEquals(queue_consumer.number_of_items, len(self._ITEMS))
def testWinAnalyzePlugin(self):
  """Test the plugin against mock events."""
  knowledge_base = self._SetUpKnowledgeBase(
      knowledge_base_values={'users': self.WIN_USERS})

  event_queue = queue.SingleThreadedQueue()

  # Fill the incoming queue with mock events built from the Windows paths.
  event_producer = queue.EventObjectQueueProducer(event_queue)
  mock_events = [
      self._CreateTestEventObject(path) for path in self.WIN_PATHS]
  event_producer.ProduceEventObjects(mock_events)
  event_producer.SignalEndOfInput()

  # Initialize and run the analysis plugin.
  analysis_plugin = AnalyzeChromeExtensionTestPlugin(event_queue)
  report_queue_consumer = self._RunAnalysisPlugin(
      analysis_plugin, knowledge_base)
  analysis_reports = self._GetAnalysisReportsFromQueue(
      report_queue_consumer)

  self.assertEquals(len(analysis_reports), 1)

  analysis_report = analysis_reports[0]

  self.assertEquals(analysis_plugin._sep, u'\\')

  # Due to the behavior of the join one additional empty string at the end
  # is needed to create the last empty line.
  expected_text = u'\n'.join([
      u' == USER: dude ==',
      u' Google Keep - notes and lists [hmjkmjkepdijhoojdojkdfohbdgmmhki]',
      u'',
      u' == USER: frank ==',
      u' Google Play Music [icppfcnhkcmnfdhfhphakoifcfokfdhg]',
      u' YouTube [blpcfgokakmgnkcojhhkbfbldkacnbeo]',
      u'',
      u''])
  self.assertEquals(analysis_report.text, expected_text)
  self.assertEquals(analysis_report.plugin_name, 'chrome_extension_test')

  expected_keys = set([u'frank', u'dude'])
  self.assertEquals(set(analysis_report.report_dict.keys()), expected_keys)
def testMacAnalyzerPlugin(self):
  """Test the plugin against mock events."""
  knowledge_base = self._SetUpKnowledgeBase(
      knowledge_base_values={'users': self.MAC_USERS})

  event_queue = queue.SingleThreadedQueue()

  # Fill the incoming queue with mock events built from the Mac OS paths.
  event_producer = queue.EventObjectQueueProducer(event_queue)
  mock_events = [
      self._CreateTestEventObject(path) for path in self.MAC_PATHS]
  event_producer.ProduceEventObjects(mock_events)
  event_producer.SignalEndOfInput()

  # Initialize and run the analysis plugin.
  analysis_plugin = AnalyzeChromeExtensionTestPlugin(event_queue)
  report_queue_consumer = self._RunAnalysisPlugin(
      analysis_plugin, knowledge_base)
  analysis_reports = self._GetAnalysisReportsFromQueue(
      report_queue_consumer)

  self.assertEquals(len(analysis_reports), 1)

  analysis_report = analysis_reports[0]

  self.assertEquals(analysis_plugin._sep, u'/')

  # Due to the behavior of the join one additional empty string at the end
  # is needed to create the last empty line.
  expected_text = u'\n'.join([
      u' == USER: dude ==',
      u' Google Drive [apdfllckaahabafndbhieahigkjlhalf]',
      u'',
      u' == USER: frank ==',
      u' Gmail [pjkljhegncpnkpknbcohdijeoejaedia]',
      u'',
      u''])
  self.assertEquals(analysis_report.text, expected_text)
  self.assertEquals(analysis_report.plugin_name, 'chrome_extension_test')

  expected_keys = set([u'frank', u'dude'])
  self.assertEquals(set(analysis_report.report_dict.keys()), expected_keys)
def _RunAnalysisPlugin(self, analysis_plugin, knowledge_base_object):
  """Analyzes an event object queue using the plugin object.

  Args:
    analysis_plugin: the analysis plugin object (instance of AnalysisPlugin).
    knowledge_base_object: the knowledge base object (instance of
                           KnowledgeBase).

  Returns:
    An event object queue object (instance of Queue).
  """
  report_queue = queue.SingleThreadedQueue()
  report_queue_consumer = TestAnalysisReportQueueConsumer(report_queue)
  report_queue_producer = queue.AnalysisReportQueueProducer(report_queue)

  analysis_context = context.AnalysisContext(
      report_queue_producer, knowledge_base_object)

  analysis_plugin.RunPlugin(analysis_context)
  # Close the report queue so the consumer can drain it to completion.
  report_queue.SignalEndOfInput()

  return report_queue_consumer
def testMacAnalyzerPlugin(self):
  """Test the plugin against mock events."""
  incoming_queue = queue.SingleThreadedQueue()
  outgoing_queue = queue.SingleThreadedQueue()
  pre_obj = event.PreprocessObject()
  # Fill in the user section
  pre_obj.users = self.MAC_USERS

  # Initialize plugin.
  analysis_plugin = chrome_extension.AnalyzeChromeExtensionPlugin(
      pre_obj, incoming_queue, outgoing_queue)

  # Test the user creation.
  user_paths = analysis_plugin._user_paths
  self.assertEquals(
      set(user_paths.keys()), set([u'frank', u'dude', u'hans', u'root']))
  self.assertEquals(user_paths[u'frank'], u'/users/frank')
  self.assertEquals(user_paths[u'dude'], u'/users/dude')
  self.assertEquals(user_paths[u'hans'], u'/users/hans')
  self.assertEquals(user_paths[u'root'], u'/var/root')

  # Fill the incoming queue with events.
  test_queue_producer = queue.AnalysisPluginProducer(incoming_queue)
  test_queue_producer.ProduceEventObjects(
      [self._CreateTestEventObject(path) for path in self.MAC_PATHS])
  test_queue_producer.SignalEndOfInput()

  # Run the analysis plugin.
  analysis_plugin.RunPlugin()
  outgoing_queue.SignalEndOfInput()

  test_analysis_plugin_consumer = test_lib.TestAnalysisPluginConsumer(
      outgoing_queue)
  test_analysis_plugin_consumer.ConsumeAnalysisReports()

  # Exactly one report is expected from the plugin run.
  self.assertEquals(
      test_analysis_plugin_consumer.number_of_analysis_reports, 1)

  analysis_report = test_analysis_plugin_consumer.analysis_reports[0]

  # Test the username detection.
  self.assertEquals(
      analysis_plugin._GetUserNameFromPath(self.MAC_PATHS[0]), u'dude')
  self.assertEquals(
      analysis_plugin._GetUserNameFromPath(self.MAC_PATHS[4]), u'hans')

  self.assertEquals(analysis_plugin._sep, u'/')

  # Due to the behavior of the join one additional empty string at the end
  # is needed to create the last empty line.
  expected_text = u'\n'.join([
      u' == USER: dude ==',
      u' Google Drive [apdfllckaahabafndbhieahigkjlhalf]',
      u'',
      u' == USER: frank ==',
      u' Gmail [pjkljhegncpnkpknbcohdijeoejaedia]',
      u'',
      u''])
  self.assertEquals(analysis_report.text, expected_text)
  self.assertEquals(analysis_report.plugin_name, 'chrome_extension')

  expected_keys = set([u'frank', u'dude'])
  self.assertEquals(set(analysis_report.report_dict.keys()), expected_keys)
def testWinAnalyzePlugin(self):
  """Test the plugin against mock events."""
  incoming_queue = queue.SingleThreadedQueue()
  outgoing_queue = queue.SingleThreadedQueue()
  pre_obj = event.PreprocessObject()
  # Fill in the user section
  pre_obj.users = self.WIN_USERS

  # Initialize plugin.
  analysis_plugin = chrome_extension.AnalyzeChromeExtensionPlugin(
      pre_obj, incoming_queue, outgoing_queue)

  # Test the user creation.
  user_paths = analysis_plugin._user_paths
  self.assertEquals(set(user_paths.keys()), set([u'frank', u'dude']))
  self.assertEquals(user_paths[u'frank'], u'/users/frank')
  self.assertEquals(user_paths[u'dude'], u'/users/dude')

  # Fill the incoming queue with events.
  test_queue_producer = queue.AnalysisPluginProducer(incoming_queue)
  test_queue_producer.ProduceEventObjects(
      [self._CreateTestEventObject(path) for path in self.WIN_PATHS])
  test_queue_producer.SignalEndOfInput()

  # Run the analysis plugin.
  analysis_plugin.RunPlugin()
  outgoing_queue.SignalEndOfInput()

  test_analysis_plugin_consumer = test_lib.TestAnalysisPluginConsumer(
      outgoing_queue)
  test_analysis_plugin_consumer.ConsumeAnalysisReports()

  # Exactly one report is expected from the plugin run.
  self.assertEquals(
      test_analysis_plugin_consumer.number_of_analysis_reports, 1)

  analysis_report = test_analysis_plugin_consumer.analysis_reports[0]

  # Test the username detection.
  self.assertEquals(
      analysis_plugin._GetUserNameFromPath(self.WIN_PATHS[0]), u'dude')
  self.assertEquals(
      analysis_plugin._GetUserNameFromPath(self.WIN_PATHS[2]), u'frank')

  self.assertEquals(analysis_plugin._sep, u'\\')

  # Due to the behavior of the join one additional empty string at the end
  # is needed to create the last empty line.
  expected_text = u'\n'.join([
      u' == USER: dude ==',
      u' Google Keep [hmjkmjkepdijhoojdojkdfohbdgmmhki]',
      u'',
      u' == USER: frank ==',
      u' Google Play Music [icppfcnhkcmnfdhfhphakoifcfokfdhg]',
      u' YouTube [blpcfgokakmgnkcojhhkbfbldkacnbeo]',
      u'',
      u''])
  self.assertEquals(analysis_report.text, expected_text)
  self.assertEquals(analysis_report.plugin_name, 'chrome_extension')

  expected_keys = set([u'frank', u'dude'])
  self.assertEquals(set(analysis_report.report_dict.keys()), expected_keys)
def ProcessFile(options):
  """Process a file and produce profile results."""
  # Either deserialize a path specification from a protobuf text file or
  # build an OS path specification directly from the supplied file path.
  if options.proto_file and os.path.isfile(options.proto_file):
    with open(options.proto_file) as fh:
      proto_string = fh.read()

    proto = transmission_pb2.PathSpec()
    try:
      text_format.Merge(proto_string, proto)
    except text_format.ParseError as exception:
      logging.error(
          u'Unable to parse file, error: {}'.format(exception))
      sys.exit(1)

    serializer = protobuf_serializer.ProtobufPathSpecSerializer
    path_spec = serializer.ReadSerializedObject(proto)
  else:
    path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=options.file_to_parse)

  file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)

  if file_entry is None:
    logging.error(u'Unable to open file: {0:s}'.format(
        options.file_to_parse))
    sys.exit(1)

  # Set few options the engine expects to be there.
  # TODO: Can we rather set this directly in argparse?
  options.single_process = True
  options.debug = False
  options.text_prepend = u''

  # Set up the engine.
  collection_queue = queue.SingleThreadedQueue()
  storage_queue = queue.SingleThreadedQueue()
  parse_error_queue = queue.SingleThreadedQueue()
  engine_object = engine.Engine(
      collection_queue, storage_queue, parse_error_queue)

  # Create a worker.
  worker_object = engine_object.CreateExtractionWorker('0')
  # TODO: add support for parser_filter_string.
  worker_object.InitalizeParserObjects()

  # In verbose mode profile the parse with cProfile; otherwise just time it.
  if options.verbose:
    profiler = cProfile.Profile()
    profiler.enable()
  else:
    time_start = time.time()
  worker_object.ParseFileEntry(file_entry)

  if options.verbose:
    profiler.disable()
  else:
    time_end = time.time()

  engine_object.SignalEndOfInputStorageQueue()

  event_object_consumer = PprofEventObjectQueueConsumer(storage_queue)
  event_object_consumer.ConsumeEventObjects()

  if not options.verbose:
    print frontend_utils.FormatHeader('Time Used')
    print u'{:>20f}s'.format(time_end - time_start)

  print frontend_utils.FormatHeader('Parsers Loaded')
  # Accessing protected member.
  # pylint: disable=protected-access
  plugins = []
  for parser_object in sorted(worker_object._parser_objects):
    print frontend_utils.FormatOutputString('', parser_object.NAME)
    parser_plugins = getattr(parser_object, '_plugins', [])
    plugins.extend(parser_plugins)

  print frontend_utils.FormatHeader('Plugins Loaded')
  for plugin in sorted(plugins):
    # Plugins may be registered either by name (string) or as objects.
    if isinstance(plugin, basestring):
      print frontend_utils.FormatOutputString('', plugin)
    else:
      plugin_string = getattr(plugin, 'NAME', u'N/A')
      print frontend_utils.FormatOutputString('', plugin_string)

  print frontend_utils.FormatHeader('Parsers Used')
  for parser in sorted(event_object_consumer.parsers):
    print frontend_utils.FormatOutputString('', parser)

  print frontend_utils.FormatHeader('Plugins Used')
  for plugin in sorted(event_object_consumer.plugins):
    print frontend_utils.FormatOutputString('', plugin)

  print frontend_utils.FormatHeader('Counter')
  for key, value in event_object_consumer.counter.most_common():
    print frontend_utils.FormatOutputString(key, value)

  if options.verbose:
    return GetStats(profiler)