Example 1
    def testViperLookup(self):
        """Tests for the Viper analysis plugin."""
        event_queue = single_process.SingleProcessQueue()
        knowledge_base = self._SetUpKnowledgeBase()

        # Fill the incoming queue with events.
        test_queue_producer = plaso_queue.ItemQueueProducer(event_queue)
        events = [
            self._CreateTestEventObject(test_event)
            for test_event in self.TEST_EVENTS
        ]
        test_queue_producer.ProduceItems(events)

        # Set up the plugin.
        analysis_plugin = viper.ViperAnalysisPlugin(event_queue)
        analysis_plugin.SetProtocol(u'http')
        analysis_plugin.SetHost(u'localhost')

        # Run the analysis plugin.
        analysis_report_queue_consumer = self._RunAnalysisPlugin(
            analysis_plugin, knowledge_base)
        analysis_reports = self._GetAnalysisReportsFromQueue(
            analysis_report_queue_consumer)

        self.assertEqual(len(analysis_reports), 1)
        report = analysis_reports[0]
        tags = report.GetTags()
        self.assertEqual(len(tags), 1)
        tag = tags[0]
        self.assertEqual(tag.event_uuid, u'8')
        expected_labels = [
            u'viper_present', u'viper_project_default', u'viper_tag_rat',
            u'viper_tag_darkcomet'
        ]
        self.assertEqual(tag.labels, expected_labels)
Example 2
  def _ParseFile(self, parser_object, path, knowledge_base_object):
    """Parses a file using the parser object.

    Args:
      parser_object: the parser object.
      path: the path of the file to parse.
      knowledge_base_object: the knowledge base object (instance of
                             KnowledgeBase).

    Returns:
      An event object queue object (instance of Queue).
    """
    event_queue = single_process.SingleProcessQueue()
    event_queue_producer = plaso_queue.ItemQueueProducer(event_queue)

    parse_error_queue = single_process.SingleProcessQueue()

    parser_mediator = parsers_mediator.ParserMediator(
        event_queue_producer, parse_error_queue, knowledge_base_object)
    path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=path)
    file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
    parser_mediator.SetFileEntry(file_entry)

    file_object = file_entry.GetFileObject()
    try:
      parser_object.Parse(parser_mediator, file_object)
    finally:
      file_object.close()

    return event_queue
Example 3
    def testUniqueDomainExtraction(self):
        """Tests for the unique domains plugin."""
        event_queue = single_process.SingleProcessQueue()
        knowledge_base = self._SetUpKnowledgeBase()

        # Fill the incoming queue with events.
        test_queue_producer = plaso_queue.ItemQueueProducer(event_queue)
        event_objects = [
            self._CreateTestEventObject(test_event)
            for test_event in self._EVENT_DICTS
        ]
        test_queue_producer.ProduceItems(event_objects)

        # Set up the plugin.
        analysis_plugin = unique_domains_visited.UniqueDomainsVisitedPlugin(
            event_queue)

        analysis_report_queue_consumer = self._RunAnalysisPlugin(
            analysis_plugin, knowledge_base)
        analysis_reports = self._GetAnalysisReportsFromQueue(
            analysis_report_queue_consumer)

        self.assertEqual(len(analysis_reports), 1)
        report_text = analysis_reports[0].GetString()
        for event_object in self._EVENT_DICTS:
            self.assertIn(event_object.get(u'domain', u''), report_text)
Example 4
    def testVirusTotalLookup(self):
        """Tests for the VirusTotal analysis plugin."""
        event_queue = single_process.SingleProcessQueue()
        knowledge_base = self._SetUpKnowledgeBase()

        # Fill the incoming queue with events.
        test_queue_producer = plaso_queue.ItemQueueProducer(event_queue)
        events = [
            self._CreateTestEventObject(test_event)
            for test_event in self.TEST_EVENTS
        ]
        test_queue_producer.ProduceItems(events)
        analysis_plugin = virustotal.VirusTotalAnalysisPlugin(event_queue)
        analysis_plugin.SetAPIKey(self.FAKE_API_KEY)

        # Run the analysis plugin.
        analysis_report_queue_consumer = self._RunAnalysisPlugin(
            analysis_plugin, knowledge_base)
        analysis_reports = self._GetAnalysisReportsFromQueue(
            analysis_report_queue_consumer)

        self.assertEqual(len(analysis_reports), 1)
        report = analysis_reports[0]
        tags = report.GetTags()
        self.assertEqual(len(tags), 1)
        tag = tags[0]
        self.assertEqual(tag.event_uuid, u'8')
        self.assertEqual(tag.labels[0], u'virustotal_detections_10')
Example 5
  def setUp(self):
    """Makes preparations before running an individual test."""
    knowledge_base = self._SetUpKnowledgeBase()

    analysis_report_queue = single_process.SingleProcessQueue()
    analysis_report_queue_producer = plaso_queue.ItemQueueProducer(
        analysis_report_queue)

    self._analysis_mediator = mediator.AnalysisMediator(
        analysis_report_queue_producer, knowledge_base)
Example 6
  def _StartAnalysisPlugins(
      self, storage_file_path, analysis_plugins, pre_obj,
      analysis_queue_port=None, analysis_report_incoming_queue=None,
      command_line_arguments=None):
    """Start all the analysis plugin.

    Args:
      storage_file_path: string containing the path of the storage file.
      analysis_plugins: list of analysis plugin objects (instance of
                        AnalysisPlugin) that should be started.
      pre_obj: The preprocessor object (instance of PreprocessObject).
      analysis_queue_port: optional TCP port that the ZeroMQ analysis report
                           queues should use.
      analysis_report_incoming_queue: optional queue (instance of Queue) that
                                      reports should be pushed to when ZeroMQ
                                      is not in use.
      command_line_arguments: optional string of the command line arguments or
                              None if not set.
    """
    logging.info(u'Starting analysis plugins.')
    self._SetAnalysisPluginProcessInformation(
        storage_file_path, analysis_plugins, pre_obj,
        command_line_arguments=command_line_arguments)

    knowledge_base_object = knowledge_base.KnowledgeBase(pre_obj=pre_obj)
    for analysis_plugin in analysis_plugins:
      if self._use_zeromq:
        analysis_plugin_output_queue = zeromq_queue.ZeroMQPushConnectQueue(
            delay_open=True, port=analysis_queue_port)
      else:
        analysis_plugin_output_queue = analysis_report_incoming_queue

      analysis_report_queue_producer = plaso_queue.ItemQueueProducer(
          analysis_plugin_output_queue)

      completion_event = multiprocessing.Event()
      analysis_mediator_object = analysis_mediator.AnalysisMediator(
          analysis_report_queue_producer, knowledge_base_object,
          data_location=self._data_location,
          completion_event=completion_event)
      analysis_process = multiprocessing.Process(
          name=u'Analysis {0:s}'.format(analysis_plugin.plugin_name),
          target=analysis_plugin.RunPlugin,
          args=(analysis_mediator_object,))

      process_info = PsortAnalysisProcess(
          completion_event, analysis_plugin, analysis_process)
      self._analysis_process_info.append(process_info)

      analysis_process.start()
      logging.info(
          u'Plugin: [{0:s}] started.'.format(analysis_plugin.plugin_name))

    logging.info(u'Analysis plugins running')
Example 7
    def CreateParserMediator(self, event_queue=None):
        """Create a parser mediator object.

        Args:
          event_queue: an optional event queue object (instance of Queue).

        Returns:
          A parser mediator object (instance of
          parsers_mediator.ParserMediator).
        """
        if event_queue is None:
            event_queue = single_process.SingleProcessQueue()
        event_queue_producer = plaso_queue.ItemQueueProducer(event_queue)

        parse_error_queue = single_process.SingleProcessQueue()
        parse_error_queue_producer = plaso_queue.ItemQueueProducer(
            parse_error_queue)

        return parsers_mediator.ParserMediator(event_queue_producer,
                                               parse_error_queue_producer,
                                               self.knowledge_base_object)
Example 8
    def _GetParserMediator(self,
                           event_queue,
                           parse_error_queue,
                           knowledge_base_values=None,
                           file_entry=None,
                           parser_chain=None):
        """Retrieves a parser mediator object.

        Args:
          event_queue: the event queue (instance of Queue).
          parse_error_queue: the parse error queue (instance of Queue).
          knowledge_base_values: optional dict containing the knowledge base
                                 values.
          file_entry: optional dfVFS file_entry object (instance of
                      dfvfs.FileEntry) being parsed.
          parser_chain: optional string containing the parsing chain up to this
                        point.

        Returns:
          A parser mediator object (instance of ParserMediator).
        """
        event_queue_producer = plaso_queue.ItemQueueProducer(event_queue)
        parse_error_queue_producer = plaso_queue.ItemQueueProducer(
            parse_error_queue)

        knowledge_base_object = knowledge_base.KnowledgeBase()
        if knowledge_base_values:
            for identifier, value in iter(knowledge_base_values.items()):
                knowledge_base_object.SetValue(identifier, value)

        new_mediator = mediator.ParserMediator(event_queue_producer,
                                               parse_error_queue_producer,
                                               knowledge_base_object)
        if file_entry:
            new_mediator.SetFileEntry(file_entry)

        if parser_chain:
            new_mediator.parser_chain = parser_chain
        return new_mediator
Example 9
    def GetAnalysisPluginsAndEventQueues(self, analysis_plugins_string):
        """Return a list of analysis plugins and event queues.

    Args:
      analysis_plugins_string: comma separated string with names of analysis
                               plugins to load.

    Returns:
      A tuple of two lists, one containing list of analysis plugins
      and the other a list of event queues.
    """
        if not analysis_plugins_string:
            return [], []

        event_producers = []
        # These are the queues analysis plugins will read from.
        analysis_plugin_input_queues = []
        analysis_plugins_list = [
            name.strip() for name in analysis_plugins_string.split(u',')
        ]

        for _ in range(0, len(analysis_plugins_list)):
            if self._use_zeromq:
                output_queue = zeromq_queue.ZeroMQPushBindQueue()
                # Open the queue so it can bind to a random port, and we can get the
                # port number to use in the input queue.
                output_queue.Open()
                queue_port = output_queue.port
                input_queue = zeromq_queue.ZeroMQPullConnectQueue(
                    port=queue_port, delay_open=True)
                analysis_plugin_input_queues.append(input_queue)
            else:
                input_queue = multi_process.MultiProcessingQueue(timeout=5)
                analysis_plugin_input_queues.append(input_queue)
                output_queue = input_queue
            event_producers.append(plaso_queue.ItemQueueProducer(output_queue))

        analysis_plugins = analysis_manager.AnalysisPluginManager.LoadPlugins(
            analysis_plugins_list, analysis_plugin_input_queues)

        analysis_plugins = list(analysis_plugins)

        return analysis_plugins, event_producers
Example 10
    def testWinAnalyzePlugin(self):
        """Test the plugin against mock events."""
        knowledge_base = self._SetUpKnowledgeBase(
            knowledge_base_values={'users': self.WIN_USERS})

        event_queue = single_process.SingleProcessQueue()

        # Fill the incoming queue with events.
        test_queue_producer = plaso_queue.ItemQueueProducer(event_queue)
        test_queue_producer.ProduceItems(
            [self._CreateTestEventObject(path) for path in self.WIN_PATHS])

        # Initialize plugin.
        analysis_plugin = TestChromeExtensionPlugin(event_queue)

        # Run the analysis plugin.
        analysis_report_queue_consumer = self._RunAnalysisPlugin(
            analysis_plugin, knowledge_base)
        analysis_reports = self._GetAnalysisReportsFromQueue(
            analysis_report_queue_consumer)

        self.assertEqual(len(analysis_reports), 1)

        analysis_report = analysis_reports[0]

        self.assertEqual(analysis_plugin._sep, u'\\')

        # Due to the behavior of join, an additional empty string is needed at
        # the end to create the last empty line.
        expected_text = u'\n'.join([
            u' == USER: dude ==',
            u'  Google Keep - notes and lists [hmjkmjkepdijhoojdojkdfohbdgmmhki]',
            u'', u' == USER: frank ==',
            u'  Google Play Music [icppfcnhkcmnfdhfhphakoifcfokfdhg]',
            u'  YouTube [blpcfgokakmgnkcojhhkbfbldkacnbeo]', u'', u''
        ])

        self.assertEqual(analysis_report.text, expected_text)
        self.assertEqual(analysis_report.plugin_name, 'chrome_extension_test')

        expected_keys = set([u'frank', u'dude'])
        self.assertEqual(set(analysis_report.report_dict.keys()),
                         expected_keys)
Example 11
    def testMacAnalyzerPlugin(self):
        """Test the plugin against mock events."""
        knowledge_base = self._SetUpKnowledgeBase(
            knowledge_base_values={'users': self.MAC_USERS})

        event_queue = single_process.SingleProcessQueue()

        # Fill the incoming queue with events.
        test_queue_producer = plaso_queue.ItemQueueProducer(event_queue)
        test_queue_producer.ProduceItems(
            [self._CreateTestEventObject(path) for path in self.MAC_PATHS])

        # Initialize plugin.
        analysis_plugin = TestChromeExtensionPlugin(event_queue)

        # Run the analysis plugin.
        analysis_report_queue_consumer = self._RunAnalysisPlugin(
            analysis_plugin, knowledge_base)
        analysis_reports = self._GetAnalysisReportsFromQueue(
            analysis_report_queue_consumer)

        self.assertEqual(len(analysis_reports), 1)

        analysis_report = analysis_reports[0]

        self.assertEqual(analysis_plugin._sep, u'/')

        # Due to the behavior of join, an additional empty string is needed at
        # the end to create the last empty line.
        expected_text = u'\n'.join([
            u' == USER: dude ==',
            u'  Google Drive [apdfllckaahabafndbhieahigkjlhalf]', u'',
            u' == USER: frank ==',
            u'  Gmail [pjkljhegncpnkpknbcohdijeoejaedia]', u'', u''
        ])

        self.assertEqual(analysis_report.text, expected_text)
        self.assertEqual(analysis_report.plugin_name, 'chrome_extension_test')

        expected_keys = set([u'frank', u'dude'])
        self.assertEqual(set(analysis_report.report_dict.keys()),
                         expected_keys)
Example 12
    def testSyntheticKeysText(self):
        """Test the plugin against mock events."""
        event_queue = single_process.SingleProcessQueue()

        # Fill the incoming queue with events.
        test_queue_producer = plaso_queue.ItemQueueProducer(event_queue)
        events = [
            self._CreateTestEventObject(service_event)
            for service_event in self.SERVICE_EVENTS
        ]
        test_queue_producer.ProduceItems(events)

        # Initialize plugin.
        analysis_plugin = windows_services.WindowsServicesPlugin(event_queue)

        # Run the analysis plugin.
        knowledge_base = self._SetUpKnowledgeBase()
        analysis_report_queue_consumer = self._RunAnalysisPlugin(
            analysis_plugin, knowledge_base)
        analysis_reports = self._GetAnalysisReportsFromQueue(
            analysis_report_queue_consumer)

        self.assertEqual(len(analysis_reports), 1)

        analysis_report = analysis_reports[0]

        expected_text = (u'Listing Windows Services\n'
                         u'TestbDriver\n'
                         u'\tImage Path    = C:\\Dell\\testdriver.sys\n'
                         u'\tService Type  = File System Driver (0x2)\n'
                         u'\tStart Type    = Auto Start (2)\n'
                         u'\tService Dll   = \n'
                         u'\tObject Name   = \n'
                         u'\tSources:\n'
                         u'\t\tC:\\WINDOWS\\system32\\SYSTEM:'
                         u'\\ControlSet001\\services\\TestbDriver\n'
                         u'\t\tC:\\WINDOWS\\system32\\SYSTEM:'
                         u'\\ControlSet003\\services\\TestbDriver\n\n')

        self.assertEqual(expected_text, analysis_report.text)
        self.assertEqual(analysis_report.plugin_name, 'windows_services')
Example 13
    def testStorageWriter(self):
        """Test the storage writer."""
        event_objects = test_lib.CreateTestEventObjects()

        # The storage writer is normally run in a separate thread.
        # For the purpose of this test it has to be run in sequence,
        # hence the call to WriteEventObjects after all the event objects
        # have been queued up.

        # TODO: add upper queue limit.
        # A timeout is used so that the multi-processing queue does not block
        # the current process indefinitely.
        test_queue = multi_process.MultiProcessingQueue(timeout=0.1)
        test_queue_producer = plaso_queue.ItemQueueProducer(test_queue)
        test_queue_producer.ProduceItems(event_objects)

        test_queue_producer.SignalAbort()

        preprocessing_object = event.PreprocessObject()

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, u'plaso.db')
            storage_writer = zip_file.ZIPStorageFileWriter(
                test_queue, temp_file, preprocessing_object)
            storage_writer.WriteEventObjects()

            storage_file = zipfile.ZipFile(temp_file,
                                           mode='r',
                                           compression=zipfile.ZIP_DEFLATED)

            expected_filename_list = [
                u'information.dump', u'plaso_index.000001',
                u'plaso_proto.000001', u'plaso_timestamps.000001',
                u'serializer.txt'
            ]

            filename_list = sorted(storage_file.namelist())
            self.assertEqual(len(filename_list), 5)
            self.assertEqual(filename_list, expected_filename_list)
Example 14
  def _RunAnalysisPlugin(self, analysis_plugin, knowledge_base_object):
    """Analyzes an event object queue using the plugin object.

    Args:
      analysis_plugin: the analysis plugin object (instance of AnalysisPlugin).
      knowledge_base_object: the knowledge base object (instance of
                             KnowledgeBase).

    Returns:
      An analysis report queue consumer object (instance of
      TestAnalysisReportQueueConsumer).
    """
    analysis_report_queue = single_process.SingleProcessQueue()
    analysis_report_queue_consumer = TestAnalysisReportQueueConsumer(
        analysis_report_queue)
    analysis_report_queue_producer = plaso_queue.ItemQueueProducer(
        analysis_report_queue)

    analysis_mediator = mediator.AnalysisMediator(
        analysis_report_queue_producer, knowledge_base_object)

    analysis_plugin.RunPlugin(analysis_mediator)

    return analysis_report_queue_consumer
Example 15
    def testTag(self):
        """Test that the tagging plugin successfully tags events."""
        event_queue = single_process.SingleProcessQueue()
        test_queue_producer = plaso_queue.ItemQueueProducer(event_queue)
        event_objects = [
            self._CreateTestEventObject(test_event)
            for test_event in self._EVENT_DICTS
        ]
        test_queue_producer.ProduceItems(event_objects)
        analysis_plugin = tagging.TaggingPlugin(event_queue)
        test_file = self._GetTestFilePath([self._TEST_TAG_FILE_NAME])
        analysis_plugin.SetAndLoadTagFile(test_file)

        # Run the plugin.
        knowledge_base = self._SetUpKnowledgeBase()
        analysis_report_queue_consumer = self._RunAnalysisPlugin(
            analysis_plugin, knowledge_base)
        analysis_reports = self._GetAnalysisReportsFromQueue(
            analysis_report_queue_consumer)

        self.assertEqual(len(analysis_reports), 1)
        report = analysis_reports[0]
        self.assertEqual(len(report.GetTags()), 2)
Example 16
    def testEvents(self):
        """Test the plugin against mock events."""
        event_queue = single_process.SingleProcessQueue()

        # Fill the incoming queue with events.
        test_queue_producer = plaso_queue.ItemQueueProducer(event_queue)
        event_objects = [
            self._CreateTestEventObject(event_dict)
            for event_dict in self._EVENT_DICTS
        ]
        test_queue_producer.ProduceItems(event_objects)

        # Initialize plugin.
        analysis_plugin = file_hashes.FileHashesPlugin(event_queue)

        # Run the analysis plugin.
        knowledge_base = self._SetUpKnowledgeBase()
        analysis_report_queue_consumer = self._RunAnalysisPlugin(
            analysis_plugin, knowledge_base)
        analysis_reports = self._GetAnalysisReportsFromQueue(
            analysis_report_queue_consumer)

        self.assertEqual(len(analysis_reports), 1)

        analysis_report = analysis_reports[0]

        expected_text = (
            u'Listing file paths and hashes\n'
            u'FAKE:/opt/2hash_file: alternate_test_hash=5 test_hash=4\n'
            u'FAKE:/opt/dfvfs: test_hash=4\n'
            u'FAKE:/opt/no_hash_file:\n'
            u'FAKE:/var/testing directory with space/file.txt: test_hash=4\n'
            u'FAKE:C:\\Windows\\a.file.txt: test_hash=4\n')

        self.assertEqual(expected_text, analysis_report.text)
        self.assertEqual(analysis_report.plugin_name, u'file_hashes')