Example 1
    def testInternalExportEvents(self):
        """Tests the _ExportEvents function."""
        # Build the output chain: knowledge base -> mediators -> output module.
        kb = knowledge_base.KnowledgeBase()
        writer = cli_test_lib.TestOutputWriter()

        formatter_mediator_object = formatters_mediator.FormatterMediator()
        mediator = output_mediator.OutputMediator(kb, formatter_mediator_object)

        module = TestOutputModule(mediator)
        module.SetOutputWriter(writer)

        engine_object = psort.PsortMultiProcessEngine()

        # The test formatter must be registered for the duration of the export.
        formatters_manager.FormattersManager.RegisterFormatter(
            TestEventFormatter)

        with shared_test_lib.TempDirectory() as temp_directory:
            storage_path = os.path.join(temp_directory, 'storage.plaso')
            self._CreateTestStorageFile(storage_path)

            reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
                storage_path)
            reader.ReadPreprocessingInformation(kb)

            engine_object._ExportEvents(
                reader, module, deduplicate_events=False)

        formatters_manager.FormattersManager.DeregisterFormatter(
            TestEventFormatter)

        # Without deduplication all 17 test events are exported, grouped into
        # 3 MACB groups.
        self.assertEqual(len(module.events), 17)
        self.assertEqual(len(module.macb_groups), 3)
Example 2
    def testInternalExportEventsDeduplicate(self):
        """Tests the _ExportEvents function with deduplication."""
        kb = knowledge_base.KnowledgeBase()

        mediator = output_mediator.OutputMediator(
            kb, data_location=shared_test_lib.TEST_DATA_PATH)

        # Load the message formatter definitions shipped with the test data.
        mediator.ReadMessageFormattersFromDirectory(
            self._GetDataFilePath(['formatters']))

        module = TestOutputModule(mediator)

        engine_object = psort.PsortMultiProcessEngine()

        with shared_test_lib.TempDirectory() as temp_directory:
            storage_path = os.path.join(temp_directory, 'storage.plaso')
            self._CreateTestStorageFile(storage_path)
            self._ReadSessionConfiguration(storage_path, kb)

            reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
                storage_path)
            reader.ReadSystemConfiguration(kb)

            # Deduplication is the default behavior of _ExportEvents.
            engine_object._ExportEvents(reader, module)

        # With deduplication 15 of the 17 test events remain, in 3 MACB groups.
        self.assertEqual(len(module.events), 15)
        self.assertEqual(len(module.macb_groups), 3)
Example 3
    def testAnalyzeEvents(self):
        """Tests the AnalyzeEvents function."""
        test_file_path = self._GetTestFilePath(['psort_test.plaso'])
        self._SkipIfPathNotExists(test_file_path)

        session = sessions.Session()
        knowledge_base_object = knowledge_base.KnowledgeBase()

        output_mediator_object = output_mediator.OutputMediator(
            knowledge_base_object,
            data_location=shared_test_lib.TEST_DATA_PATH)

        output_mediator_object.SetPreferredLanguageIdentifier('en-US')

        # NOTE: the output module is currently not consumed by AnalyzeEvents;
        # it is kept for when report output is wired up (see TODOs below).
        output_module = null.NullOutputModule(output_mediator_object)

        data_location = ''
        analysis_plugin = tagging.TaggingAnalysisPlugin()
        analysis_plugins = {'tagging': analysis_plugin}
        # TODO: set tag file.

        configuration = configurations.ProcessingConfiguration()

        test_engine = psort.PsortMultiProcessEngine()

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, 'storage.plaso')
            shutil.copyfile(test_file_path, temp_file)

            storage_writer = storage_factory.StorageFactory.CreateStorageWriter(
                definitions.DEFAULT_STORAGE_FORMAT, session, temp_file)

            # AnalyzeEvents takes (knowledge_base, storage_writer,
            # data_location, analysis_plugins, configuration, ...); passing
            # output_module here previously shifted every following positional
            # argument by one.
            counter = test_engine.AnalyzeEvents(knowledge_base_object,
                                                storage_writer,
                                                data_location,
                                                analysis_plugins,
                                                configuration)

        # TODO: assert if tests were successful.
        _ = counter
        _ = output_module

        test_filter = filters_test_lib.TestEventFilter()

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, 'storage.plaso')
            shutil.copyfile(test_file_path, temp_file)

            storage_writer = storage_factory.StorageFactory.CreateStorageWriter(
                definitions.DEFAULT_STORAGE_FORMAT, session, temp_file)

            counter = test_engine.AnalyzeEvents(knowledge_base_object,
                                                storage_writer,
                                                data_location,
                                                analysis_plugins,
                                                configuration,
                                                event_filter=test_filter)

        # TODO: assert if tests were successful.
        _ = counter
Example 4
  def _CreateEngine(self):
    """Creates an engine based on the front end settings.

    Returns:
      BaseEngine: engine.
    """
    # TODO: add single processing support.
    # Forward the front end's queueing preference to the engine.
    engine_object = psort.PsortMultiProcessEngine(
        use_zeromq=self._use_zeromq)
    return engine_object
Example 5
    def testAnalyzeEvents(self):
        """Tests the AnalyzeEvents function."""
        storage_file_path = self._GetTestFilePath(['psort_test.json.plaso'])

        session = sessions.Session()
        knowledge_base_object = knowledge_base.KnowledgeBase()

        formatter_mediator = formatters_mediator.FormatterMediator()
        formatter_mediator.SetPreferredLanguageIdentifier('en-US')

        output_mediator_object = output_mediator.OutputMediator(
            knowledge_base_object, formatter_mediator)

        # Null output: this test only exercises the analysis path.
        output_module = null.NullOutputModule(output_mediator_object)

        data_location = ''
        analysis_plugin = tagging.TaggingAnalysisPlugin()
        analysis_plugins = {'tagging': analysis_plugin}
        # TODO: set tag file.

        test_engine = psort.PsortMultiProcessEngine()

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, 'storage.plaso')
            # Work on a copy so the checked-in test storage file is not
            # modified by the analysis run.
            shutil.copyfile(storage_file_path, temp_file)

            storage_writer = storage_zip_file.ZIPStorageFileWriter(
                session, temp_file)

            # NOTE(review): this call passes output_module positionally while
            # the second call below omits it — the two invocations disagree on
            # the AnalyzeEvents signature; verify against the engine API.
            counter = test_engine.AnalyzeEvents(knowledge_base_object,
                                                storage_writer, output_module,
                                                data_location,
                                                analysis_plugins)

        # TODO: assert if tests were successful.
        _ = counter

        test_filter = filters_test_lib.TestEventFilter()

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, 'storage.plaso')
            shutil.copyfile(storage_file_path, temp_file)

            storage_writer = storage_zip_file.ZIPStorageFileWriter(
                session, temp_file)

            counter = test_engine.AnalyzeEvents(knowledge_base_object,
                                                storage_writer,
                                                data_location,
                                                analysis_plugins,
                                                event_filter=test_filter)

        # TODO: assert if tests were successful.
        _ = counter
Example 6
    def testExportEvents(self):
        """Tests the ExportEvents function."""
        test_file_path = self._GetTestFilePath(['psort_test.plaso'])
        self._SkipIfPathNotExists(test_file_path)

        kb = knowledge_base.KnowledgeBase()
        writer = cli_test_lib.TestBinaryOutputWriter()

        # Start from a clean formatter registry, then load the formatter
        # definitions shipped with the test data.
        formatters_manager.FormattersManager.Reset()
        formatters_manager.FormattersManager.ReadFormattersFromDirectory(
            self._GetDataFilePath(['formatters']))

        formatter_mediator_object = formatters_mediator.FormatterMediator()
        formatter_mediator_object.SetPreferredLanguageIdentifier('en-US')

        mediator = output_mediator.OutputMediator(
            kb,
            formatter_mediator_object,
            data_location=shared_test_lib.TEST_DATA_PATH)

        module = dynamic.DynamicOutputModule(mediator)
        module.SetOutputWriter(writer)

        configuration = configurations.ProcessingConfiguration()

        reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
            test_file_path)

        engine_object = psort.PsortMultiProcessEngine()
        engine_object.ExportEvents(kb, reader, module, configuration)

        # TODO: add test output writer that produces strings also see:
        # https://github.com/log2timeline/plaso/issues/1963
        lines = codecs.decode(writer.ReadOutput(), 'utf-8').split('\n')

        self.assertEqual(len(lines), 22)

        expected_line = ('2014-11-18T01:15:43+00:00,'
                         'Content Modification Time,'
                         'LOG,'
                         'Log File,'
                         '[---] last message repeated 5 times ---,'
                         'syslog,'
                         'OS:/tmp/test/test_data/syslog,'
                         'repeated')
        self.assertEqual(lines[14], expected_line)
Example 7
    def _CreateEngine(self):
        """Creates an engine based on the front end settings.

        Returns:
          BaseEngine: engine.
        """
        # Forward the front end's debugging, profiling and queueing settings
        # so the engine behaves consistently with the rest of the tool.
        return psort.PsortMultiProcessEngine(
            debug_output=self._debug_mode,
            enable_profiling=self._enable_profiling,
            profiling_directory=self._profiling_directory,
            profiling_sample_rate=self._profiling_sample_rate,
            profiling_type=self._profiling_type,
            use_zeromq=self._use_zeromq)
Example 8
    def testInternalExportEvents(self):
        """Tests the _ExportEvents function."""
        kb = knowledge_base.KnowledgeBase()
        writer = cli_test_lib.TestOutputWriter()

        formatter_mediator_object = formatters_mediator.FormatterMediator()
        mediator = output_mediator.OutputMediator(kb, formatter_mediator_object)

        module = TestOutputModule(mediator)
        module.SetOutputWriter(writer)

        engine_object = psort.PsortMultiProcessEngine()

        # Register the test formatter so the exported events can be rendered.
        formatters_manager.FormattersManager.RegisterFormatter(
            TestEventFormatter)

        with shared_test_lib.TempDirectory() as temp_directory:
            storage_path = os.path.join(temp_directory, u'storage.plaso')
            self._CreateTestStorageFile(storage_path)

            reader = storage_zip_file.ZIPStorageFileReader(storage_path)
            reader.ReadPreprocessingInformation(kb)

            buffer_object = TestEventBuffer(module, check_dedups=False)

            engine_object._ExportEvents(reader, buffer_object)

        buffer_object.Flush()

        formatters_manager.FormattersManager.DeregisterFormatter(
            TestEventFormatter)

        # The l2tcsv-style output is a header row followed by one row per
        # exported event.
        lines = writer.ReadOutput().split(b'\n')

        self.assertEqual(len(lines), 8)

        self.assertTrue(b'My text goes along: My text dude. lines' in lines[2])
        self.assertTrue(b'LOG/' in lines[2])
        self.assertTrue(b'None in Particular' in lines[2])
        self.assertEqual(lines[0], (
            b'date,time,timezone,MACB,source,sourcetype,type,user,host,short,desc,'
            b'version,filename,inode,notes,format,extra'))
Example 9
    def testInternalAnalyzeEvents(self):
        """Tests the _AnalyzeEvents function."""
        session = sessions.Session()
        knowledge_base_object = knowledge_base.KnowledgeBase()

        test_engine = psort.PsortMultiProcessEngine()

        test_plugin = TestAnalysisPlugin()

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, 'storage.plaso')
            self._CreateTestStorageFile(temp_file)
            self._ReadSessionConfiguration(temp_file, knowledge_base_object)

            storage_writer = storage_factory.StorageFactory.CreateStorageWriter(
                definitions.DEFAULT_STORAGE_FORMAT, session, temp_file)

            storage_writer.StartTaskStorage()

            storage_writer.Open()

            # TODO: implement, this currently loops infinite.
            # test_engine._AnalyzeEvents(storage_writer, [test_plugin])
            storage_writer.Close()

        test_filter = filters_test_lib.TestEventFilter()

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, 'storage.plaso')
            self._CreateTestStorageFile(temp_file)
            self._ReadSessionConfiguration(temp_file, knowledge_base_object)

            storage_writer = storage_factory.StorageFactory.CreateStorageWriter(
                definitions.DEFAULT_STORAGE_FORMAT, session, temp_file)

            storage_writer.StartTaskStorage()

            storage_writer.Open()

            # TODO: implement, this currently loops infinite.
            # The deliberate no-op assignments below keep the fixtures
            # referenced until the commented-out call is enabled.
            _ = test_engine
            _ = test_plugin
            _ = test_filter
            # test_engine._AnalyzeEvents(
            #    storage_writer, [test_plugin], event_filter=test_filter)
            storage_writer.Close()
Example 10
  def testInternalAnalyzeEvents(self):
    """Tests the _AnalyzeEvents function."""
    session = sessions.Session()
    knowledge_base_object = knowledge_base.KnowledgeBase()

    test_engine = psort.PsortMultiProcessEngine()

    test_plugin = TestAnalysisPlugin()

    with shared_test_lib.TempDirectory() as temp_directory:
      temp_file = os.path.join(temp_directory, u'storage.plaso')
      self._CreateTestStorageFile(temp_file)

      storage_writer = storage_zip_file.ZIPStorageFileWriter(
          session, temp_file)

      storage_writer.StartTaskStorage()

      storage_writer.Open()
      storage_writer.ReadPreprocessingInformation(knowledge_base_object)

      # TODO: implement, this currently loops infinite.
      # test_engine._AnalyzeEvents(storage_writer, [test_plugin])
      storage_writer.Close()

    test_filter = filters_test_lib.TestEventFilter()

    with shared_test_lib.TempDirectory() as temp_directory:
      temp_file = os.path.join(temp_directory, u'storage.plaso')
      self._CreateTestStorageFile(temp_file)

      storage_writer = storage_zip_file.ZIPStorageFileWriter(
          session, temp_file)

      storage_writer.StartTaskStorage()

      storage_writer.Open()
      storage_writer.ReadPreprocessingInformation(knowledge_base_object)

      # TODO: implement, this currently loops infinite.
      # The deliberate no-op assignments below keep the fixtures referenced
      # until the commented-out call is enabled.
      _ = test_engine
      _ = test_plugin
      _ = test_filter
      # test_engine._AnalyzeEvents(
      #    storage_writer, [test_plugin], event_filter=test_filter)
      storage_writer.Close()
Example 11
    def testExportEvents(self):
        """Tests the ExportEvents function."""
        test_file_path = self._GetTestFilePath(['psort_test.plaso'])
        self._SkipIfPathNotExists(test_file_path)

        kb = knowledge_base.KnowledgeBase()

        sink = io.StringIO()

        mediator = output_mediator.OutputMediator(
            kb, data_location=shared_test_lib.TEST_DATA_PATH)

        mediator.ReadMessageFormattersFromDirectory(
            self._GetDataFilePath(['formatters']))

        mediator.SetPreferredLanguageIdentifier('en-US')

        module = dynamic.DynamicOutputModule(mediator)
        # Redirect the module's output into an in-memory buffer so the test
        # can inspect the rendered lines directly.
        module._file_object = sink

        configuration = configurations.ProcessingConfiguration()

        reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
            test_file_path)

        engine_object = psort.PsortMultiProcessEngine()

        engine_object.ExportEvents(kb, reader, module, configuration)

        lines = sink.getvalue().split('\n')

        self.assertEqual(len(lines), 22)

        expected_line = ('2014-11-18T01:15:43+00:00,'
                         'Content Modification Time,'
                         'LOG,'
                         'Log File,'
                         '[---] last message repeated 5 times ---,'
                         'syslog,'
                         'OS:/tmp/test/test_data/syslog,'
                         'repeated')
        self.assertEqual(lines[14], expected_line)
Example 12
  def testExportEvents(self):
    """Tests the ExportEvents function."""
    storage_file_path = self._GetTestFilePath(['psort_test.plaso'])

    knowledge_base_object = knowledge_base.KnowledgeBase()
    output_writer = cli_test_lib.TestOutputWriter()

    formatter_mediator = formatters_mediator.FormatterMediator()
    formatter_mediator.SetPreferredLanguageIdentifier('en-US')

    output_mediator_object = output_mediator.OutputMediator(
        knowledge_base_object, formatter_mediator)

    output_module = dynamic.DynamicOutputModule(output_mediator_object)
    output_module.SetOutputWriter(output_writer)

    configuration = configurations.ProcessingConfiguration()

    storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
        storage_file_path)

    test_engine = psort.PsortMultiProcessEngine()
    counter = test_engine.ExportEvents(
        knowledge_base_object, storage_reader, output_module, configuration)

    self.assertEqual(counter['Stored Events'], 0)

    # ReadOutput() may return bytes; decode before splitting so that the
    # comparison below is str-to-str — splitting bytes on b'\n' produced
    # bytes lines that could never equal the str expected line.
    output = output_writer.ReadOutput()
    if isinstance(output, bytes):
      output = output.decode('utf-8')
    lines = output.split('\n')

    self.assertEqual(len(lines), 22)

    expected_line = (
        '2014-11-18T01:15:43+00:00,'
        'Content Modification Time,'
        'LOG,'
        'Log File,'
        '[---] last message repeated 5 times ---,'
        'syslog,'
        'OS:/tmp/test/test_data/syslog,'
        'repeated')
    self.assertEqual(lines[14], expected_line)
Example 13
  def testExportEvents(self):
    """Tests the ExportEvents function."""
    storage_file_path = self._GetTestFilePath([u'psort_test.json.plaso'])

    knowledge_base_object = knowledge_base.KnowledgeBase()
    output_writer = cli_test_lib.TestOutputWriter()

    formatter_mediator = formatters_mediator.FormatterMediator()
    formatter_mediator.SetPreferredLanguageIdentifier(u'en-US')

    output_mediator_object = output_mediator.OutputMediator(
        knowledge_base_object, formatter_mediator)

    output_module = dynamic.DynamicOutputModule(output_mediator_object)
    output_module.SetOutputWriter(output_writer)

    storage_reader = storage_zip_file.ZIPStorageFileReader(storage_file_path)

    test_engine = psort.PsortMultiProcessEngine()
    counter = test_engine.ExportEvents(
        knowledge_base_object, storage_reader, output_module)

    # TODO: refactor preprocessing object.
    self.assertEqual(counter[u'Stored Events'], 0)

    # Decode before splitting so the comparison against the unicode expected
    # line works on Python 3 as well (bytes lines never equal a str literal).
    output = output_writer.ReadOutput()
    if isinstance(output, bytes):
      output = output.decode(u'utf-8')
    lines = output.split(u'\n')

    self.assertEqual(len(lines), 24)

    expected_line = (
        u'2016-10-16T15:13:43+00:00,'
        u'mtime,'
        u'FILE,'
        u'OS mtime,'
        u'OS:/tmp/test/test_data/syslog Type: file,'
        u'filestat,'
        u'OS:/tmp/test/test_data/syslog,-')
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(lines[14], expected_line)
Example 14
    def testInternalExportEventsDeduplicate(self):
        """Tests the _ExportEvents function with deduplication."""
        knowledge_base_object = knowledge_base.KnowledgeBase()
        output_writer = cli_test_lib.TestBinaryOutputWriter()

        formatter_mediator = formatters_mediator.FormatterMediator()

        output_mediator_object = output_mediator.OutputMediator(
            knowledge_base_object, formatter_mediator)

        output_module = TestOutputModule(output_mediator_object)
        output_module.SetOutputWriter(output_writer)

        test_engine = psort.PsortMultiProcessEngine()

        formatters_manager.FormattersManager.RegisterFormatter(
            TestEventFormatter)

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, 'storage.plaso')
            self._CreateTestStorageFile(temp_file)

            storage_reader = (storage_factory.StorageFactory.
                              CreateStorageReaderForFile(temp_file))
            storage_reader.ReadSystemConfiguration(knowledge_base_object)

            # Deduplication is the default behavior of _ExportEvents.
            test_engine._ExportEvents(storage_reader, output_module)

        formatters_manager.FormattersManager.DeregisterFormatter(
            TestEventFormatter)

        # Removed dead code: the written output was read and split into lines
        # but never asserted on; the assertions below inspect the module state.
        # With deduplication 15 of the 17 test events remain, in 3 MACB groups.
        self.assertEqual(len(output_module.events), 15)
        self.assertEqual(len(output_module.macb_groups), 3)
Example 15
  def ProcessStorage(self):
    """Processes a plaso storage file.

    Runs the optional analysis plugins, then exports events through the
    configured output module, and finally prints summary tables.

    Raises:
      BadConfigOption: when a configuration parameter fails validation.
      RuntimeError: if a non-recoverable situation is encountered.
    """
    self._CheckStorageFile(self._storage_file_path)

    self._status_view.SetMode(self._status_view_mode)
    self._status_view.SetStorageFileInformation(self._storage_file_path)

    status_update_callback = (
        self._status_view.GetAnalysisStatusUpdateCallback())

    session = engine.BaseEngine.CreateSession(
        command_line_arguments=self._command_line_arguments,
        preferred_encoding=self.preferred_encoding)

    # First open is only used to probe the format and count existing reports.
    storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
        self._storage_file_path)
    if not storage_reader:
      logger.error('Format of storage file: {0:s} not supported'.format(
          self._storage_file_path))
      return

    self._number_of_analysis_reports = (
        storage_reader.GetNumberOfAnalysisReports())
    storage_reader.Close()

    configuration = configurations.ProcessingConfiguration()
    configuration.data_location = self._data_location
    configuration.profiling.directory = self._profiling_directory
    configuration.profiling.sample_rate = self._profiling_sample_rate
    configuration.profiling.profilers = self._profilers

    analysis_counter = None
    if self._analysis_plugins:
      storage_writer = (
          storage_factory.StorageFactory.CreateStorageWriterForFile(
              session, self._storage_file_path))

      # TODO: add single processing support.
      analysis_engine = psort.PsortMultiProcessEngine(
          use_zeromq=self._use_zeromq)

      analysis_engine.AnalyzeEvents(
          self._knowledge_base, storage_writer, self._data_location,
          self._analysis_plugins, configuration,
          event_filter=self._event_filter,
          event_filter_expression=self._event_filter_expression,
          status_update_callback=status_update_callback,
          worker_memory_limit=self._worker_memory_limit)

      analysis_counter = collections.Counter()
      for item, value in iter(session.analysis_reports_counter.items()):
        analysis_counter[item] = value

    events_counter = None
    if self._output_format != 'null':
      storage_reader = (
          storage_factory.StorageFactory.CreateStorageReaderForFile(
              self._storage_file_path))

      # TODO: add single processing support.
      analysis_engine = psort.PsortMultiProcessEngine(
          use_zeromq=self._use_zeromq)

      events_counter = analysis_engine.ExportEvents(
          self._knowledge_base, storage_reader, self._output_module,
          configuration, deduplicate_events=self._deduplicate_events,
          event_filter=self._event_filter,
          status_update_callback=status_update_callback,
          time_slice=self._time_slice, use_time_slicer=self._use_time_slicer)

    if self._quiet_mode:
      return

    self._output_writer.Write('Processing completed.\n')

    if analysis_counter:
      table_view = views.ViewsFactory.GetTableView(
          self._views_format_type, title='Analysis reports generated')
      for element, count in analysis_counter.most_common():
        # 'total' is printed separately as the final row.
        if element != 'total':
          table_view.AddRow([element, count])

      table_view.AddRow(['Total', analysis_counter['total']])
      table_view.Write(self._output_writer)

    if events_counter:
      table_view = views.ViewsFactory.GetTableView(
          self._views_format_type, title='Export results')
      for element, count in events_counter.most_common():
        table_view.AddRow([element, count])
      table_view.Write(self._output_writer)

    # Re-open the storage to print report details after processing.
    storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
        self._storage_file_path)
    self._PrintAnalysisReportsDetails(storage_reader)
Example 16
  def AnalyzeEvents(self):
    """Analyzes events from a plaso storage file and generate a report.

    Raises:
      BadConfigOption: when a configuration parameter fails validation or the
          storage file cannot be opened with read access.
      RuntimeError: if a non-recoverable situation is encountered.
    """
    session = engine.BaseEngine.CreateSession(
        command_line_arguments=self._command_line_arguments,
        preferred_encoding=self.preferred_encoding)

    # First open is only used to probe the format and count existing reports.
    storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
        self._storage_file_path)
    if not storage_reader:
      raise errors.BadConfigOption(
          'Format of storage file: {0:s} not supported'.format(
              self._storage_file_path))

    self._number_of_analysis_reports = (
        storage_reader.GetNumberOfAnalysisReports())
    storage_reader.Close()

    configuration = self._CreateProcessingConfiguration(
        self._knowledge_base)

    counter = collections.Counter()
    if self._output_format != 'null':
      self._status_view.SetMode(self._status_view_mode)
      self._status_view.SetStorageFileInformation(self._storage_file_path)

      status_update_callback = (
          self._status_view.GetAnalysisStatusUpdateCallback())

      storage_reader = (
          storage_factory.StorageFactory.CreateStorageReaderForFile(
              self._storage_file_path))

      # TODO: add single processing support.
      analysis_engine = psort.PsortMultiProcessEngine(
          worker_memory_limit=self._worker_memory_limit,
          worker_timeout=self._worker_timeout)

      analysis_engine.ExportEvents(
          self._knowledge_base, storage_reader, self._output_module,
          configuration, deduplicate_events=self._deduplicate_events,
          status_update_callback=status_update_callback,
          time_slice=self._time_slice, use_time_slicer=self._use_time_slicer)

      # The output module is owned by this method once export completes.
      self._output_module.Close()
      self._output_module = None

    for item, value in session.analysis_reports_counter.items():
      counter[item] = value

    if self._quiet_mode:
      return

    self._output_writer.Write('Processing completed.\n')

    table_view = views.ViewsFactory.GetTableView(
        self._views_format_type, title='Counter')
    for element, count in counter.most_common():
      if not element:
        element = 'N/A'
      table_view.AddRow([element, count])
    table_view.Write(self._output_writer)

    # Re-open the storage to print report details after processing.
    storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
        self._storage_file_path)
    self._PrintAnalysisReportsDetails(
        storage_reader, self._number_of_analysis_reports)

    self._output_writer.Write('Storage file is {0:s}\n'.format(
        self._storage_file_path))
Example 17
  def ProcessStorage(self):
    """Processes a plaso storage file.

    Runs the optional analysis plugins, then exports events through the
    configured output module, and finally prints summary tables.

    Raises:
      BadConfigOption: when a configuration parameter fails validation or the
          storage file cannot be opened with read access.
      RuntimeError: if a non-recoverable situation is encountered.
    """
    self._status_view.SetMode(self._status_view_mode)
    self._status_view.SetStorageFileInformation(self._storage_file_path)

    status_update_callback = (
        self._status_view.GetAnalysisStatusUpdateCallback())

    session = engine.BaseEngine.CreateSession(
        command_line_arguments=self._command_line_arguments,
        preferred_encoding=self.preferred_encoding)

    storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
        self._storage_file_path)
    if not storage_reader:
      raise RuntimeError('Unable to create storage reader.')

    # Use a distinct loop variable: the original code iterated with
    # "for session in ...", clobbering the newly created session above, so
    # the storage writer and the reports counter further down operated on the
    # last stored session instead of the new one.
    for storage_session in storage_reader.GetSessions():
      if not storage_session.source_configurations:
        storage_reader.ReadSystemConfiguration(self._knowledge_base)
      else:
        for source_configuration in storage_session.source_configurations:
          self._knowledge_base.ReadSystemConfigurationArtifact(
              source_configuration.system_configuration,
              session_identifier=storage_session.identifier)

      self._knowledge_base.SetTextPrepend(storage_session.text_prepend)

    self._number_of_analysis_reports = (
        storage_reader.GetNumberOfAnalysisReports())
    storage_reader.Close()

    configuration = configurations.ProcessingConfiguration()
    configuration.data_location = self._data_location
    configuration.debug_output = self._debug_mode
    configuration.log_filename = self._log_file
    configuration.profiling.directory = self._profiling_directory
    configuration.profiling.sample_rate = self._profiling_sample_rate
    configuration.profiling.profilers = self._profilers

    analysis_counter = None
    if self._analysis_plugins:
      storage_writer = (
          storage_factory.StorageFactory.CreateStorageWriterForFile(
              session, self._storage_file_path))
      if not storage_writer:
        raise RuntimeError('Unable to create storage writer.')

      # TODO: add single processing support.
      analysis_engine = psort.PsortMultiProcessEngine(
          worker_memory_limit=self._worker_memory_limit,
          worker_timeout=self._worker_timeout)

      analysis_engine.AnalyzeEvents(
          self._knowledge_base, storage_writer, self._data_location,
          self._analysis_plugins, configuration,
          event_filter=self._event_filter,
          event_filter_expression=self._event_filter_expression,
          status_update_callback=status_update_callback)

      analysis_counter = collections.Counter()
      for item, value in session.analysis_reports_counter.items():
        analysis_counter[item] = value

    if self._output_format != 'null':
      storage_reader = (
          storage_factory.StorageFactory.CreateStorageReaderForFile(
              self._storage_file_path))

      # TODO: add single processing support.
      analysis_engine = psort.PsortMultiProcessEngine(
          worker_memory_limit=self._worker_memory_limit,
          worker_timeout=self._worker_timeout)

      analysis_engine.ExportEvents(
          self._knowledge_base, storage_reader, self._output_module,
          configuration, deduplicate_events=self._deduplicate_events,
          event_filter=self._event_filter,
          status_update_callback=status_update_callback,
          time_slice=self._time_slice, use_time_slicer=self._use_time_slicer)

      self._output_module.Close()
      self._output_module = None

    if self._quiet_mode:
      return

    self._output_writer.Write('Processing completed.\n')

    if analysis_counter:
      table_view = views.ViewsFactory.GetTableView(
          self._views_format_type, title='Analysis reports generated')
      for element, count in analysis_counter.most_common():
        # 'total' is printed separately as the final row.
        if element != 'total':
          table_view.AddRow([element, count])

      table_view.AddRow(['Total', analysis_counter['total']])
      table_view.Write(self._output_writer)

    storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
        self._storage_file_path)
    self._PrintAnalysisReportsDetails(storage_reader)
Example 18
    def AnalyzeEvents(self):
        """Analyzes events from a plaso storage file and generate a report.

        Raises:
          BadConfigOption: when a configuration parameter fails validation.
          RuntimeError: if a non-recoverable situation is encountered.
        """
        session = engine.BaseEngine.CreateSession(
            command_line_arguments=self._command_line_arguments,
            preferred_encoding=self.preferred_encoding)

        # First open is only used to count existing analysis reports.
        storage_reader = storage_zip_file.ZIPStorageFileReader(
            self._storage_file_path)
        self._number_of_analysis_reports = (
            storage_reader.GetNumberOfAnalysisReports())
        storage_reader.Close()

        counter = collections.Counter()
        if self._output_format != u'null':
            self._status_view.SetMode(self._status_view_mode)
            self._status_view.SetStorageFileInformation(
                self._storage_file_path)

            status_update_callback = (
                self._status_view.GetAnalysisStatusUpdateCallback())

            storage_reader = storage_zip_file.ZIPStorageFileReader(
                self._storage_file_path)

            # TODO: add single processing support.
            analysis_engine = psort.PsortMultiProcessEngine(
                use_zeromq=self._use_zeromq)

            # TODO: pass configuration object.
            events_counter = analysis_engine.ExportEvents(
                self._knowledge_base,
                storage_reader,
                self._output_module,
                deduplicate_events=self._deduplicate_events,
                status_update_callback=status_update_callback,
                time_slice=self._time_slice,
                use_time_slicer=self._use_time_slicer)

            counter += events_counter

        for item, value in iter(session.analysis_reports_counter.items()):
            counter[item] = value

        if self._quiet_mode:
            return

        self._output_writer.Write(u'Processing completed.\n')

        table_view = views.ViewsFactory.GetTableView(self._views_format_type,
                                                     title=u'Counter')
        for element, count in counter.most_common():
            if not element:
                element = u'N/A'
            table_view.AddRow([element, count])
        table_view.Write(self._output_writer)

        # Re-open the storage to print report details after processing.
        storage_reader = storage_zip_file.ZIPStorageFileReader(
            self._storage_file_path)
        self._PrintAnalysisReportsDetails(storage_reader,
                                          self._number_of_analysis_reports)

        self._output_writer.Write(u'Storage file is {0:s}\n'.format(
            self._storage_file_path))