Example #1
  def testInitialize(self):
    """Tests the __init__ function."""
    session = sessions.Session()
    storage_writer = fake_storage.FakeStorageWriter(session)
    knowledge_base = self._SetUpKnowledgeBase()

    mediator.AnalysisMediator(storage_writer, knowledge_base)
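
A note on what these snippets assume: the imports are omitted throughout. Below is a minimal, self-contained sketch of the objects an AnalysisMediator is built from in a test; the module paths are assumptions and move between plaso releases (newer releases, for example, construct FakeStorageWriter without a session argument).

# Minimal sketch; module paths are assumed and should be checked against
# the plaso version in use.
from plaso.analysis import mediator as analysis_mediator
from plaso.containers import sessions
from plaso.engine import knowledge_base
from plaso.storage import fake_storage

# An AnalysisMediator is constructed from a storage writer and a knowledge
# base; the fake storage writer keeps everything in memory for tests.
session = sessions.Session()
storage_writer = fake_storage.FakeStorageWriter(session)
knowledge_base_object = knowledge_base.KnowledgeBase()

analysis_mediator_object = analysis_mediator.AnalysisMediator(
    storage_writer, knowledge_base_object)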
Example #2
    def _AnalyzeEvents(self,
                       event_objects,
                       plugin,
                       knowledge_base_values=None):
        """Analyzes events using the analysis plugin.

    Args:
      event_objects (list[EventObject]): events to analyze.
      plugin (AnalysisPlugin): plugin.
      knowledge_base_values (Optional[dict[str, str]]): knowledge base values.

    Returns:
      FakeStorageWriter: storage writer.
    """
        knowledge_base_object = self._SetUpKnowledgeBase(
            knowledge_base_values=knowledge_base_values)

        session = sessions.Session()
        storage_writer = fake_storage.FakeStorageWriter(session)
        storage_writer.Open()

        mediator = analysis_mediator.AnalysisMediator(storage_writer,
                                                      knowledge_base_object)

        for event in event_objects:
            plugin.ExamineEvent(mediator, event)

        analysis_report = plugin.CompileReport(mediator)
        storage_writer.AddAnalysisReport(analysis_report)

        return storage_writer
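
A hypothetical test built on the helper above. MyAnalysisPlugin and _TEST_EVENTS are placeholders, not real plaso names, and the analysis_reports list on FakeStorageWriter is assumed from the AddAnalysisReport call in the helper.

    def testExamineEventAndCompileReport(self):
        """Tests the ExamineEvent and CompileReport functions (sketch)."""
        plugin = my_plugin.MyAnalysisPlugin()  # placeholder plugin under test

        storage_writer = self._AnalyzeEvents(self._TEST_EVENTS, plugin)

        # The helper adds the compiled report, so exactly one is expected.
        self.assertEqual(len(storage_writer.analysis_reports), 1)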
Example #3
    def _RunAnalysisPlugin(self,
                           analysis_plugin,
                           knowledge_base_object,
                           output_format=u'text'):
        """Analyzes an event object queue using the plugin object.

    Args:
      analysis_plugin: the analysis plugin object (instance of AnalysisPlugin).
      knowledge_base_object: the knowledge base object (instance of
                             KnowledgeBase).
      output_format: Optional output format. The default is 'text'.

    Returns:
      The analysis report queue consumer (instance of
      TestAnalysisReportQueueConsumer).
    """
        analysis_report_queue = single_process.SingleProcessQueue()
        analysis_report_queue_consumer = TestAnalysisReportQueueConsumer(
            analysis_report_queue)
        analysis_report_queue_producer = queue.ItemQueueProducer(
            analysis_report_queue)

        analysis_mediator = mediator.AnalysisMediator(
            analysis_report_queue_producer,
            knowledge_base_object,
            output_format=output_format)

        analysis_plugin.RunPlugin(analysis_mediator)
        analysis_report_queue.SignalEndOfInput()

        return analysis_report_queue_consumer
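
The helper returns the queue consumer rather than the reports themselves. A hedged sketch of draining it, assuming the TestAnalysisReportQueueConsumer used above collects consumed items in an analysis_reports attribute:

    def _GetAnalysisReportsFromQueue(self, analysis_report_queue_consumer):
        """Drains the consumer and returns the collected reports (sketch)."""
        analysis_report_queue_consumer.ConsumeItems()
        return list(analysis_report_queue_consumer.analysis_reports)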
Example #4
    def _ParseAndAnalyzeFile(self,
                             path_segments,
                             parser,
                             plugin,
                             knowledge_base_values=None):
        """Parses and analyzes a file using the parser and analysis plugin.

    Args:
      path_segments (list[str]): path segments inside the test data directory.
      parser (BaseParser): parser.
      plugin (AnalysisPlugin): plugin.
      knowledge_base_values (Optional[dict[str, str]]): knowledge base values.

    Returns:
      FakeStorageWriter: storage writer.
    """
        knowledge_base_object = self._SetUpKnowledgeBase(
            knowledge_base_values=knowledge_base_values)

        storage_writer = self._ParseFile(path_segments, parser,
                                         knowledge_base_object)

        mediator = analysis_mediator.AnalysisMediator(storage_writer,
                                                      knowledge_base_object)

        for event in storage_writer.GetSortedEvents():
            plugin.ExamineEvent(mediator, event)

        analysis_report = plugin.CompileReport(mediator)
        storage_writer.AddAnalysisReport(analysis_report)

        return storage_writer
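
The same test pattern as the earlier _AnalyzeEvents sketch applies here. A hypothetical caller, with placeholder parser, plugin and file names:

    def testExamineEvent(self):
        """Tests the ExamineEvent function (sketch with placeholder names)."""
        parser = my_parser.MyParser()
        plugin = my_plugin.MyAnalysisPlugin()

        storage_writer = self._ParseAndAnalyzeFile(
            ['my_test_file.db'], parser, plugin)

        # As before, analysis_reports on FakeStorageWriter is assumed.
        self.assertEqual(len(storage_writer.analysis_reports), 1)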
Example #5
    def setUp(self):
        """Sets up the needed objects used throughout the test."""
        knowledge_base = self._SetUpKnowledgeBase()

        analysis_report_queue = single_process.SingleProcessQueue()
        analysis_report_queue_producer = queue.ItemQueueProducer(
            analysis_report_queue)

        self._analysis_mediator = mediator.AnalysisMediator(
            analysis_report_queue_producer, knowledge_base)
Example #6
  def testSignalAbort(self):
    """Tests the SignalAbort function."""
    session = sessions.Session()
    storage_writer = fake_storage.FakeStorageWriter(session)
    knowledge_base = self._SetUpKnowledgeBase()

    analysis_mediator = mediator.AnalysisMediator(
        storage_writer, knowledge_base)

    analysis_mediator.SignalAbort()
Example #7
    def setUp(self):
        """Makes preparations before running an individual test."""
        knowledge_base = self._SetUpKnowledgeBase()

        analysis_report_queue = single_process.SingleProcessQueue()
        analysis_report_queue_producer = queue.ItemQueueProducer(
            analysis_report_queue)

        self._analysis_mediator = mediator.AnalysisMediator(
            analysis_report_queue_producer, knowledge_base)
Example #8
  def _StartAnalysisPlugins(
      self, storage_file_path, analysis_plugins, pre_obj,
      analysis_queue_port=None, analysis_report_incoming_queue=None,
      command_line_arguments=None):
    """Start all the analysis plugin.

    Args:
      storage_file_path: string containing the path of the storage file.
      analysis_plugins: list of analysis plugin objects (instance of
                        AnalysisPlugin) that should be started.
      pre_obj: The preprocessor object (instance of PreprocessObject).
      analysis_queue_port: optional TCP port that the ZeroMQ analysis report
                           queues should use.
      analysis_report_incoming_queue: optional queue (instance of Queue) that
                                      reports should be pushed to when ZeroMQ
                                      is not in use.
      command_line_arguments: optional string of the command line arguments or
                              None if not set.
    """
    logging.info(u'Starting analysis plugins.')
    self._SetAnalysisPluginProcessInformation(
        storage_file_path, analysis_plugins, pre_obj,
        command_line_arguments=command_line_arguments)

    knowledge_base_object = knowledge_base.KnowledgeBase(pre_obj=pre_obj)
    for analysis_plugin in analysis_plugins:
      if self._use_zeromq:
        analysis_plugin_output_queue = zeromq_queue.ZeroMQPushConnectQueue(
            delay_open=True, port=analysis_queue_port)
      else:
        analysis_plugin_output_queue = analysis_report_incoming_queue

      analysis_report_queue_producer = plaso_queue.ItemQueueProducer(
          analysis_plugin_output_queue)

      completion_event = multiprocessing.Event()
      analysis_mediator_object = analysis_mediator.AnalysisMediator(
          analysis_report_queue_producer, knowledge_base_object,
          data_location=self._data_location,
          completion_event=completion_event)
      analysis_process = multiprocessing.Process(
          name=u'Analysis {0:s}'.format(analysis_plugin.plugin_name),
          target=analysis_plugin.RunPlugin,
          args=(analysis_mediator_object,))

      process_info = PsortAnalysisProcess(
          completion_event, analysis_plugin, analysis_process)
      self._analysis_process_info.append(process_info)

      analysis_process.start()
      logging.info(
          u'Plugin: [{0:s}] started.'.format(analysis_plugin.plugin_name))

    logging.info(u'Analysis plugins running')
Example #9
    def _ParseAndAnalyzeFile(self,
                             path_segments,
                             parser,
                             plugin,
                             knowledge_base_values=None):
        """Parses and analyzes a file using the parser and analysis plugin.

    Args:
      path_segments (list[str]): path segments inside the test data directory.
      parser (BaseParser): parser.
      plugin (AnalysisPlugin): plugin.
      knowledge_base_values (Optional[dict[str, str]]): knowledge base values.

    Returns:
      FakeStorageWriter: storage writer.

    Raises:
      SkipTest: if the path inside the test data directory does not exist and
          the test should be skipped.
    """
        session = sessions.Session()

        knowledge_base_object = self._SetUpKnowledgeBase(
            knowledge_base_values=knowledge_base_values)

        storage_writer = self._ParseFile(path_segments, parser,
                                         knowledge_base_object)

        mediator = analysis_mediator.AnalysisMediator(session,
                                                      knowledge_base_object)
        mediator.SetStorageWriter(storage_writer)

        for event in storage_writer.GetSortedEvents():
            event_data = None
            event_data_identifier = event.GetEventDataIdentifier()
            if event_data_identifier:
                event_data = storage_writer.GetAttributeContainerByIdentifier(
                    events.EventData.CONTAINER_TYPE, event_data_identifier)

            event_data_stream = None
            if event_data:
                event_data_stream_identifier = (
                    event_data.GetEventDataStreamIdentifier())
                if event_data_stream_identifier:
                    event_data_stream = storage_writer.GetAttributeContainerByIdentifier(
                        events.EventDataStream.CONTAINER_TYPE,
                        event_data_stream_identifier)

            plugin.ExamineEvent(mediator, event, event_data, event_data_stream)

        analysis_report = plugin.CompileReport(mediator)
        storage_writer.AddAttributeContainer(analysis_report)

        return storage_writer
Example #10
    def _TagEvent(self, event, event_data, event_data_stream):
        """Tags an event.

    Args:
      event (Event): event.
      event_data (EventData): event data.
      event_data_stream (EventDataStream): event data stream.

    Returns:
      FakeStorageWriter: storage writer.

    Raises:
      SkipTest: if the tag file does not exist.
    """
        tag_file_path = self._GetDataFilePath([self._TAG_FILE])
        self._SkipIfPathNotExists(tag_file_path)

        session = sessions.Session()

        storage_writer = fake_writer.FakeStorageWriter()
        storage_writer.Open()

        if event_data_stream:
            storage_writer.AddAttributeContainer(event_data_stream)
            event_data_stream_identifier = event_data_stream.GetIdentifier()
            event_data.SetEventDataStreamIdentifier(
                event_data_stream_identifier)

        storage_writer.AddAttributeContainer(event_data)
        event_data_identifier = event_data.GetIdentifier()
        event.SetEventDataIdentifier(event_data_identifier)

        storage_writer.AddAttributeContainer(event)

        knowledge_base_object = knowledge_base.KnowledgeBase()

        mediator = analysis_mediator.AnalysisMediator(session,
                                                      knowledge_base_object)
        mediator.SetStorageWriter(storage_writer)

        plugin = tagging.TaggingAnalysisPlugin()
        plugin.SetAndLoadTagFile(tag_file_path)
        plugin.ExamineEvent(mediator, event, event_data, event_data_stream)

        analysis_report = plugin.CompileReport(mediator)
        storage_writer.AddAttributeContainer(analysis_report)

        return storage_writer
Example #11
  def testGetDisplayNameForPathSpec(self):
    """Tests the GetDisplayNameForPathSpec function."""
    session = sessions.Session()
    storage_writer = fake_storage.FakeStorageWriter(session)
    knowledge_base = self._SetUpKnowledgeBase()

    analysis_mediator = mediator.AnalysisMediator(
        storage_writer, knowledge_base)

    test_path = self._GetTestFilePath([u'syslog.gz'])
    os_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)

    expected_display_name = u'OS:{0:s}'.format(test_path)
    display_name = analysis_mediator.GetDisplayNameForPathSpec(os_path_spec)
    self.assertEqual(display_name, expected_display_name)
Example #12
    def _AnalyzeEvents(self,
                       event_values_list,
                       plugin,
                       knowledge_base_values=None):
        """Analyzes events using the analysis plugin.

    Args:
      event_values_list (list[dict[str, object]]): list of event values.
      plugin (AnalysisPlugin): plugin.
      knowledge_base_values (Optional[dict[str, str]]): knowledge base values.

    Returns:
      FakeStorageWriter: storage writer.
    """
        knowledge_base_object = self._SetUpKnowledgeBase(
            knowledge_base_values=knowledge_base_values)

        session = sessions.Session()
        storage_writer = fake_writer.FakeStorageWriter()
        storage_writer.Open()

        test_events = []
        for event, event_data, event_data_stream in (
                containers_test_lib.CreateEventsFromValues(event_values_list)):
            storage_writer.AddAttributeContainer(event_data_stream)

            event_data.SetEventDataStreamIdentifier(
                event_data_stream.GetIdentifier())
            storage_writer.AddAttributeContainer(event_data)

            event.SetEventDataIdentifier(event_data.GetIdentifier())
            storage_writer.AddAttributeContainer(event)

            test_events.append((event, event_data, event_data_stream))

        mediator = analysis_mediator.AnalysisMediator(session,
                                                      knowledge_base_object)
        mediator.SetStorageWriter(storage_writer)

        for event, event_data, event_data_stream in test_events:
            plugin.ExamineEvent(mediator, event, event_data, event_data_stream)

        analysis_report = plugin.CompileReport(mediator)
        storage_writer.AddAttributeContainer(analysis_report)

        return storage_writer
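
For reference, a hypothetical event_values_list of the shape this helper expects; the keys and values are illustrative only, and plaso.lib.definitions and plaso.lib.timelib are assumed imports.

# Illustrative only: values for containers_test_lib.CreateEventsFromValues.
_TEST_EVENTS = [
    {'data_type': 'fake:event',
     'hostname': 'acme-server',
     'timestamp': timelib.Timestamp.CopyFromString('2020-01-01 00:00:00'),
     'timestamp_desc': definitions.TIME_DESCRIPTION_UNKNOWN}]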
Example #13
    def _RunAnalysisPlugin(self, analysis_plugin, knowledge_base_object):
        """Analyzes an event object queue using the plugin object.

    Args:
      analysis_plugin: the analysis plugin object (instance of AnalysisPlugin).
      knowledge_base_object: the knowledge base object (instance of
                             KnowledgeBase).

    Returns:
      The analysis report queue consumer (instance of
      TestAnalysisReportQueueConsumer).
    """
        analysis_report_queue = single_process.SingleProcessQueue()
        analysis_report_queue_consumer = TestAnalysisReportQueueConsumer(
            analysis_report_queue)
        analysis_report_queue_producer = queue.ItemQueueProducer(
            analysis_report_queue)

        analysis_mediator = mediator.AnalysisMediator(
            analysis_report_queue_producer, knowledge_base_object)

        analysis_plugin.RunPlugin(analysis_mediator)

        return analysis_report_queue_consumer
Example #14
    def _CreateAnalysisMediator(self, session, knowledge_base,
                                processing_configuration, data_location):
        """Creates an analysis mediator.

    Args:
      session (Session): session in which the sources are processed.
      knowledge_base (KnowledgeBase): knowledge base which contains
          information from the source data needed for parsing.
      processing_configuration (ProcessingConfiguration): processing
          configuration.
      data_location (str): path to the location that data files
          should be loaded from.

    Returns:
      AnalysisMediator: analysis mediator.
    """
        mediator = analysis_mediator.AnalysisMediator(
            session, knowledge_base, data_location=data_location)

        # TODO: move data_location to processing_configuration

        mediator.SetTextPrepend(processing_configuration.text_prepend)

        return mediator
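
A hedged sketch of calling this factory, assuming plaso.containers.sessions, plaso.engine.knowledge_base and plaso.engine.configurations are imported and that a default ProcessingConfiguration is acceptable; the data location path is hypothetical.

        session = sessions.Session()
        knowledge_base_object = knowledge_base.KnowledgeBase()
        processing_configuration = configurations.ProcessingConfiguration()

        analysis_mediator_object = self._CreateAnalysisMediator(
            session, knowledge_base_object, processing_configuration,
            '/usr/share/plaso')  # hypothetical data files location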
Example #15
    def _TagEvent(self, event, event_data):
        """Tags an event.

    Args:
      event (Event): event.
      event_data (EventData): event data.

    Returns:
      FakeStorageWriter: storage writer.

    Raises:
      SkipTest: if the tag file does not exist.
    """
        tag_file_path = self._GetDataFilePath([self._TAG_FILE])
        self._SkipIfPathNotExists(tag_file_path)

        session = sessions.Session()

        storage_writer = fake_writer.FakeStorageWriter(session)
        storage_writer.Open()
        storage_writer.AddEventData(event_data)
        storage_writer.AddEvent(event)

        knowledge_base_object = knowledge_base.KnowledgeBase()

        mediator = analysis_mediator.AnalysisMediator(storage_writer,
                                                      knowledge_base_object)

        plugin = tagging.TaggingAnalysisPlugin()
        plugin.SetAndLoadTagFile(tag_file_path)
        plugin.ExamineEvent(mediator, event, event_data)

        analysis_report = plugin.CompileReport(mediator)
        storage_writer.AddAnalysisReport(analysis_report)

        return storage_writer
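
A hypothetical test built on this helper. _CreateTestEvent is a placeholder, and the event_tags list on FakeStorageWriter is an assumption about how tags produced by the plugin can be inspected.

    def testExamineEvent(self):
        """Tests the ExamineEvent function (sketch with placeholder helpers)."""
        event, event_data = self._CreateTestEvent()  # placeholder helper

        storage_writer = self._TagEvent(event, event_data)

        # The expected number of tags depends on the tag file in use.
        self.assertEqual(len(storage_writer.event_tags), 1)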
Example #16
    def _Main(self):
        """The main loop."""
        self._StartProfiling(self._processing_configuration.profiling)

        if self._serializers_profiler:
            self._storage_writer.SetSerializersProfiler(
                self._serializers_profiler)

        if self._storage_profiler:
            self._storage_writer.SetStorageProfiler(self._storage_profiler)

        logger.debug('Analysis plugin: {0!s} (PID: {1:d}) started'.format(
            self._name, self._pid))

        # Creating the threading event in the constructor will cause a pickle
        # error on Windows when an analysis process is created.
        self._foreman_status_wait_event = threading.Event()
        self._status = definitions.PROCESSING_STATUS_ANALYZING

        task = tasks.Task()
        # TODO: temporary solution.
        task.identifier = self._analysis_plugin.plugin_name

        self._task = task

        storage_writer = self._storage_writer.CreateTaskStorage(task)

        if self._serializers_profiler:
            storage_writer.SetSerializersProfiler(self._serializers_profiler)

        if self._storage_profiler:
            storage_writer.SetStorageProfiler(self._storage_profiler)

        storage_writer.Open()

        self._analysis_mediator = analysis_mediator.AnalysisMediator(
            storage_writer,
            self._knowledge_base,
            data_location=self._data_location)

        # TODO: set event_filter_expression in mediator.

        storage_writer.WriteTaskStart()

        try:
            logger.debug(
                '{0!s} (PID: {1:d}) started monitoring event queue.'.format(
                    self._name, self._pid))

            while not self._abort:
                try:
                    event = self._event_queue.PopItem()

                except (errors.QueueClose, errors.QueueEmpty) as exception:
                    logger.debug(
                        'ConsumeItems exiting with exception {0:s}.'.format(
                            type(exception)))
                    break

                if isinstance(event, plaso_queue.QueueAbort):
                    logger.debug(
                        'ConsumeItems exiting, dequeued QueueAbort object.')
                    break

                self._ProcessEvent(self._analysis_mediator, event)

                self._number_of_consumed_events += 1

                if self._guppy_memory_profiler:
                    self._guppy_memory_profiler.Sample()

            logger.debug(
                '{0!s} (PID: {1:d}) stopped monitoring event queue.'.format(
                    self._name, self._pid))

            if not self._abort:
                self._status = definitions.PROCESSING_STATUS_REPORTING

                self._analysis_mediator.ProduceAnalysisReport(
                    self._analysis_plugin)

        # All exceptions need to be caught here to prevent the process
        # from being killed by an uncaught exception.
        except Exception as exception:  # pylint: disable=broad-except
            logger.warning(
                'Unhandled exception in process: {0!s} (PID: {1:d}).'.format(
                    self._name, self._pid))
            logger.exception(exception)

            self._abort = True

        finally:
            storage_writer.WriteTaskCompletion(aborted=self._abort)

            storage_writer.Close()

            if self._serializers_profiler:
                storage_writer.SetSerializersProfiler(None)

            if self._storage_profiler:
                storage_writer.SetStorageProfiler(None)

        try:
            self._storage_writer.FinalizeTaskStorage(task)
        except IOError:
            pass

        if self._abort:
            self._status = definitions.PROCESSING_STATUS_ABORTED
        else:
            self._status = definitions.PROCESSING_STATUS_COMPLETED

        self._foreman_status_wait_event.wait(self._FOREMAN_STATUS_WAIT)

        logger.debug('Analysis plugin: {0!s} (PID: {1:d}) stopped'.format(
            self._name, self._pid))

        if self._serializers_profiler:
            self._storage_writer.SetSerializersProfiler(None)

        if self._storage_profiler:
            self._storage_writer.SetStorageProfiler(None)

        self._StopProfiling()

        self._analysis_mediator = None
        self._foreman_status_wait_event = None
        self._storage_writer = None
        self._task = None

        try:
            self._event_queue.Close(abort=self._abort)
        except errors.QueueAlreadyClosed:
            logger.error('Queue for {0:s} was already closed.'.format(
                self.name))
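
The loop above delegates per-event work to _ProcessEvent, which is not shown. A minimal sketch of what such a method plausibly does, assuming the plugin interface used in the other examples and that a single failing event should not bring down the analysis process:

    def _ProcessEvent(self, mediator, event):
        """Processes a single event with the analysis plugin (sketch)."""
        try:
            self._analysis_plugin.ExamineEvent(mediator, event)

        except Exception as exception:  # pylint: disable=broad-except
            # Catch everything so one bad event cannot crash the process.
            logger.warning(
                'Unable to examine event with error: {0!s}'.format(exception))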
Example #17
    def ProcessStorage(self,
                       output_module,
                       storage_file,
                       analysis_plugins,
                       event_queue_producers,
                       deduplicate_events=True,
                       preferred_encoding=u'utf-8',
                       time_slice=None,
                       use_time_slicer=False):
        """Processes a plaso storage file.

    Args:
      output_module: an output module (instance of OutputModule).
      storage_file: the storage file object (instance of StorageFile).
      analysis_plugins: list of analysis plugin objects (instance of
                        AnalysisPlugin).
      event_queue_producers: list of event queue producer objects (instance
                             of ItemQueueProducer).
      deduplicate_events: optional boolean value to indicate if the event
                          objects should be deduplicated. The default is True.
      preferred_encoding: optional preferred encoding. The default is "utf-8".
      time_slice: optional time slice object (instance of TimeSlice).
                  The default is None.
      use_time_slicer: optional boolean value to indicate the 'time slicer'
                       should be used. The default is False. The 'time slicer'
                       will provide a context of events around an event of
                       interest.

    Returns:
      A counter (an instance of collections.Counter) that contains the analysis
      plugin results or None.

    Raises:
      RuntimeError: if a non-recoverable situation is encountered.
    """
        if time_slice:
            if time_slice.event_timestamp:
                pfilter.TimeRangeCache.SetLowerTimestamp(
                    time_slice.start_timestamp)
                pfilter.TimeRangeCache.SetUpperTimestamp(
                    time_slice.end_timestamp)

            elif use_time_slicer:
                self._filter_buffer = bufferlib.CircularBuffer(
                    time_slice.duration)

        with storage_file:
            storage_file.SetStoreLimit(self._filter_object)

            # TODO: allow for single processing.
            # TODO: add upper queue limit.
            analysis_output_queue = multi_process.MultiProcessingQueue(
                timeout=5)

            if analysis_plugins:
                logging.info(u'Starting analysis plugins.')
                # Within all preprocessing objects, try to get the last one
                # that has time zone information stored in it, since it has
                # the highest chance of containing the information we are
                # seeking (defaulting to the last one).
                pre_objs = storage_file.GetStorageInformation()
                pre_obj = pre_objs[-1]
                for obj in pre_objs:
                    if getattr(obj, u'time_zone_str', u''):
                        pre_obj = obj

                # Fill in the collection information.
                pre_obj.collection_information = {}
                if preferred_encoding:
                    cmd_line = u' '.join(sys.argv)
                    try:
                        pre_obj.collection_information[
                            u'cmd_line'] = cmd_line.decode(preferred_encoding)
                    except UnicodeDecodeError:
                        pass
                pre_obj.collection_information[u'file_processed'] = (
                    self._storage_file)
                pre_obj.collection_information[
                    u'method'] = u'Running Analysis Plugins'
                analysis_plugin_names = [
                    plugin.NAME for plugin in analysis_plugins
                ]
                pre_obj.collection_information[
                    u'plugins'] = analysis_plugin_names
                time_of_run = timelib.Timestamp.GetNow()
                pre_obj.collection_information[u'time_of_run'] = time_of_run

                pre_obj.counter = collections.Counter()

                # Assign the preprocessing object to the storage.
                # This is normally done in the construction of the storage object,
                # however we cannot do that here since the preprocessing object is
                # stored inside the storage file, so we need to open it first to
                # be able to read it in, before we make changes to it. Thus we need
                # to access this protected member of the class.
                # pylint: disable=protected-access
                storage_file._pre_obj = pre_obj

                knowledge_base_object = knowledge_base.KnowledgeBase(
                    pre_obj=pre_obj)

                # Now we need to start all the plugins.
                for analysis_plugin in analysis_plugins:
                    analysis_report_queue_producer = queue.ItemQueueProducer(
                        analysis_output_queue)

                    completion_event = multiprocessing.Event()
                    analysis_mediator_object = analysis_mediator.AnalysisMediator(
                        analysis_report_queue_producer,
                        knowledge_base_object,
                        data_location=self._data_location,
                        completion_event=completion_event)
                    analysis_process = multiprocessing.Process(
                        name=u'Analysis {0:s}'.format(
                            analysis_plugin.plugin_name),
                        target=analysis_plugin.RunPlugin,
                        args=(analysis_mediator_object, ))
                    process_info = PsortAnalysisProcess(
                        completion_event, analysis_plugin, analysis_process)
                    self._analysis_process_info.append(process_info)

                    analysis_process.start()
                    logging.info(u'Plugin: [{0:s}] started.'.format(
                        analysis_plugin.plugin_name))
            else:
                event_queue_producers = []

            output_buffer = output_interface.EventBuffer(
                output_module, deduplicate_events)
            with output_buffer:
                counter = self.ProcessOutput(
                    storage_file,
                    output_buffer,
                    my_filter=self._filter_object,
                    filter_buffer=self._filter_buffer,
                    analysis_queues=event_queue_producers)

            for information in storage_file.GetStorageInformation():
                if hasattr(information, u'counter'):
                    counter[u'Stored Events'] += information.counter[u'total']

            if not self._quiet_mode:
                logging.info(u'Output processing is done.')

            # Get all reports and tags from analysis plugins.
            self._ProcessAnalysisPlugins(analysis_plugins,
                                         analysis_output_queue,
                                         storage_file,
                                         counter,
                                         preferred_encoding=preferred_encoding)

        if self._output_file_object:
            self._output_file_object.close()
            self._output_file_object = None

        if self._filter_object and not counter[u'Limited By']:
            counter[u'Filter By Date'] = (counter[u'Stored Events'] -
                                          counter[u'Events Included'] -
                                          counter[u'Events Filtered Out'])

        return counter
Example #18
    def ProcessStorage(self, options):
        """Open a storage file and processes the events within.

    Args:
      options: the command line arguments (instance of argparse.Namespace).

    Returns:
      A counter (an instance of collections.Counter).

    Raises:
      RuntimeError: if a non-recoverable situation is encountered.
    """
        counter = None

        slice_option = getattr(options, u'slice', None)
        if slice_option:
            timezone = getattr(options, u'timezone', u'UTC')
            if timezone == u'UTC':
                zone = pytz.UTC
            else:
                zone = pytz.timezone(timezone)

            timestamp = timelib.Timestamp.FromTimeString(slice_option,
                                                         timezone=zone)

            # Convert number of minutes to microseconds.
            range_operator = self._slice_size * 60 * 1000000

            # Set the time range.
            pfilter.TimeRangeCache.SetLowerTimestamp(timestamp -
                                                     range_operator)
            pfilter.TimeRangeCache.SetUpperTimestamp(timestamp +
                                                     range_operator)

        analysis_plugins = getattr(options, u'analysis_plugins', u'')
        if analysis_plugins:
            read_only = False
        else:
            read_only = True
        analysis_plugins_output_format = getattr(options,
                                                 u'windows-services-output',
                                                 u'text')

        try:
            storage_file = self.OpenStorageFile(read_only=read_only)
        except IOError as exception:
            raise RuntimeError(
                u'Unable to open storage file: {0:s} with error: {1:s}.'.
                format(self._storage_file_path, exception))

        with storage_file:
            storage_file.SetStoreLimit(self._filter_object)

            formatter_mediator = self.GetFormatterMediator()

            try:
                formatter_mediator.SetPreferredLanguageIdentifier(
                    self._preferred_language)
            except (KeyError, TypeError) as exception:
                raise RuntimeError(exception)

            output_mediator_object = output_mediator.OutputMediator(
                formatter_mediator, storage_file, config=options)

            kwargs = {}
            # TODO: refactor this to use CLI argument helpers.
            if self._output_format in [u'pstorage', u'sql4n6']:
                kwargs[u'filehandle'] = self._output_filename
            elif self._output_format not in [u'elastic', u'timesketch']:
                if self._output_filename:
                    self._output_file_object = open(self._output_filename,
                                                    'wb')
                    kwargs[
                        u'output_writer'] = cli_tools.FileObjectOutputWriter(
                            self._output_file_object)
                else:
                    kwargs[u'output_writer'] = self._output_writer

            try:
                output_module = output_manager.OutputManager.NewOutputModule(
                    self._output_format, output_mediator_object, **kwargs)

            except IOError as exception:
                raise RuntimeError(
                    u'Unable to create output module with error: {0:s}'.format(
                        exception))

            if not output_module:
                raise RuntimeError(u'Missing output module.')

            if analysis_plugins:
                logging.info(u'Starting analysis plugins.')
                # Within all preprocessing objects, try to get the last one
                # that has time zone information stored in it, since it has
                # the highest chance of containing the information we are
                # seeking (defaulting to the last one).
                pre_objs = storage_file.GetStorageInformation()
                pre_obj = pre_objs[-1]
                for obj in pre_objs:
                    if getattr(obj, u'time_zone_str', u''):
                        pre_obj = obj

                # Fill in the collection information.
                pre_obj.collection_information = {}
                encoding = getattr(pre_obj, u'preferred_encoding', None)
                if encoding:
                    cmd_line = u' '.join(sys.argv)
                    try:
                        pre_obj.collection_information[
                            u'cmd_line'] = cmd_line.decode(encoding)
                    except UnicodeDecodeError:
                        pass
                pre_obj.collection_information[u'file_processed'] = (
                    self._storage_file_path)
                pre_obj.collection_information[
                    u'method'] = u'Running Analysis Plugins'
                pre_obj.collection_information[u'plugins'] = analysis_plugins
                time_of_run = timelib.Timestamp.GetNow()
                pre_obj.collection_information[u'time_of_run'] = time_of_run

                pre_obj.counter = collections.Counter()

                # Assign the preprocessing object to the storage.
                # This is normally done in the construction of the storage object,
                # however we cannot do that here since the preprocessing object is
                # stored inside the storage file, so we need to open it first to
                # be able to read it in, before we make changes to it. Thus we need
                # to access this protected member of the class.
                # pylint: disable=protected-access
                storage_file._pre_obj = pre_obj

                # Start queues and load up plugins.
                # TODO: add upper queue limit.
                analysis_output_queue = multi_process.MultiProcessingQueue()
                event_queue_producers = []
                event_queues = []
                analysis_plugins_list = [
                    name.strip() for name in analysis_plugins.split(u',')
                ]

                for _ in xrange(0, len(analysis_plugins_list)):
                    # TODO: add upper queue limit.
                    analysis_plugin_queue = (
                        multi_process.MultiProcessingQueue())
                    event_queues.append(analysis_plugin_queue)
                    event_queue_producers.append(
                        queue.ItemQueueProducer(event_queues[-1]))

                knowledge_base_object = knowledge_base.KnowledgeBase()

                analysis_plugins = analysis_manager.AnalysisPluginManager.LoadPlugins(
                    analysis_plugins_list, event_queues, options=options)

                # Now we need to start all the plugins.
                for analysis_plugin in analysis_plugins:
                    analysis_report_queue_producer = queue.ItemQueueProducer(
                        analysis_output_queue)

                    analysis_mediator_object = analysis_mediator.AnalysisMediator(
                        analysis_report_queue_producer,
                        knowledge_base_object,
                        output_format=analysis_plugins_output_format)
                    analysis_process = multiprocessing.Process(
                        name=u'Analysis {0:s}'.format(
                            analysis_plugin.plugin_name),
                        target=analysis_plugin.RunPlugin,
                        args=(analysis_mediator_object, ))
                    self._analysis_processes.append(analysis_process)

                    analysis_process.start()
                    logging.info(u'Plugin: [{0:s}] started.'.format(
                        analysis_plugin.plugin_name))
            else:
                event_queue_producers = []

            deduplicate_events = getattr(options, u'dedup', True)
            output_buffer = output_interface.EventBuffer(
                output_module, deduplicate_events)
            with output_buffer:
                counter = self.ProcessOutput(
                    storage_file,
                    output_buffer,
                    my_filter=self._filter_object,
                    filter_buffer=self._filter_buffer,
                    analysis_queues=event_queue_producers)

            for information in storage_file.GetStorageInformation():
                if hasattr(information, u'counter'):
                    counter[u'Stored Events'] += information.counter[u'total']

            if not getattr(options, u'quiet', False):
                logging.info(u'Output processing is done.')

            # Get all reports and tags from analysis plugins.
            if analysis_plugins:
                logging.info(u'Processing data from analysis plugins.')
                for event_queue_producer in event_queue_producers:
                    event_queue_producer.SignalEndOfInput()

                # Wait for all analysis plugins to complete.
                for number, analysis_process in enumerate(
                        self._analysis_processes):
                    logging.debug(
                        u'Waiting for analysis plugin: {0:d} to complete.'.
                        format(number))
                    if analysis_process.is_alive():
                        analysis_process.join(10)
                    else:
                        logging.warning(
                            u'Plugin {0:d} already stopped.'.format(number))
                        analysis_process.terminate()
                logging.debug(u'All analysis plugins are now stopped.')

                # Close the output queue.
                analysis_output_queue.SignalEndOfInput()

                # Go over each output.
                analysis_queue_consumer = PsortAnalysisReportQueueConsumer(
                    analysis_output_queue, storage_file,
                    self._filter_expression, self.preferred_encoding)

                analysis_queue_consumer.ConsumeItems()

                if analysis_queue_consumer.tags:
                    storage_file.StoreTagging(analysis_queue_consumer.tags)

                # TODO: analysis_queue_consumer.anomalies:

                for item, value in analysis_queue_consumer.counter.iteritems():
                    counter[item] = value

        if self._output_file_object:
            self._output_file_object.close()
            self._output_file_object = None

        if self._filter_object and not counter[u'Limited By']:
            counter[u'Filter By Date'] = (counter[u'Stored Events'] -
                                          counter[u'Events Included'] -
                                          counter[u'Events Filtered Out'])

        return counter