Example #1
0
    def testFlush(self):
        """Tests that Flush drains the buffer and duplicates are merged."""
        output_mediator = self._CreateOutputMediator()
        writer = cli_test_lib.TestOutputWriter()
        module = test_lib.TestOutputModule(output_mediator)
        module.SetOutputWriter(writer)
        buffer_under_test = event_buffer.EventBuffer(module, False)

        # One appended event results in a buffer length of one.
        buffer_under_test.Append(TestEvent(123456, u'Now is now'))
        self._CheckBufferLength(buffer_under_test, 1)

        # Appending two identical events plus one distinct event leaves only
        # two entries — the duplicate is merged into the existing entry.
        buffer_under_test.Append(TestEvent(123456, u'OMG I AM DIFFERENT'))
        buffer_under_test.Append(TestEvent(123456, u'Now is now'))
        buffer_under_test.Append(TestEvent(123456, u'Now is now'))
        self._CheckBufferLength(buffer_under_test, 2)

        # Flush empties the buffer completely.
        buffer_under_test.Flush()
        self._CheckBufferLength(buffer_under_test, 0)

        # Refill with two distinct entries, then append an event with a newer
        # timestamp; the buffer ends up holding a single entry.
        buffer_under_test.Append(TestEvent(123456, u'Now is now'))
        buffer_under_test.Append(TestEvent(123456, u'Now is now'))
        buffer_under_test.Append(TestEvent(123456, u'Different again :)'))
        self._CheckBufferLength(buffer_under_test, 2)
        buffer_under_test.Append(TestEvent(123457, u'Now is different'))
        self._CheckBufferLength(buffer_under_test, 1)
Example #2
0
  def testFlush(self):
    """Tests that flushing empties the buffer and duplicates collapse."""
    output_mediator = self._CreateOutputMediator()
    test_writer = cli_test_lib.TestOutputWriter()
    test_module = test_lib.TestOutputModule(output_mediator)
    test_module.SetOutputWriter(test_writer)
    buffer_object = event_buffer.EventBuffer(test_module, False)

    # A single event yields a single buffered entry.
    buffer_object.Append(TestEvent(123456, u'Now is now'))
    self._CheckBufferLength(buffer_object, 1)

    # Three more appends, two of which duplicate the existing entry, leave
    # exactly two buffered entries.
    buffer_object.Append(TestEvent(123456, u'OMG I AM DIFFERENT'))
    buffer_object.Append(TestEvent(123456, u'Now is now'))
    buffer_object.Append(TestEvent(123456, u'Now is now'))
    self._CheckBufferLength(buffer_object, 2)

    # After a flush nothing remains buffered.
    buffer_object.Flush()
    self._CheckBufferLength(buffer_object, 0)

    # Two distinct entries, then an append with a newer timestamp leaves the
    # buffer with only one entry.
    buffer_object.Append(TestEvent(123456, u'Now is now'))
    buffer_object.Append(TestEvent(123456, u'Now is now'))
    buffer_object.Append(TestEvent(123456, u'Different again :)'))
    self._CheckBufferLength(buffer_object, 2)
    buffer_object.Append(TestEvent(123457, u'Now is different'))
    self._CheckBufferLength(buffer_object, 1)
Example #3
0
    def ExportEvents(self,
                     knowledge_base_object,
                     storage_reader,
                     output_module,
                     deduplicate_events=True,
                     event_filter=None,
                     status_update_callback=None,
                     time_slice=None,
                     use_time_slicer=False):
        """Exports events from storage through an output module.

        Args:
          knowledge_base_object (KnowledgeBase): contains information from
              the source data needed for processing.
          storage_reader (StorageReader): storage reader.
          output_module (OutputModule): output module.
          deduplicate_events (Optional[bool]): True if events should be
              deduplicated.
          event_filter (Optional[FilterObject]): event filter.
          status_update_callback (Optional[function]): callback function for
              status updates.
          time_slice (Optional[TimeSlice]): slice of time to output.
          use_time_slicer (Optional[bool]): True if the 'time slicer' should
              be used. The 'time slicer' will provide a context of events
              around an event of interest.

        Returns:
          collections.Counter: counter that tracks the number of events
              extracted from storage.
        """
        self._status_update_callback = status_update_callback

        storage_reader.ReadPreprocessingInformation(knowledge_base_object)

        output_buffer = output_event_buffer.EventBuffer(
            output_module, deduplicate_events)

        self._StartStatusUpdateThread()

        try:
            with output_buffer:
                counter = self._ExportEvents(
                    storage_reader, output_buffer, event_filter=event_filter,
                    time_slice=time_slice, use_time_slicer=use_time_slicer)

        finally:
            # The status update thread is stopped only after the storage has
            # been closed so the storage sync to disk shows in the updates.
            self._StopStatusUpdateThread()

        # Reset the callback so stale references are not kept around.
        self._status_update_callback = None

        return counter
Example #4
0
    def testOutput(self):
        """Tests the Output function."""
        test_filename = os.path.join(u'test_data', u'psort_test.json.plaso')

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, u'pstorage.plaso')

            source_storage = storage_zip_file.StorageFile(
                test_filename, read_only=True)
            with storage_zip_file.ZIPStorageFileReader(
                    source_storage) as storage_reader:

                mediator = self._CreateOutputMediator(
                    storage_file=source_storage)
                module = pstorage.PlasoStorageOutputModule(mediator)
                module.SetFilePath(temp_file)

                # Copy every event from the source storage into the pstorage
                # dump via the output buffer.
                with event_buffer.EventBuffer(
                        module, check_dedups=False) as output_buffer:
                    for event_object in storage_reader.GetEvents():
                        output_buffer.Append(event_object)

            original_file = storage_zip_file.StorageFile(
                test_filename, read_only=True)
            dumped_file = storage_zip_file.StorageFile(
                temp_file, read_only=True)

            original_strings = []
            dumped_strings = []

            original_entry = original_file.GetSortedEntry()
            dumped_entry = dumped_file.GetSortedEntry()
            while original_entry:
                original_equality = original_entry.EqualityString()
                dumped_equality = dumped_entry.EqualityString()

                # Strip the trailing UUID segment for comparison.
                original_equality, _, _ = original_equality.rpartition(u'|')
                dumped_equality, _, _ = dumped_equality.rpartition(u'|')

                original_strings.append(original_equality)
                dumped_strings.append(dumped_equality)

                original_entry = original_file.GetSortedEntry()
                dumped_entry = dumped_file.GetSortedEntry()

            # Both storages should be exhausted at the same time.
            self.assertFalse(dumped_entry)

            for original_string, dumped_string in zip(
                    sorted(original_strings), sorted(dumped_strings)):
                self.assertEqual(original_string, dumped_string)
Example #5
0
    def testOutput(self):
        """Tests the Output function."""
        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, u'pstorage.plaso')

            # Copy every event from the test storage into a pstorage dump.
            source_storage = storage_zip_file.StorageFile(
                self._test_filename, read_only=True)

            with storage_zip_file.ZIPStorageFileReader(
                    source_storage) as storage_reader:

                mediator = self._CreateOutputMediator(
                    storage_file=source_storage)
                module = pstorage.PlasoStorageOutputModule(mediator)
                module.SetFilePath(temp_file)

                with event_buffer.EventBuffer(
                        module, check_dedups=False) as output_buffer:
                    for event_object in storage_reader.GetEvents():
                        output_buffer.Append(event_object)

            # Verify that the original storage and the dump contain the same
            # events, compared by their equality strings.
            original_file = storage_zip_file.StorageFile(
                self._test_filename, read_only=True)
            dumped_file = storage_zip_file.StorageFile(
                temp_file, read_only=True)
            original_entry = original_file.GetSortedEntry()
            dumped_entry = dumped_file.GetSortedEntry()
            original_strings = []
            dumped_strings = []

            while original_entry:
                original_strings.append(original_entry.EqualityString())
                dumped_strings.append(dumped_entry.EqualityString())
                original_entry = original_file.GetSortedEntry()
                dumped_entry = dumped_file.GetSortedEntry()

            # The dump should be exhausted exactly when the original is.
            self.assertFalse(dumped_entry)

            for original_string, dumped_string in zip(
                    sorted(original_strings), sorted(dumped_strings)):
                self.assertEqual(original_string, dumped_string)
Example #6
0
    def ProcessStorage(self,
                       output_module,
                       storage_file,
                       storage_file_path,
                       analysis_plugins,
                       event_queue_producers,
                       command_line_arguments=None,
                       deduplicate_events=True,
                       preferred_encoding=u'utf-8',
                       time_slice=None,
                       use_time_slicer=False):
        """Processes a plaso storage file.

    Args:
      output_module: an output module (instance of OutputModule).
      storage_file: the storage file object (instance of StorageFile).
      storage_file_path: string containing the path of the storage file.
      analysis_plugins: list of analysis plugin objects (instance of
                        AnalysisPlugin).
      event_queue_producers: list of event queue producer objects (instance
                             of ItemQueueProducer).
      command_line_arguments: optional string of the command line arguments or
                              None if not set.
      deduplicate_events: optional boolean value to indicate if the event
                          objects should be deduplicated.
      preferred_encoding: optional preferred encoding.
      time_slice: optional time slice object (instance of TimeSlice).
      use_time_slicer: optional boolean value to indicate the 'time slicer'
                       should be used. The 'time slicer' will provide a
                       context of events around an event of interest.

    Returns:
      A counter (an instance of collections.Counter) that tracks the number of
      events extracted from storage, and the analysis plugin results.

    Raises:
      RuntimeError: if a non-recoverable situation is encountered.
    """
        # Note: a previous revision reset time_slice to None here, which made
        # the branch below dead code and silently ignored the caller's
        # time_slice and use_time_slicer arguments.
        if time_slice:
            if time_slice.event_timestamp is not None:
                # Narrow processing to the requested time range.
                time_slice = storage_time_range.TimeRange(
                    time_slice.start_timestamp, time_slice.end_timestamp)

            elif use_time_slicer:
                # Keep a rolling window of events to provide context around
                # events of interest.
                self._filter_buffer = bufferlib.CircularBuffer(
                    time_slice.duration)

        with storage_file:
            # TODO: allow for single processing.
            # TODO: add upper queue limit.
            analysis_queue_port = None
            if self._use_zeromq:
                analysis_report_incoming_queue = zeromq_queue.ZeroMQPullBindQueue(
                    delay_open=False, port=None, linger_seconds=5)
                analysis_queue_port = analysis_report_incoming_queue.port
            else:
                analysis_report_incoming_queue = multi_process.MultiProcessingQueue(
                    timeout=5)

            pre_obj = self._GetLastGoodPreprocess(storage_file)
            if pre_obj is None:
                pre_obj = event.PreprocessObject()

            if analysis_plugins:
                self._StartAnalysisPlugins(
                    storage_file_path,
                    analysis_plugins,
                    pre_obj,
                    analysis_queue_port=analysis_queue_port,
                    analysis_report_incoming_queue=
                    analysis_report_incoming_queue,
                    command_line_arguments=command_line_arguments)

                # Assign the preprocessing object to the storage.
                # This is normally done in the construction of the storage object,
                # however we cannot do that here since the preprocessing object is
                # stored inside the storage file, so we need to open it first to
                # be able to read it in, before we make changes to it. Thus we need
                # to access this protected member of the class.
                # pylint: disable=protected-access
                storage_file._pre_obj = pre_obj
            else:
                event_queue_producers = []

            output_buffer = output_event_buffer.EventBuffer(
                output_module, deduplicate_events)
            with output_buffer:
                counter = self.ProcessEventsFromStorage(
                    storage_file,
                    output_buffer,
                    analysis_queues=event_queue_producers,
                    filter_buffer=self._filter_buffer,
                    my_filter=self._filter_object,
                    time_slice=time_slice)

            for information in storage_file.GetStorageInformation():
                if hasattr(information, u'counter'):
                    counter[u'Stored Events'] += information.counter[u'total']

            if not self._quiet_mode:
                logging.info(u'Output processing is done.')

            # Get all reports and tags from analysis plugins.
            self._ProcessAnalysisPlugins(analysis_plugins,
                                         analysis_report_incoming_queue,
                                         storage_file,
                                         counter,
                                         preferred_encoding=preferred_encoding)

        if self._filter_object and not counter[u'Limited By']:
            counter[u'Filter By Date'] = (counter[u'Stored Events'] -
                                          counter[u'Events Included'] -
                                          counter[u'Events Filtered Out'])

        return counter