Example #1
def CreateEventFromValues(event_values):
  """Creates an event and event data from event values.

  Args:
    event_values (dict[str, str]): event values.

  Returns:
    tuple[EventObject, EventData, EventDataStream]: event, event data and
        event data stream for testing.
  """
  copy_of_event_values = dict(event_values)

  event = events.EventObject()
  for attribute_name in ('timestamp', 'timestamp_desc'):
    attribute_value = copy_of_event_values.pop(attribute_name, None)
    if attribute_value is not None:
      if attribute_name == 'timestamp' and isinstance(attribute_value, str):
        attribute_value = shared_test_lib.CopyTimestampFromString(
            attribute_value)
      setattr(event, attribute_name, attribute_value)

  event.date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=event.timestamp)

  event_data_stream = events.EventDataStream()
  for attribute_name in ('path_spec', 'md5_hash', 'sha256_hash'):
    attribute_value = copy_of_event_values.pop(attribute_name, None)
    if attribute_value is not None:
      setattr(event_data_stream, attribute_name, attribute_value)

  event_data = events.EventData()
  event_data.CopyFromDict(copy_of_event_values)

  return event, event_data, event_data_stream
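
A minimal usage sketch (the event values below are illustrative, not taken from the original tests): a test builds a plain dictionary of event values and unpacks the three returned containers, much like Example #5 below does via containers_test_lib.CreateEventFromValues.

# Hypothetical event values; the timestamp may be given as a date and time
# string, which CreateEventFromValues converts through
# shared_test_lib.CopyTimestampFromString. The 'timestamp_desc' value here is
# an arbitrary string; real tests use the constants in plaso.lib.definitions.
event_values = {
    'data_type': 'test:event',
    'hostname': 'ubuntu',
    'timestamp': '2012-06-27 18:17:01',
    'timestamp_desc': 'Unknown Time'}

event, event_data, event_data_stream = CreateEventFromValues(event_values)
# The event carries the timestamp values, the event data the remaining
# attributes, and the event data stream stays empty for these values.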
Example #2
    def testPrintStorageInformationAsJSON(self):
        """Tests the PrintStorageInformation function with JSON output format."""
        test_filename = 'pinfo_test.plaso'
        session_identifier = '17c2f64c-ff4c-493d-b79d-18f31deaf7d5'
        session_start_time = '2021-11-21 16:57:49.936026'

        test_file_path = self._GetTestFilePath([test_filename])
        self._SkipIfPathNotExists(test_file_path)

        options = test_lib.TestOptions()
        options.storage_file = test_file_path
        options.output_format = 'json'
        options.sections = 'events,reports,sessions,warnings'

        output_writer = test_lib.TestOutputWriter(encoding='utf-8')
        test_tool = pinfo_tool.PinfoTool(output_writer=output_writer)
        test_tool.ParseOptions(options)

        test_tool.PrintStorageInformation()
        output = output_writer.ReadOutput()
        json_output = json.loads(output)

        sessions = json_output.get('sessions')
        self.assertIsNotNone(sessions)

        first_session = sessions.get('session')
        self.assertIsNotNone(first_session)

        self.assertEqual(first_session['identifier'],
                         session_identifier.replace('-', ''))

        expected_start_time = shared_test_lib.CopyTimestampFromString(
            session_start_time)
        self.assertEqual(first_session['start_time'], expected_start_time)

        storage_counters = json_output.get('storage_counters')
        self.assertIsNotNone(storage_counters)

        parsers_counter = storage_counters['parsers']
        self.assertIsNotNone(parsers_counter)
        self.assertEqual(parsers_counter['total'], 3)
        self.assertEqual(parsers_counter['filestat'], 3)
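
A side note on the identifier assertion above: the session identifier is stored as the 32-character hexadecimal form of the UUID, which is why the test strips the dashes before comparing. The same conversion can be done with only the standard library (a standalone sketch, not part of the test):

import uuid

session_identifier = '17c2f64c-ff4c-493d-b79d-18f31deaf7d5'

# uuid.UUID(...).hex yields the same 32-character string as removing the
# dashes from the canonical dashed form.
assert uuid.UUID(session_identifier).hex == session_identifier.replace('-', '')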
Example #3
    def testPrintStorageInformationAsJSON(self):
        """Tests the PrintStorageInformation function with JSON output format."""
        test_filename = 'pinfo_test.plaso'
        session_identifier = '678d3612-feac-4de7-b929-0bd3260a9365'
        session_start_time = '2021-06-23 07:42:30.094310'

        test_file_path = self._GetTestFilePath([test_filename])
        self._SkipIfPathNotExists(test_file_path)

        options = test_lib.TestOptions()
        options.storage_file = test_file_path
        options.output_format = 'json'
        options.sections = 'events,reports,sessions,warnings'

        output_writer = test_lib.TestOutputWriter(encoding='utf-8')
        test_tool = pinfo_tool.PinfoTool(output_writer=output_writer)
        test_tool.ParseOptions(options)

        test_tool.PrintStorageInformation()
        output = output_writer.ReadOutput()
        json_output = json.loads(output)

        sessions = json_output.get('sessions')
        self.assertIsNotNone(sessions)

        first_session = sessions.get('session')
        self.assertIsNotNone(first_session)

        self.assertEqual(first_session['identifier'],
                         session_identifier.replace('-', ''))

        expected_start_time = shared_test_lib.CopyTimestampFromString(
            session_start_time)
        self.assertEqual(first_session['start_time'], expected_start_time)

        parsers_counter = first_session['parsers_counter']
        self.assertEqual(parsers_counter['total'], 3)
        self.assertEqual(parsers_counter['filestat'], 3)
Example #4
class TaggingFileTestCase(shared_test_lib.BaseTestCase):
    """The unit test case for a tagging file."""

    _TAG_FILE = None

    _TEST_TIMESTAMP = shared_test_lib.CopyTimestampFromString(
        '2020-04-04 14:56:39')

    def _CheckLabels(self, storage_writer, expected_labels):
        """Checks the labels of tagged events.

        Args:
          storage_writer (FakeStorageWriter): storage writer used for testing.
          expected_labels (list[str]): expected labels.
        """
        labels = []
        for event_tag in storage_writer.GetAttributeContainers(
                events.EventTag.CONTAINER_TYPE):
            labels.extend(event_tag.labels)

        labels = set(labels)
        expected_labels = set(expected_labels)

        self.assertEqual(len(labels), len(expected_labels))
        self.assertEqual(sorted(labels), sorted(expected_labels))

    def _CheckTaggingRule(self, event_data_class, attribute_values_per_name,
                          expected_rule_names):
        """Tests a tagging rule.

        Args:
          event_data_class (type): class of the event data object to use in
              tests.
          attribute_values_per_name (dict[str, list[str]]): event data
              attribute values per attribute name, to use for testing events
              that match the tagging rule.
          expected_rule_names (list[str]): expected rule names.
        """
        event = events.EventObject()
        event.timestamp = self._TEST_TIMESTAMP
        event.timestamp_desc = definitions.TIME_DESCRIPTION_UNKNOWN

        if not attribute_values_per_name:
            event_data = event_data_class()
            event_data.parser = 'test'
            storage_writer = self._TagEvent(event, event_data, None)

            self.assertEqual(storage_writer.number_of_event_tags,
                             len(expected_rule_names))
            self._CheckLabels(storage_writer, expected_rule_names)

        else:
            maximum_number_of_attribute_values = max([
                len(attribute_values)
                for attribute_values in attribute_values_per_name.values()
            ])

            # Test if variations defined by the attribute_values_per_name match
            # the tagging rule.
            for test_index in range(maximum_number_of_attribute_values):
                # Create the test event data and set the attributes to one of
                # the test values.
                event_data = event_data_class()
                event_data.parser = 'test'
                for attribute_name, attribute_values in (
                        attribute_values_per_name.items()):
                    attribute_value_index = min(test_index,
                                                len(attribute_values) - 1)
                    attribute_value = attribute_values[attribute_value_index]
                    setattr(event_data, attribute_name, attribute_value)

                storage_writer = self._TagEvent(event, event_data, None)

                self.assertEqual(storage_writer.number_of_event_tags,
                                 len(expected_rule_names))
                self._CheckLabels(storage_writer, expected_rule_names)

            # Test if bogus variations on attribute_values_per_name do not match
            # the tagging rule.
            for test_attribute_name in attribute_values_per_name.keys():
                # Create the test event data and set the attributes to one of
                # the test values.
                event_data = event_data_class()
                event_data.parser = 'test'
                for attribute_name, attribute_values in (
                        attribute_values_per_name.items()):
                    if attribute_name == test_attribute_name:
                        attribute_value = 'BOGUS'
                    else:
                        attribute_value = attribute_values[0]
                    setattr(event_data, attribute_name, attribute_value)

                storage_writer = self._TagEvent(event, event_data, None)

                self.assertEqual(storage_writer.number_of_event_tags, 0)
                self._CheckLabels(storage_writer, [])

    def _TagEvent(self, event, event_data, event_data_stream):
        """Tags an event.

        Args:
          event (EventObject): event.
          event_data (EventData): event data.
          event_data_stream (EventDataStream): event data stream.

        Returns:
          FakeStorageWriter: storage writer.

        Raises:
          SkipTest: if the tag file does not exist.
        """
        tag_file_path = self._GetDataFilePath([self._TAG_FILE])
        self._SkipIfPathNotExists(tag_file_path)

        session = sessions.Session()

        storage_writer = fake_writer.FakeStorageWriter()
        storage_writer.Open()

        if event_data_stream:
            storage_writer.AddAttributeContainer(event_data_stream)
            event_data_stream_identifier = event_data_stream.GetIdentifier()
            event_data.SetEventDataStreamIdentifier(
                event_data_stream_identifier)

        storage_writer.AddAttributeContainer(event_data)
        event_data_identifier = event_data.GetIdentifier()
        event.SetEventDataIdentifier(event_data_identifier)

        storage_writer.AddAttributeContainer(event)

        knowledge_base_object = knowledge_base.KnowledgeBase()

        mediator = analysis_mediator.AnalysisMediator(session, storage_writer,
                                                      knowledge_base_object)

        plugin = tagging.TaggingAnalysisPlugin()
        plugin.SetAndLoadTagFile(tag_file_path)
        plugin.ExamineEvent(mediator, event, event_data, event_data_stream)

        analysis_report = plugin.CompileReport(mediator)
        storage_writer.AddAttributeContainer(analysis_report)

        return storage_writer
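
A minimal sketch of a concrete subclass, assuming a hypothetical tag file and event data class (neither tag_example.txt nor ExampleEventData exists in plaso; they only illustrate the pattern):

class ExampleTaggingFileTest(TaggingFileTestCase):
    """Tests for the hypothetical tag_example.txt tagging file."""

    _TAG_FILE = 'tag_example.txt'

    def testExampleRule(self):
        """Tests the hypothetical example_rule tagging rule."""
        # Each key maps to attribute values that should match the rule;
        # _CheckTaggingRule also verifies that bogus values do not match.
        attribute_values_per_name = {
            'body': ['COMMAND=/usr/sbin/example']}
        self._CheckTaggingRule(
            ExampleEventData, attribute_values_per_name, ['example_rule'])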
Example #5
    def testWriteSerializedDict(self):
        """Tests the _WriteSerializedDict function."""
        output_mediator = self._CreateOutputMediator()

        formatters_directory_path = self._GetTestFilePath(['formatters'])
        output_mediator.ReadMessageFormattersFromDirectory(
            formatters_directory_path)

        formatting_helper = shared_json.JSONEventFormattingHelper(
            output_mediator)

        event, event_data, event_data_stream = (
            containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))

        expected_timestamp = shared_test_lib.CopyTimestampFromString(
            '2012-06-27 18:17:01')

        if sys.platform.startswith('win'):
            # The dict comparison is very picky on Windows hence we
            # have to make sure the drive letter is in the same case.
            expected_os_location = os.path.abspath('\\{0:s}'.format(
                os.path.join('cases', 'image.dd')))
        else:
            expected_os_location = '{0:s}{1:s}'.format(
                os.path.sep, os.path.join('cases', 'image.dd'))

        expected_json_dict = {
            '__container_type__': 'event',
            '__type__': 'AttributeContainer',
            'date_time': {
                '__class_name__': 'PosixTimeInMicroseconds',
                '__type__': 'DateTimeValues',
                'timestamp': 1340821021000000,
            },
            'data_type': 'test:event',
            'display_name': 'TSK:/var/log/syslog.1',
            'filename': '/var/log/syslog.1',
            'hostname': 'ubuntu',
            'inode': '15',
            'message': (
                'Reporter <CRON> PID: |8442| (pam_unix(cron:session): session '
                'closed for user root)'),
            'pathspec': {
                '__type__': 'PathSpec',
                'type_indicator': 'TSK',
                'location': '/var/log/syslog.1',
                'inode': 15,
                'parent': {
                    '__type__': 'PathSpec',
                    'type_indicator': 'OS',
                    'location': expected_os_location,
                }
            },
            'text': (
                'Reporter <CRON> PID: |8442| (pam_unix(cron:session): '
                'session\n closed for user root)'),
            'timestamp': expected_timestamp,
            'timestamp_desc': definitions.TIME_DESCRIPTION_UNKNOWN,
            'username': '******',
        }
        json_dict = formatting_helper._WriteSerializedDict(
            event, event_data, event_data_stream, None)

        self.assertEqual(json_dict, expected_json_dict)
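
The literal 1340821021000000 in the expected dictionary is simply '2012-06-27 18:17:01' UTC expressed as microseconds since the POSIX epoch, presumably the same value shared_test_lib.CopyTimestampFromString returns. A quick standalone check with the standard library:

import datetime

date_time = datetime.datetime(
    2012, 6, 27, 18, 17, 1, tzinfo=datetime.timezone.utc)

# 2012-06-27 18:17:01 UTC is 1340821021 seconds after the POSIX epoch,
# or 1340821021000000 microseconds.
assert int(date_time.timestamp()) * 1000000 == 1340821021000000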