Exemple #1
0
  def testProcessPathSpecFile(self):
    """Tests that ProcessPathSpec extracts events from a plain file."""
    session = sessions.Session()
    test_knowledge_base_values = {u'year': 2016}

    test_path_spec = self._GetTestFilePathSpec([u'syslog'])
    test_storage_writer = fake_storage.FakeStorageWriter(session)

    self._TestProcessPathSpec(
        test_storage_writer, test_path_spec,
        knowledge_base_values=test_knowledge_base_values)

    # The syslog test file is expected to yield 19 events.
    self.assertEqual(test_storage_writer.number_of_events, 19)
Exemple #2
0
  def testProcessPathSpecCompressedFileBZIP2(self):
    """Tests that ProcessPathSpec handles a bzip2 compressed file."""
    session = sessions.Session()
    test_knowledge_base_values = {'year': 2016}

    bzip2_path_spec = self._GetTestFilePathSpec(['syslog.bz2'])
    test_storage_writer = fake_writer.FakeStorageWriter(session)

    self._TestProcessPathSpec(
        test_storage_writer, bzip2_path_spec,
        knowledge_base_values=test_knowledge_base_values)

    # The compressed syslog file is expected to yield 15 events.
    self.assertEqual(test_storage_writer.number_of_events, 15)
Exemple #3
0
    def _ParseAndAnalyzeFile(self,
                             path_segments,
                             parser,
                             plugin,
                             knowledge_base_values=None):
        """Parses and analyzes a file using the parser and analysis plugin.

        Args:
          path_segments (list[str]): path segments inside the test data
              directory.
          parser (BaseParser): parser.
          plugin (AnalysisPlugin): plugin.
          knowledge_base_values (Optional[dict[str, str]]): knowledge base
              values.

        Returns:
          FakeStorageWriter: storage writer containing the extracted events
              and the compiled analysis report.

        Raises:
          SkipTest: if the path inside the test data directory does not exist
              and the test should be skipped.
        """
        session = sessions.Session()

        knowledge_base_object = self._SetUpKnowledgeBase(
            knowledge_base_values=knowledge_base_values)

        # Extraction pass: parse the test file into the storage writer.
        storage_writer = self._ParseFile(path_segments, parser,
                                         knowledge_base_object)

        mediator = analysis_mediator.AnalysisMediator(session,
                                                      knowledge_base_object)
        mediator.SetStorageWriter(storage_writer)

        # Analysis pass: feed every extracted event, with its resolved event
        # data and event data stream, to the analysis plugin.
        for event in storage_writer.GetSortedEvents():
            event_data = None
            # Events reference their event data by identifier; resolve the
            # identifier back into the attribute container.
            event_data_identifier = event.GetEventDataIdentifier()
            if event_data_identifier:
                event_data = storage_writer.GetAttributeContainerByIdentifier(
                    events.EventData.CONTAINER_TYPE, event_data_identifier)

            event_data_stream = None
            if event_data:
                # The event data may in turn reference an event data stream.
                event_data_stream_identifier = event_data.GetEventDataStreamIdentifier(
                )
                if event_data_stream_identifier:
                    event_data_stream = storage_writer.GetAttributeContainerByIdentifier(
                        events.EventDataStream.CONTAINER_TYPE,
                        event_data_stream_identifier)

            plugin.ExamineEvent(mediator, event, event_data, event_data_stream)

        # Store the plugin's report so callers can assert on it.
        analysis_report = plugin.CompileReport(mediator)
        storage_writer.AddAttributeContainer(analysis_report)

        return storage_writer
Exemple #4
0
    def testGetLatestYear(self):
        """Tests the GetLatestYear function."""
        session = sessions.Session()
        test_storage_writer = fake_writer.FakeStorageWriter(session)
        knowledge_base_instance = knowledge_base.KnowledgeBase()
        test_mediator = mediator.ParserMediator(
            test_storage_writer, knowledge_base_instance)

        # Without any events the latest year falls back to the current year.
        expected = test_mediator.GetCurrentYear()
        self.assertEqual(test_mediator.GetLatestYear(), expected)
Exemple #5
0
  def testAnalyzeEvents(self):
    """Tests the AnalyzeEvents function.

    Runs the analysis engine twice against a copy of the test storage file:
    first without an event filter, then with one.
    """
    storage_file_path = self._GetTestFilePath(['psort_test.plaso'])

    session = sessions.Session()
    knowledge_base_object = knowledge_base.KnowledgeBase()

    formatter_mediator = formatters_mediator.FormatterMediator()
    formatter_mediator.SetPreferredLanguageIdentifier('en-US')

    output_mediator_object = output_mediator.OutputMediator(
        knowledge_base_object, formatter_mediator)

    output_module = null.NullOutputModule(output_mediator_object)

    data_location = ''
    analysis_plugin = tagging.TaggingAnalysisPlugin()
    analysis_plugins = {'tagging': analysis_plugin}
    # TODO: set tag file.

    configuration = configurations.ProcessingConfiguration()

    test_engine = psort.PsortMultiProcessEngine()

    with shared_test_lib.TempDirectory() as temp_directory:
      temp_file = os.path.join(temp_directory, 'storage.plaso')
      shutil.copyfile(storage_file_path, temp_file)

      storage_writer = storage_factory.StorageFactory.CreateStorageWriter(
          definitions.DEFAULT_STORAGE_FORMAT, session, temp_file)

      counter = test_engine.AnalyzeEvents(
          knowledge_base_object, storage_writer, output_module, data_location,
          analysis_plugins, configuration)

    # TODO: assert if tests were successful.
    _ = counter

    test_filter = filters_test_lib.TestEventFilter()

    with shared_test_lib.TempDirectory() as temp_directory:
      temp_file = os.path.join(temp_directory, 'storage.plaso')
      shutil.copyfile(storage_file_path, temp_file)

      storage_writer = storage_factory.StorageFactory.CreateStorageWriter(
          definitions.DEFAULT_STORAGE_FORMAT, session, temp_file)

      # The output_module argument was missing here, shifting every following
      # positional argument by one; pass it explicitly, matching the first
      # invocation above.
      counter = test_engine.AnalyzeEvents(
          knowledge_base_object, storage_writer, output_module, data_location,
          analysis_plugins, configuration, event_filter=test_filter)

    # TODO: assert if tests were successful.
    _ = counter
Exemple #6
0
  def testProcessPathSpec(self):
    """Tests the ProcessPathSpec function on an archive file."""
    knowledge_base_values = {'year': 2016}
    session = sessions.Session()

    test_file_path = self._GetTestFilePath(['syslog.tar'])
    self._SkipIfPathNotExists(test_file_path)

    # Build a path specification that targets the syslog member inside the
    # tar archive, so only that archive member is processed.
    path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
    path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_TAR, location='/syslog',
        parent=path_spec)

    storage_writer = fake_writer.FakeStorageWriter(session)

    expected_event_counters = {
        'fs:stat': 1,
        'syslog:cron:task_run': 3,
        'syslog:line': 9}

    self._TestProcessPathSpec(
        storage_writer, path_spec, expected_event_counters,
        knowledge_base_values=knowledge_base_values)

    # Process an archive file without "process archive files" mode.
    path_spec = self._GetTestFilePathSpec(['syslog.tar'])
    storage_writer = fake_writer.FakeStorageWriter(session)

    # Typically there are 3 filestat events, but there can be 4 on platforms
    # that support os.stat_result st_birthtime.
    expected_event_counters = {
        'fs:stat': [3, 4]}

    self._TestProcessPathSpec(
        storage_writer, path_spec, expected_event_counters,
        knowledge_base_values=knowledge_base_values)

    # Process an archive file with "process archive files" mode.
    path_spec = self._GetTestFilePathSpec(['syslog.tar'])
    storage_writer = fake_writer.FakeStorageWriter(session)

    # Typically there are 3 filestat events, but there can be 4 on platforms
    # that support os.stat_result st_birthtime. There is 1 additional filestat
    # event from the .tar file.
    expected_event_counters = {
        'fs:stat': [4, 5],
        'syslog:cron:task_run': 3,
        'syslog:line': 9}

    self._TestProcessPathSpec(
        storage_writer, path_spec, expected_event_counters,
        knowledge_base_values=knowledge_base_values, process_archives=True)
Exemple #7
0
  def testCopyAttributesFromSessionCompletion(self):
    """Tests the CopyAttributesFromSessionCompletion function."""
    test_session = sessions.Session()

    # A completion carrying the session's own identifier copies cleanly.
    matching_completion = sessions.SessionCompletion(
        identifier=test_session.identifier)
    test_session.CopyAttributesFromSessionCompletion(matching_completion)

    # A completion with a different identifier must raise ValueError.
    with self.assertRaises(ValueError):
      mismatched_completion = sessions.SessionCompletion()
      test_session.CopyAttributesFromSessionCompletion(mismatched_completion)
Exemple #8
0
    def testAnalyzeEvents(self):
        """Tests the AnalyzeEvents function.

        Runs the analysis engine twice against a copy of the test storage
        file: first without an event filter, then with one.
        """
        storage_file_path = self._GetTestFilePath([u'psort_test.json.plaso'])

        session = sessions.Session()
        knowledge_base_object = knowledge_base.KnowledgeBase()

        formatter_mediator = formatters_mediator.FormatterMediator()
        formatter_mediator.SetPreferredLanguageIdentifier(u'en-US')

        output_mediator_object = output_mediator.OutputMediator(
            knowledge_base_object, formatter_mediator)

        output_module = null.NullOutputModule(output_mediator_object)

        data_location = u''
        analysis_plugin = tagging.TaggingAnalysisPlugin()
        # TODO: set tag file.

        test_engine = psort.PsortMultiProcessEngine()

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, u'storage.plaso')
            shutil.copyfile(storage_file_path, temp_file)

            storage_writer = storage_zip_file.ZIPStorageFileWriter(
                session, temp_file)

            counter = test_engine.AnalyzeEvents(knowledge_base_object,
                                                storage_writer, output_module,
                                                data_location,
                                                [analysis_plugin])

        # TODO: assert if tests were successful.
        _ = counter

        test_filter = filters_test_lib.TestEventFilter()

        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, u'storage.plaso')
            shutil.copyfile(storage_file_path, temp_file)

            storage_writer = storage_zip_file.ZIPStorageFileWriter(
                session, temp_file)

            # The output_module argument was missing here, shifting every
            # following positional argument by one; pass it explicitly,
            # matching the first invocation above.
            counter = test_engine.AnalyzeEvents(knowledge_base_object,
                                                storage_writer, output_module,
                                                data_location,
                                                [analysis_plugin],
                                                event_filter=test_filter)

        # TODO: assert if tests were successful.
        _ = counter
Exemple #9
0
    def testProducePreprocessingWarning(self):
        """Tests the ProducePreprocessingWarning method."""
        session = sessions.Session()
        test_storage_writer = fake_writer.FakeStorageWriter()
        knowledge_base_instance = knowledge_base.KnowledgeBase()
        preprocess_mediator = mediator.PreprocessMediator(
            session, test_storage_writer, knowledge_base_instance)

        test_storage_writer.Open()

        # Producing a warning on an open storage writer should not raise.
        preprocess_mediator.ProducePreprocessingWarning(
            'test_plugin', 'test message')
Exemple #10
0
    def _ParseOLECFFileWithPlugin(self,
                                  path_segments,
                                  plugin,
                                  codepage='cp1252',
                                  knowledge_base_values=None):
        """Parses a file as an OLE compound file and returns an event generator.

        Args:
          path_segments (list[str]): path segments inside the test data
              directory.
          plugin (OLECFPlugin): OLE CF plugin.
          codepage (Optional[str]): codepage.
          knowledge_base_values (Optional[dict[str, object]]): knowledge base
              values.

        Returns:
          FakeStorageWriter: storage writer.

        Raises:
          SkipTest: if the path inside the test data directory does not exist
              and the test should be skipped.
        """
        session = sessions.Session()
        storage_writer = fake_writer.FakeStorageWriter(session)
        storage_writer.Open()

        file_entry = self._GetTestFileEntry(path_segments)
        parser_mediator = self._CreateParserMediator(
            storage_writer,
            file_entry=file_entry,
            knowledge_base_values=knowledge_base_values)

        file_object = file_entry.GetFileObject()

        try:
            olecf_file = pyolecf.file()
            olecf_file.set_ascii_codepage(codepage)
            olecf_file.open_file_object(file_object)

            try:
                # Get a list of all root items from the OLE CF file.
                root_item = olecf_file.root_item
                item_names = [item.name for item in root_item.sub_items]

                plugin.Process(parser_mediator,
                               root_item=root_item,
                               item_names=item_names)

            finally:
                # Close the pyolecf handle even when Process() raises;
                # previously an exception left the OLE CF file open.
                olecf_file.close()

        finally:
            file_object.close()

        return storage_writer
Exemple #11
0
    def testAnalyzeEventsWithEventFilter(self):
        """Tests the AnalyzeEvents function with an event filter."""
        test_file_path = self._GetTestFilePath(['psort_test.plaso'])
        self._SkipIfPathNotExists(test_file_path)

        test_tagging_file_path = self._GetTestFilePath(
            ['tagging_file', 'valid.txt'])
        self._SkipIfPathNotExists(test_tagging_file_path)

        session = sessions.Session()
        knowledge_base_object = knowledge_base.KnowledgeBase()

        data_location = ''

        analysis_plugin = tagging.TaggingAnalysisPlugin()
        analysis_plugin.SetAndLoadTagFile(test_tagging_file_path)

        analysis_plugins = {'tagging': analysis_plugin}

        configuration = configurations.ProcessingConfiguration()
        test_engine = analysis_engine.AnalysisMultiProcessEngine()
        test_filter = filters_test_lib.TestEventFilter()

        # Analyze a copy of the test storage file so the original stays
        # unmodified.
        with shared_test_lib.TempDirectory() as temp_directory:
            temp_file = os.path.join(temp_directory, 'storage.plaso')
            shutil.copyfile(test_file_path, temp_file)

            storage_writer = storage_factory.StorageFactory.CreateStorageWriter(
                definitions.DEFAULT_STORAGE_FORMAT)

            storage_writer.Open(path=temp_file)

            try:
                # The test storage file starts with 2 analysis reports.
                number_of_reports = storage_writer.GetNumberOfAttributeContainers(
                    'analysis_report')
                self.assertEqual(number_of_reports, 2)

                test_engine.AnalyzeEvents(session,
                                          knowledge_base_object,
                                          storage_writer,
                                          data_location,
                                          analysis_plugins,
                                          configuration,
                                          event_filter=test_filter,
                                          storage_file_path=temp_directory)

                # The tagging plugin is expected to add exactly one report.
                number_of_reports = storage_writer.GetNumberOfAttributeContainers(
                    'analysis_report')
                self.assertEqual(number_of_reports, 3)

            finally:
                storage_writer.Close()
Exemple #12
0
  def _ParseDatabaseFileWithPlugin(
      self, path_segments, plugin, knowledge_base_values=None,
      wal_path_segments=None):
    """Parses a file as a SQLite database with a specific plugin.

    This method will first test if a SQLite database contains the required
    tables and columns using plugin.CheckRequiredTablesAndColumns() and then
    extracts events using plugin.Process().

    Args:
      path_segments (list[str]): path segments inside the test data directory.
      plugin (SQLitePlugin): SQLite database plugin.
      knowledge_base_values (Optional[dict[str, object]]): knowledge base
          values.
      wal_path_segments (list[str]): path segments inside the test data
          directory of the SQLite WAL file.

    Returns:
      FakeStorageWriter: storage writer.

    Raises:
      SkipTest: if the path inside the test data directory does not exist and
          the test should be skipped.
    """
    session = sessions.Session()
    storage_writer = fake_writer.FakeStorageWriter()
    storage_writer.Open()

    file_entry, database = self._OpenDatabaseFile(
        path_segments, wal_path_segments=wal_path_segments)

    # Verify the plugin's schema requirements before extraction so a schema
    # mismatch fails the test with a clear assertion.
    required_tables_and_column_exist = plugin.CheckRequiredTablesAndColumns(
        database)
    self.assertTrue(required_tables_and_column_exist)

    parser_mediator = self._CreateParserMediator(
        session, storage_writer, file_entry=file_entry,
        knowledge_base_values=knowledge_base_values)

    parser_mediator.SetFileEntry(file_entry)

    # AppendToParserChain needs to be run after SetFileEntry.
    parser_mediator.AppendToParserChain(plugin)

    try:
      cache = sqlite.SQLiteCache()

      plugin.Process(parser_mediator, cache=cache, database=database)
    finally:
      # Always release the database, even if Process() raises.
      database.Close()

    return storage_writer
Exemple #13
0
  def testGetDisplayNameForPathSpec(self):
    """Tests the GetDisplayNameForPathSpec function."""
    session = sessions.Session()
    test_storage_writer = fake_storage.FakeStorageWriter(session)
    test_mediator = self._CreateParserMediator(test_storage_writer)

    test_path = self._GetTestFilePath([u'syslog.gz'])
    os_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)

    # An OS path specification is rendered as "OS:<path>".
    display_name = test_mediator.GetDisplayNameForPathSpec(os_path_spec)
    self.assertEqual(display_name, u'OS:{0:s}'.format(test_path))
Exemple #14
0
  def testAddEventSource(self):
    """Tests the AddEventSource function."""
    session = sessions.Session()
    test_event_source = event_sources.EventSource()

    test_storage_writer = fake_storage.FakeStorageWriter(session)
    test_storage_writer.Open()
    test_storage_writer.AddEventSource(test_event_source)
    test_storage_writer.Close()

    # Adding to a closed storage writer must raise IOError.
    with self.assertRaises(IOError):
      test_storage_writer.AddEventSource(test_event_source)
Exemple #15
0
    def testAddAnalysisWarning(self):
        """Tests the AddAnalysisWarning function."""
        session = sessions.Session()
        test_warning = warnings.AnalysisWarning(
            message='Test analysis warning')

        test_storage_writer = fake_writer.FakeStorageWriter(session)
        test_storage_writer.Open()
        test_storage_writer.AddAnalysisWarning(test_warning)
        test_storage_writer.Close()

        # Adding to a closed storage writer must raise IOError.
        with self.assertRaises(IOError):
            test_storage_writer.AddAnalysisWarning(test_warning)
Exemple #16
0
    def testParseHeader(self):
        """Tests the _ParseHeader function."""
        # Build a fake file entry backing an empty cups_ipp file; only the
        # file entry metadata is needed, the data is supplied per test case.
        file_system_builder = fake_file_system_builder.FakeFileSystemBuilder()
        file_system_builder.AddFile('/cups_ipp', b'')

        test_path_spec = fake_path_spec.FakePathSpec(location='/cups_ipp')
        test_file_entry = file_system_builder.file_system.GetFileEntryByPathSpec(
            test_path_spec)

        session = sessions.Session()

        storage_writer = self._CreateStorageWriter()
        parser_mediator = self._CreateParserMediator(
            session, storage_writer, file_entry=test_file_entry)

        parser = cups_ipp.CupsIppParser()

        # Test with a well-formed header; this should not raise.
        header_data = self._CreateHeaderData(parser)
        file_object = self._CreateFileObject('cups_ipp', header_data)

        parser._ParseHeader(parser_mediator, file_object)

        # Test with header data too small.
        file_object = self._CreateFileObject('cups_ipp', header_data[:-1])

        with self.assertRaises(errors.UnableToParseFile):
            parser._ParseHeader(parser_mediator, file_object)

        # Test with unsupported format version.
        header_map = parser._GetDataTypeMap('cups_ipp_header')

        header = header_map.CreateStructureValues(major_version=99,
                                                  minor_version=1,
                                                  operation_identifier=5,
                                                  request_identifier=0)
        header_data = header_map.FoldByteStream(header)
        file_object = self._CreateFileObject('cups_ipp', header_data)

        with self.assertRaises(errors.UnableToParseFile):
            parser._ParseHeader(parser_mediator, file_object)

        # Test with unsupported operation identifier.
        header = header_map.CreateStructureValues(major_version=1,
                                                  minor_version=1,
                                                  operation_identifier=99,
                                                  request_identifier=0)
        header_data = header_map.FoldByteStream(header)
        file_object = self._CreateFileObject('cups_ipp', header_data)

        parser._ParseHeader(parser_mediator, file_object)
Exemple #17
0
  def testInternalAnalyzeEvents(self):
    """Tests the _AnalyzeEvents function."""
    test_file_path = self._GetTestFilePath(['psort_test.plaso'])
    self._SkipIfPathNotExists(test_file_path)

    test_tagging_file_path = self._GetTestFilePath([
        'tagging_file', 'valid.txt'])
    self._SkipIfPathNotExists(test_tagging_file_path)

    session = sessions.Session()
    knowledge_base_object = knowledge_base.KnowledgeBase()

    analysis_plugin = tagging.TaggingAnalysisPlugin()
    analysis_plugin.SetAndLoadTagFile(test_tagging_file_path)

    analysis_plugins = {'tagging': analysis_plugin}

    configuration = configurations.ProcessingConfiguration()
    test_engine = analysis_engine.AnalysisMultiProcessEngine()

    # Analyze a copy of the test storage file so the original stays
    # unmodified.
    with shared_test_lib.TempDirectory() as temp_directory:
      temp_file = os.path.join(temp_directory, 'storage.plaso')
      shutil.copyfile(test_file_path, temp_file)

      self._ReadSessionConfiguration(temp_file, knowledge_base_object)

      storage_writer = storage_factory.StorageFactory.CreateStorageWriter(
          definitions.DEFAULT_STORAGE_FORMAT)

      # _AnalyzeEvents is internal, so the engine state it expects is set up
      # by hand before calling it.
      test_engine._processing_configuration = configuration
      test_engine._session = session

      test_engine._storage_file_path = temp_directory
      test_engine._StartTaskStorage(definitions.STORAGE_FORMAT_SQLITE)

      test_engine._StartAnalysisProcesses(analysis_plugins)

      storage_writer.Open(path=temp_file)

      try:
        events_counter = test_engine._AnalyzeEvents(
            storage_writer, analysis_plugins)
      finally:
        storage_writer.Close()

      # Shut down the analysis worker processes started above.
      test_engine._StopAnalysisProcesses()

    # Without an event filter every event in the test file is processed.
    self.assertIsNotNone(events_counter)
    self.assertEqual(events_counter['Events filtered'], 0)
    self.assertEqual(events_counter['Events processed'], 38)
Exemple #18
0
    def testProcessSources(self):
        """Tests the ProcessSources function."""
        test_artifacts_path = self._GetTestFilePath(['artifacts'])
        self._SkipIfPathNotExists(test_artifacts_path)

        test_file_path = self._GetTestFilePath(['ímynd.dd'])
        self._SkipIfPathNotExists(test_file_path)

        # Load the artifact definitions the preprocessing step relies on.
        registry = artifacts_registry.ArtifactDefinitionsRegistry()
        reader = artifacts_reader.YamlArtifactsReader()
        registry.ReadFromDirectory(reader, test_artifacts_path)

        test_engine = extraction_engine.SingleProcessEngine()
        resolver_context = context.Context()

        # Point the source path specification at the root of the TSK file
        # system inside the test disk image.
        os_path_spec = path_spec_factory.Factory.NewPathSpec(
            dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
        source_path_spec = path_spec_factory.Factory.NewPathSpec(
            dfvfs_definitions.TYPE_INDICATOR_TSK,
            location='/',
            parent=os_path_spec)

        session = sessions.Session()

        # Restrict extraction to the filestat parser so the expected counters
        # below are deterministic.
        configuration = configurations.ProcessingConfiguration()
        configuration.parser_filter_expression = 'filestat'

        storage_writer = fake_writer.FakeStorageWriter()
        storage_writer.Open()

        try:
            test_engine.PreprocessSources(registry, [source_path_spec],
                                          session, storage_writer)

            test_engine.ProcessSources(session, [source_path_spec],
                                       storage_writer, resolver_context,
                                       configuration)

        finally:
            storage_writer.Close()

        self.assertEqual(storage_writer.number_of_events, 15)
        self.assertEqual(storage_writer.number_of_extraction_warnings, 0)
        self.assertEqual(storage_writer.number_of_recovery_warnings, 0)

        expected_parsers_counter = collections.Counter({
            'filestat': 15,
            'total': 15
        })
        self.assertEqual(session.parsers_counter, expected_parsers_counter)
Exemple #19
0
    def _ParseKeyWithPlugin(self,
                            registry_key,
                            plugin,
                            file_entry=None,
                            knowledge_base_values=None,
                            parser_chain=None,
                            timezone='UTC'):
        """Parses a key within a Windows Registry file using the plugin.

        Args:
          registry_key (dfwinreg.WinRegistryKey): Windows Registry Key.
          plugin (WindowsRegistryPlugin): Windows Registry plugin.
          file_entry (Optional[dfvfs.FileEntry]): file entry.
          knowledge_base_values (Optional[dict[str, str]]): knowledge base
              values.
          parser_chain (Optional[str]): parsing chain up to this point.
          timezone (Optional[str]): timezone.

        Returns:
          FakeStorageWriter: storage writer containing the extracted events.
        """
        self.assertIsNotNone(registry_key)

        session = sessions.Session()
        storage_writer = fake_writer.FakeStorageWriter()
        storage_writer.Open()

        parser_mediator = self._CreateParserMediator(
            session,
            storage_writer,
            file_entry=file_entry,
            knowledge_base_values=knowledge_base_values,
            timezone=timezone)

        # Most tests aren't explicitly checking for parser chain values,
        # or setting them, so we'll just append the plugin name if no explicit
        # parser chain argument is supplied.
        if parser_chain is None:
            # AppendToParserChain needs to be run after SetFileEntry.
            parser_mediator.AppendToParserChain(plugin)

        else:
            # In the rare case that a test is checking for a particular chain, we
            # provide a way set it directly. There's no public API for this,
            # as access to the parser chain should be very infrequent.
            parser_mediator._parser_chain_components = parser_chain.split('/')

        plugin.Process(parser_mediator, registry_key)

        return storage_writer
Exemple #20
0
    def testAnalyzeFileObject(self):
        """Tests the _AnalyzeFileObject function."""
        knowledge_base_values = {'year': 2016}
        session = sessions.Session()

        storage_writer = fake_writer.FakeStorageWriter(session)

        # Seed the knowledge base with the test values.
        knowledge_base_object = knowledge_base.KnowledgeBase()
        if knowledge_base_values:
            for identifier, value in knowledge_base_values.items():
                knowledge_base_object.SetValue(identifier, value)

        resolver_context = context.Context()
        mediator = parsers_mediator.ParserMediator(
            storage_writer,
            knowledge_base_object,
            preferred_year=2016,
            resolver_context=resolver_context)

        extraction_worker = worker.EventExtractionWorker()

        # Install a test analyzer so its results can be checked on the event
        # data stream afterwards.
        test_analyzer = analyzers_manager_test.TestAnalyzer()
        self.assertEqual(len(test_analyzer.GetResults()), 0)

        extraction_worker._analyzers = [test_analyzer]

        storage_writer.Open()
        storage_writer.WriteSessionStart()

        file_entry = self._GetTestFileEntry(['ímynd.dd'])
        mediator.SetFileEntry(file_entry)

        file_object = file_entry.GetFileObject()
        display_name = mediator.GetDisplayName()
        event_data_stream = events.EventDataStream()

        try:
            extraction_worker._AnalyzeFileObject(file_object, display_name,
                                                 event_data_stream)
        finally:
            # Release the file object even if analysis raises.
            file_object.close()

        storage_writer.WriteSessionCompletion()
        storage_writer.Close()

        self.assertIsNotNone(event_data_stream)

        # The test analyzer is expected to attach its result to the event
        # data stream.
        event_attribute = getattr(event_data_stream, 'test_result', None)
        self.assertEqual(event_attribute, 'is_vegetable')
Exemple #21
0
  def testAddAnalysisReport(self):
    """Tests the AddAnalysisReport function."""
    session = sessions.Session()
    test_report = reports.AnalysisReport(
        plugin_name='test', text='test report')

    test_storage_writer = fake_storage.FakeStorageWriter(session)
    test_storage_writer.Open()
    test_storage_writer.AddAnalysisReport(test_report)
    test_storage_writer.Close()

    # Adding to a closed storage writer must raise IOError.
    with self.assertRaises(IOError):
      test_storage_writer.AddAnalysisReport(test_report)
Exemple #22
0
  def testProcessTask(self):
    """Tests the _ProcessTask function."""
    session = sessions.Session()
    test_storage_writer = self._CreateStorageWriter(session)
    test_knowledge_base = self._CreateKnowledgeBase()
    processing_configuration = configurations.ProcessingConfiguration()

    test_worker = worker_process.WorkerProcess(
        None, test_storage_writer, test_knowledge_base, session.identifier,
        processing_configuration, name='TestWorker')
    test_worker._parser_mediator = self._CreateParserMediator(
        test_storage_writer, test_knowledge_base)

    # Processing a task bound to the session should not raise.
    test_task = tasks.Task(session_identifier=session.identifier)
    test_worker._ProcessTask(test_task)
Exemple #23
0
    def testMergeFromStorage(self):
        """Tests the MergeFromStorage function."""
        session = sessions.Session()
        test_storage_writer = fake_storage.FakeStorageWriter(session)
        test_storage_writer.Open()

        # Merge both test storage files in sequence.
        for file_name in (u'psort_test.json.plaso', u'pinfo_test.json.plaso'):
            test_file = self._GetTestFilePath([file_name])
            test_reader = zip_file.ZIPStorageFileReader(test_file)
            test_storage_writer.MergeFromStorage(test_reader)

        test_storage_writer.Close()
Exemple #24
0
  def testAddError(self):
    """Tests the AddError function."""
    session = sessions.Session()
    test_error = errors.ExtractionError(
        message='Test extraction error')

    test_storage_writer = fake_storage.FakeStorageWriter(session)
    test_storage_writer.Open()
    test_storage_writer.AddError(test_error)
    test_storage_writer.Close()

    # Adding to a closed storage writer must raise IOError.
    with self.assertRaises(IOError):
      test_storage_writer.AddError(test_error)
Exemple #25
0
  def testGetSortedEvents(self):
    """Tests the GetSortedEvents function."""
    session = sessions.Session()
    expected_events = self._CreateTestEvents()

    test_storage_writer = fake_writer.FakeStorageWriter(session)
    test_storage_writer.Open()

    for test_event in expected_events:
      test_storage_writer.AddEvent(test_event)

    # Every stored event should come back from the sorted iterator.
    sorted_events = list(test_storage_writer.GetSortedEvents())
    self.assertEqual(len(sorted_events), len(expected_events))

    test_storage_writer.Close()
Exemple #26
0
    def _TagEvent(self, event, event_data, event_data_stream):
        """Tags an event.

        Args:
          event (Event): event.
          event_data (EventData): event data.
          event_data_stream (EventDataStream): event data stream.

        Returns:
          FakeStorageWriter: storage writer containing the tagged event and
              the compiled analysis report.

        Raises:
          SkipTest: if the tag file does not exist.
        """
        tag_file_path = self._GetDataFilePath([self._TAG_FILE])
        self._SkipIfPathNotExists(tag_file_path)

        session = sessions.Session()

        storage_writer = fake_writer.FakeStorageWriter()
        storage_writer.Open()

        # Store the containers and wire up their identifier references:
        # event -> event data -> event data stream.
        if event_data_stream:
            storage_writer.AddAttributeContainer(event_data_stream)
            event_data_stream_identifier = event_data_stream.GetIdentifier()
            event_data.SetEventDataStreamIdentifier(
                event_data_stream_identifier)

        storage_writer.AddAttributeContainer(event_data)
        event_data_identifier = event_data.GetIdentifier()
        event.SetEventDataIdentifier(event_data_identifier)

        storage_writer.AddAttributeContainer(event)

        knowledge_base_object = knowledge_base.KnowledgeBase()

        mediator = analysis_mediator.AnalysisMediator(session,
                                                      knowledge_base_object)
        mediator.SetStorageWriter(storage_writer)

        # Run the tagging plugin against the single stored event.
        plugin = tagging.TaggingAnalysisPlugin()
        plugin.SetAndLoadTagFile(tag_file_path)
        plugin.ExamineEvent(mediator, event, event_data, event_data_stream)

        # Store the plugin's report so callers can assert on it.
        analysis_report = plugin.CompileReport(mediator)
        storage_writer.AddAttributeContainer(analysis_report)

        return storage_writer
Exemple #27
0
    def _ParseOLECFFileWithPlugin(self,
                                  path_segments,
                                  plugin,
                                  codepage=u'cp1252',
                                  knowledge_base_values=None):
        """Parses a file as an OLE compound file using a specific plugin.

    Args:
      path_segments: a list of strings containing the path segments inside
                     the test data directory.
      plugin: an OLE CF plugin object (instance of OLECFPlugin).
      codepage: optional string containing the codepage.
      knowledge_base_values: optional dictionary containing the knowledge base
                             values.

    Returns:
      A storage writer object (instance of FakeStorageWriter).
    """
        session = sessions.Session()
        storage_writer = fake_storage.FakeStorageWriter(session)
        storage_writer.Open()

        file_entry = self._GetTestFileEntry(path_segments)
        parser_mediator = self._CreateParserMediator(
            storage_writer,
            file_entry=file_entry,
            knowledge_base_values=knowledge_base_values)

        file_object = file_entry.GetFileObject()

        try:
            olecf_file = pyolecf.file()
            olecf_file.set_ascii_codepage(codepage)
            olecf_file.open_file_object(file_object)

            try:
                # Get a list of all root items from the OLE CF file.
                root_item = olecf_file.root_item
                item_names = [item.name for item in root_item.sub_items]

                plugin.Process(parser_mediator,
                               root_item=root_item,
                               item_names=item_names)

            finally:
                # Close the OLE CF file even when plugin processing raises;
                # previously a plugin error leaked the open OLE CF handle.
                olecf_file.close()

        finally:
            file_object.close()

        return storage_writer
Exemple #28
0
    def testAnalyzeDataStream(self):
        """Tests the _AnalyzeDataStream function."""
        knowledge_base_values = {'year': 2016}
        test_session = sessions.Session()

        writer = fake_writer.FakeStorageWriter()

        kb_object = knowledge_base.KnowledgeBase()
        if knowledge_base_values:
            for identifier, value in knowledge_base_values.items():
                kb_object.SetValue(identifier, value)

        resolver_context = context.Context()
        parser_mediator = parsers_mediator.ParserMediator(
            kb_object, resolver_context=resolver_context)
        parser_mediator.SetPreferredYear(2016)
        parser_mediator.SetStorageWriter(writer)

        extraction_worker = worker.EventExtractionWorker()

        test_analyzer = analyzers_manager_test.TestAnalyzer()
        self.assertEqual(len(test_analyzer.GetResults()), 0)

        # Inject the test analyzer so the worker runs it on the data stream.
        extraction_worker._analyzers = [test_analyzer]

        writer.Open()
        writer.AddAttributeContainer(test_session.CreateSessionStart())

        file_entry = self._GetTestFileEntry(['syslog.tgz'])
        parser_mediator.SetFileEntry(file_entry)

        display_name = parser_mediator.GetDisplayName()
        event_data_stream = events.EventDataStream()

        extraction_worker._AnalyzeDataStream(
            file_entry, '', display_name, event_data_stream)

        writer.AddAttributeContainer(test_session.CreateSessionCompletion())
        writer.Close()

        self.assertIsNotNone(event_data_stream)

        # The test analyzer is expected to have stamped its result onto the
        # event data stream.
        self.assertEqual(
            getattr(event_data_stream, 'test_result', None), 'is_vegetable')
Exemple #29
0
  def testAddEvent(self):
    """Tests the AddEvent function."""
    test_session = sessions.Session()
    test_events = self._CreateTestEvents()

    writer = fake_storage.FakeStorageWriter(test_session)
    writer.Open()

    last_event = None
    for last_event in test_events:
      writer.AddEvent(last_event)

    writer.Close()

    # Adding an event after Close must raise IOError.
    with self.assertRaises(IOError):
      writer.AddEvent(last_event)
Exemple #30
0
    def testGetAttributeContainers(self):
        """Tests the GetAttributeContainers method."""
        redis_client = self._CreateRedisClient()

        test_session = sessions.Session()
        test_task = tasks.Task(session_identifier=test_session.identifier)

        store = redis_store.RedisStore(
            storage_type=definitions.STORAGE_TYPE_TASK)
        store.Open(
            redis_client=redis_client,
            session_identifier=test_task.session_identifier,
            task_identifier=test_task.identifier)

        try:
            event_data_stream = events.EventDataStream()
            event_data_stream.md5_hash = '8f0bf95a7959baad9666b21a7feed79d'

            # The store starts out without any event data stream containers.
            self.assertEqual(
                len(list(store.GetAttributeContainers(
                    event_data_stream.CONTAINER_TYPE))), 0)

            store.AddAttributeContainer(event_data_stream)

            self.assertEqual(
                len(list(store.GetAttributeContainers(
                    event_data_stream.CONTAINER_TYPE))), 1)

            # A filter matching the stored hash yields the container.
            matching_filter = 'md5_hash == "8f0bf95a7959baad9666b21a7feed79d"'
            self.assertEqual(
                len(list(store.GetAttributeContainers(
                    event_data_stream.CONTAINER_TYPE,
                    filter_expression=matching_filter))), 1)

            # A filter excluding the stored hash yields nothing.
            non_matching_filter = (
                'md5_hash != "8f0bf95a7959baad9666b21a7feed79d"')
            self.assertEqual(
                len(list(store.GetAttributeContainers(
                    event_data_stream.CONTAINER_TYPE,
                    filter_expression=non_matching_filter))), 0)

        finally:
            store.Close()

            self._RemoveSessionData(redis_client, test_session.identifier)