def testGetAttributeContainerByIndex(self):
  """Tests retrieval of an attribute container by its index."""
  data_stream = events.EventDataStream()

  with shared_test_lib.TempDirectory() as temp_dir:
    storage_path = os.path.join(temp_dir, 'plaso.sqlite')
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path, read_only=False)

    try:
      # An empty store has no container at index 0.
      self.assertIsNone(store.GetAttributeContainerByIndex(
          data_stream.CONTAINER_TYPE, 0))

      store.AddAttributeContainer(data_stream)

      self.assertIsNotNone(store.GetAttributeContainerByIndex(
          data_stream.CONTAINER_TYPE, 0))

      # An unsupported container type raises IOError.
      with self.assertRaises(IOError):
        store.GetAttributeContainerByIndex('bogus', 0)

    finally:
      store.Close()
def testGetAttributeContainerByIdentifier(self):
  """Tests retrieval of an attribute container by its identifier."""
  data_stream = events.EventDataStream()

  with shared_test_lib.TempDirectory() as temp_dir:
    storage_path = os.path.join(temp_dir, 'plaso.sqlite')
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path, read_only=False)

    try:
      store.AddAttributeContainer(data_stream)
      identifier = data_stream.GetIdentifier()

      self.assertIsNotNone(store.GetAttributeContainerByIdentifier(
          data_stream.CONTAINER_TYPE, identifier))

      # An identifier that does not exist in the store yields None.
      identifier.sequence_number = 99
      self.assertIsNone(store.GetAttributeContainerByIdentifier(
          data_stream.CONTAINER_TYPE, identifier))

    finally:
      store.Close()
def testAddAttributeContainer(self):
  """Tests adding an attribute container to the SQLite store."""
  data_stream = events.EventDataStream()

  with shared_test_lib.TempDirectory() as temp_dir:
    storage_path = os.path.join(temp_dir, 'plaso.sqlite')
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path, read_only=False)

    try:
      self.assertEqual(
          store.GetNumberOfAttributeContainers(data_stream.CONTAINER_TYPE), 0)

      store.AddAttributeContainer(data_stream)

      self.assertEqual(
          store.GetNumberOfAttributeContainers(data_stream.CONTAINER_TYPE), 1)

    finally:
      store.Close()

    # Adding a container to a closed store raises IOError.
    with self.assertRaises(IOError):
      store.AddAttributeContainer(data_stream)
def testCacheAttributeContainerByIndex(self):
  """Tests the _CacheAttributeContainerByIndex function.

  Note: the original test wrapped this in a TempDirectory context manager
  that was never used — the store is never opened and touches no files —
  so the context manager has been removed.
  """
  event_data_stream = events.EventDataStream()

  test_store = sqlite_file.SQLiteStorageFile()

  # The cache starts out empty.
  self.assertEqual(len(test_store._attribute_container_cache), 0)

  test_store._CacheAttributeContainerByIndex(event_data_stream, 0)

  # Caching one container adds exactly one cache entry.
  self.assertEqual(len(test_store._attribute_container_cache), 1)
def testGetAttributeNames(self):
  """Tests enumerating the attribute names of an event data stream."""
  container = events.EventDataStream()

  expected_names = [
      'file_entropy', 'md5_hash', 'path_spec', 'sha1_hash', 'sha256_hash',
      'yara_match']

  # GetAttributeNames has no ordering guarantee tested here; sort first.
  self.assertEqual(sorted(container.GetAttributeNames()), expected_names)
def testGetAttributeContainerNextSequenceNumber(self):
  """Tests sequence number allocation per container type."""
  data_stream = events.EventDataStream()

  store = interface.BaseStore()

  # Sequence numbers start at 1 and increase by 1 per allocation.
  for expected_number in (1, 2):
    sequence_number = store._GetAttributeContainerNextSequenceNumber(
        data_stream.CONTAINER_TYPE)
    self.assertEqual(sequence_number, expected_number)
def testCreateAttributeContainerTable(self):
  """Tests the _CreateAttributeContainerTable function."""
  event_data_stream = events.EventDataStream()

  with shared_test_lib.TempDirectory() as temp_directory:
    test_path = os.path.join(temp_directory, 'plaso.sqlite')
    test_store = sqlite_file.SQLiteStorageFile()
    test_store.Open(path=test_path, read_only=False)

    try:
      # Creating a table that already exists raises IOError.
      with self.assertRaises(IOError):
        test_store._CreateAttributeContainerTable(
            event_data_stream.CONTAINER_TYPE)

    finally:
      # Close the store even when the assertion fails, matching the
      # try/finally pattern used by the sibling tests.
      test_store.Close()
def testAnalyzeFileObject(self):
  """Tests the _AnalyzeFileObject function."""
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter(session)

  # Seed the knowledge base with the test values.
  knowledge_base_object = knowledge_base.KnowledgeBase()
  for identifier, value in {'year': 2016}.items():
    knowledge_base_object.SetValue(identifier, value)

  resolver_context = context.Context()
  parser_mediator = parsers_mediator.ParserMediator(
      storage_writer, knowledge_base_object, preferred_year=2016,
      resolver_context=resolver_context)

  extraction_worker = worker.EventExtractionWorker()

  analyzer = analyzers_manager_test.TestAnalyzer()
  self.assertEqual(len(analyzer.GetResults()), 0)

  extraction_worker._analyzers = [analyzer]

  storage_writer.Open()
  storage_writer.WriteSessionStart()

  file_entry = self._GetTestFileEntry(['ímynd.dd'])
  parser_mediator.SetFileEntry(file_entry)

  file_object = file_entry.GetFileObject()
  display_name = parser_mediator.GetDisplayName()
  event_data_stream = events.EventDataStream()

  try:
    extraction_worker._AnalyzeFileObject(
        file_object, display_name, event_data_stream)
  finally:
    # Close the file object even if analysis fails.
    file_object.close()

  storage_writer.WriteSessionCompletion()
  storage_writer.Close()

  # The test analyzer is expected to have tagged the data stream.
  self.assertIsNotNone(event_data_stream)
  self.assertEqual(
      getattr(event_data_stream, 'test_result', None), 'is_vegetable')
def testAnalyzeDataStream(self):
  """Tests the _AnalyzeDataStream function."""
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter()

  # Seed the knowledge base with the test values.
  knowledge_base_object = knowledge_base.KnowledgeBase()
  for identifier, value in {'year': 2016}.items():
    knowledge_base_object.SetValue(identifier, value)

  resolver_context = context.Context()
  parser_mediator = parsers_mediator.ParserMediator(
      knowledge_base_object, resolver_context=resolver_context)
  parser_mediator.SetPreferredYear(2016)
  parser_mediator.SetStorageWriter(storage_writer)

  extraction_worker = worker.EventExtractionWorker()

  analyzer = analyzers_manager_test.TestAnalyzer()
  self.assertEqual(len(analyzer.GetResults()), 0)

  extraction_worker._analyzers = [analyzer]

  storage_writer.Open()
  storage_writer.AddAttributeContainer(session.CreateSessionStart())

  file_entry = self._GetTestFileEntry(['syslog.tgz'])
  parser_mediator.SetFileEntry(file_entry)

  display_name = parser_mediator.GetDisplayName()
  event_data_stream = events.EventDataStream()

  # Analyze the default (unnamed) data stream of the file entry.
  extraction_worker._AnalyzeDataStream(
      file_entry, '', display_name, event_data_stream)

  storage_writer.AddAttributeContainer(session.CreateSessionCompletion())
  storage_writer.Close()

  # The test analyzer is expected to have tagged the data stream.
  self.assertIsNotNone(event_data_stream)
  self.assertEqual(
      getattr(event_data_stream, 'test_result', None), 'is_vegetable')
def testHasAttributeContainers(self):
  """Tests the HasAttributeContainers function."""
  event_data_stream = events.EventDataStream()

  test_store = fake_store.FakeStore()
  test_store.Open()

  try:
    result = test_store.HasAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertFalse(result)

    test_store.AddAttributeContainer(event_data_stream)

    result = test_store.HasAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertTrue(result)

  finally:
    # Close the store even when an assertion fails, matching the
    # try/finally pattern used by the sibling tests.
    test_store.Close()
def testGetCachedAttributeContainer(self):
  """Tests the _GetCachedAttributeContainer function.

  Note: the original test wrapped this in a TempDirectory context manager
  that was never used — the store is never opened and touches no files —
  so the context manager has been removed.
  """
  event_data_stream = events.EventDataStream()

  test_store = sqlite_file.SQLiteStorageFile()

  # A container that was never cached cannot be retrieved.
  attribute_container = test_store._GetCachedAttributeContainer(
      event_data_stream.CONTAINER_TYPE, 1)
  self.assertIsNone(attribute_container)

  test_store._CacheAttributeContainerByIndex(event_data_stream, 1)

  attribute_container = test_store._GetCachedAttributeContainer(
      event_data_stream.CONTAINER_TYPE, 1)
  self.assertIsNotNone(attribute_container)
def testGetAttributeContainers(self):
  """Tests the GetAttributeContainers method."""
  redis_client = self._CreateRedisClient()

  session = sessions.Session()
  task = tasks.Task(session_identifier=session.identifier)

  store = redis_store.RedisStore(storage_type=definitions.STORAGE_TYPE_TASK)
  store.Open(
      redis_client=redis_client, session_identifier=task.session_identifier,
      task_identifier=task.identifier)

  try:
    data_stream = events.EventDataStream()
    data_stream.md5_hash = '8f0bf95a7959baad9666b21a7feed79d'

    # The store starts out without containers of this type.
    containers = list(
        store.GetAttributeContainers(data_stream.CONTAINER_TYPE))
    self.assertEqual(len(containers), 0)

    store.AddAttributeContainer(data_stream)

    containers = list(
        store.GetAttributeContainers(data_stream.CONTAINER_TYPE))
    self.assertEqual(len(containers), 1)

    # A matching filter expression selects the container.
    containers = list(store.GetAttributeContainers(
        data_stream.CONTAINER_TYPE,
        filter_expression='md5_hash == "8f0bf95a7959baad9666b21a7feed79d"'))
    self.assertEqual(len(containers), 1)

    # A non-matching filter expression selects nothing.
    containers = list(store.GetAttributeContainers(
        data_stream.CONTAINER_TYPE,
        filter_expression='md5_hash != "8f0bf95a7959baad9666b21a7feed79d"'))
    self.assertEqual(len(containers), 0)

  finally:
    store.Close()

    self._RemoveSessionData(redis_client, session.identifier)
def testWriteNewAttributeContainer(self):
  """Tests the _WriteNewAttributeContainer function."""
  event_data_stream = events.EventDataStream()

  test_store = fake_store.FakeStore()
  test_store.Open()

  try:
    number_of_containers = test_store.GetNumberOfAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertEqual(number_of_containers, 0)

    test_store._WriteNewAttributeContainer(event_data_stream)

    number_of_containers = test_store.GetNumberOfAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertEqual(number_of_containers, 1)

  finally:
    # Close the store even when an assertion fails, matching the
    # try/finally pattern used by the sibling tests.
    test_store.Close()
def testGetAttributeContainerByIndex(self):
  """Tests the GetAttributeContainerByIndex function."""
  event_data_stream = events.EventDataStream()

  test_store = fake_store.FakeStore()
  test_store.Open()

  try:
    container = test_store.GetAttributeContainerByIndex(
        event_data_stream.CONTAINER_TYPE, 0)
    self.assertIsNone(container)

    test_store.AddAttributeContainer(event_data_stream)

    container = test_store.GetAttributeContainerByIndex(
        event_data_stream.CONTAINER_TYPE, 0)
    self.assertIsNotNone(container)

  finally:
    # Close the store even when an assertion fails, matching the
    # try/finally pattern used by the sibling tests.
    test_store.Close()
def testGetAttributeContainers(self):
  """Tests the GetAttributeContainers function."""
  data_stream = events.EventDataStream()
  data_stream.md5_hash = '8f0bf95a7959baad9666b21a7feed79d'

  with shared_test_lib.TempDirectory() as temp_dir:
    storage_path = os.path.join(temp_dir, 'plaso.sqlite')
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path, read_only=False)

    try:
      # The store starts out without containers of this type.
      containers = list(
          store.GetAttributeContainers(data_stream.CONTAINER_TYPE))
      self.assertEqual(len(containers), 0)

      store.AddAttributeContainer(data_stream)

      containers = list(
          store.GetAttributeContainers(data_stream.CONTAINER_TYPE))
      self.assertEqual(len(containers), 1)

      # A matching filter expression selects the container.
      containers = list(store.GetAttributeContainers(
          data_stream.CONTAINER_TYPE,
          filter_expression='md5_hash == "8f0bf95a7959baad9666b21a7feed79d"'))
      self.assertEqual(len(containers), 1)

      # A non-matching filter expression selects nothing.
      containers = list(store.GetAttributeContainers(
          data_stream.CONTAINER_TYPE,
          filter_expression='md5_hash != "8f0bf95a7959baad9666b21a7feed79d"'))
      self.assertEqual(len(containers), 0)

      # An unsupported container type raises IOError.
      with self.assertRaises(IOError):
        list(store.GetAttributeContainers('bogus'))

    finally:
      store.Close()
def _ParseFile(self, path_segments, parser, knowledge_base_object):
  """Parses a file using the parser.

  Args:
    path_segments (list[str]): path segments inside the test data directory.
    parser (BaseParser): parser.
    knowledge_base_object (KnowledgeBase): knowledge base.

  Returns:
    FakeStorageWriter: storage writer.

  Raises:
    SkipTest: if the path inside the test data directory does not exist and
        the test should be skipped.
  """
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter(session)
  storage_writer.Open()

  parser_mediator = parsers_mediator.ParserMediator(
      storage_writer, knowledge_base_object)

  file_entry = self._GetTestFileEntry(path_segments)
  parser_mediator.SetFileEntry(file_entry)

  # The event data stream is produced before parsing — presumably so the
  # mediator can relate parsed events to it; confirm against the mediator
  # implementation.
  event_data_stream = events.EventDataStream()
  parser_mediator.ProduceEventDataStream(event_data_stream)

  # Dispatch on the parser type: a file-entry parser works through the
  # mediator alone, a file-object parser needs an explicitly opened
  # file object.
  if isinstance(parser, parsers_interface.FileEntryParser):
    parser.Parse(parser_mediator)

  elif isinstance(parser, parsers_interface.FileObjectParser):
    file_object = file_entry.GetFileObject()
    try:
      parser.Parse(parser_mediator, file_object)
    finally:
      # Close the file object even when parsing fails.
      file_object.close()

  else:
    self.fail('Got unexpected parser type: {0!s}'.format(type(parser)))

  return storage_writer
def testGetNumberOfAttributeContainers(self):
  """Tests counting attribute containers in the fake store."""
  data_stream = events.EventDataStream()

  store = fake_store.FakeStore()
  store.Open()

  try:
    self.assertEqual(
        store.GetNumberOfAttributeContainers(data_stream.CONTAINER_TYPE), 0)

    store.AddAttributeContainer(data_stream)

    self.assertEqual(
        store.GetNumberOfAttributeContainers(data_stream.CONTAINER_TYPE), 1)

  finally:
    store.Close()
def testAddAttributeContainer(self):
  """Tests the AddAttributeContainer function."""
  event_data_stream = events.EventDataStream()

  test_store = fake_store.FakeStore()
  test_store.Open()

  try:
    number_of_containers = test_store.GetNumberOfAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertEqual(number_of_containers, 0)

    test_store.AddAttributeContainer(event_data_stream)

    number_of_containers = test_store.GetNumberOfAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertEqual(number_of_containers, 1)

  finally:
    # Close the store even when an assertion fails, matching the
    # try/finally pattern used by the sibling tests.
    test_store.Close()

  # Adding a container to a closed store raises IOError.
  with self.assertRaises(IOError):
    test_store.AddAttributeContainer(event_data_stream)
def testGetAttributeContainerByIndex(self):
  """Tests the GetAttributeContainerByIndex function."""
  event_data_stream = events.EventDataStream()

  storage_writer = fake_writer.FakeStorageWriter()
  storage_writer.Open()

  try:
    container = storage_writer.GetAttributeContainerByIndex(
        event_data_stream.CONTAINER_TYPE, 0)
    self.assertIsNone(container)

    storage_writer.AddAttributeContainer(event_data_stream)

    container = storage_writer.GetAttributeContainerByIndex(
        event_data_stream.CONTAINER_TYPE, 0)
    self.assertIsNotNone(container)

    # An unsupported container type raises IOError.
    with self.assertRaises(IOError):
      storage_writer.GetAttributeContainerByIndex('bogus', 0)

  finally:
    # Close the storage writer even when an assertion fails, matching the
    # try/finally pattern used by the sibling tests.
    storage_writer.Close()
def testGetAttributeContainerByIdentifier(self):
  """Tests the GetAttributeContainerByIdentifier function."""
  event_data_stream = events.EventDataStream()

  storage_writer = fake_writer.FakeStorageWriter()
  storage_writer.Open()

  try:
    storage_writer.AddAttributeContainer(event_data_stream)
    identifier = event_data_stream.GetIdentifier()

    container = storage_writer.GetAttributeContainerByIdentifier(
        event_data_stream.CONTAINER_TYPE, identifier)
    self.assertIsNotNone(container)

    # An identifier that does not exist in the store yields None.
    identifier.sequence_number = 99

    container = storage_writer.GetAttributeContainerByIdentifier(
        event_data_stream.CONTAINER_TYPE, identifier)
    self.assertIsNone(container)

  finally:
    # Close the storage writer even when an assertion fails, matching the
    # try/finally pattern used by the sibling tests.
    storage_writer.Close()
def testHasAttributeContainers(self):
  """Tests the HasAttributeContainers method."""
  redis_client = self._CreateRedisClient()

  event_data_stream = events.EventDataStream()

  test_store = redis_store.RedisStore(
      storage_type=definitions.STORAGE_TYPE_TASK)
  test_store.Open(redis_client=redis_client)

  try:
    result = test_store.HasAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertFalse(result)

    test_store.AddAttributeContainer(event_data_stream)

    result = test_store.HasAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertTrue(result)

  finally:
    # Close the store even when an assertion fails, matching the
    # try/finally pattern used by the sibling tests.
    test_store.Close()
def testAddAttributeContainer(self):
  """Tests the AddAttributeContainer function."""
  event_data_stream = events.EventDataStream()

  storage_writer = fake_writer.FakeStorageWriter()
  storage_writer.Open()

  try:
    number_of_containers = storage_writer._attribute_containers_counter[
        event_data_stream.CONTAINER_TYPE]
    self.assertEqual(number_of_containers, 0)

    storage_writer.AddAttributeContainer(event_data_stream)

    number_of_containers = storage_writer._attribute_containers_counter[
        event_data_stream.CONTAINER_TYPE]
    self.assertEqual(number_of_containers, 1)

  finally:
    # Close the storage writer even when an assertion fails, matching the
    # try/finally pattern used by the sibling tests.
    storage_writer.Close()

  # Adding a container to a closed storage writer raises IOError.
  with self.assertRaises(IOError):
    storage_writer.AddAttributeContainer(event_data_stream)
def testGetNumberOfAttributeContainers(self):
  """Tests the GetNumberOfAttributeContainers function."""
  redis_client = self._CreateRedisClient()

  event_data_stream = events.EventDataStream()

  test_store = redis_store.RedisStore(
      storage_type=definitions.STORAGE_TYPE_TASK)
  test_store.Open(redis_client=redis_client)

  try:
    number_of_containers = test_store.GetNumberOfAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertEqual(number_of_containers, 0)

    test_store.AddAttributeContainer(event_data_stream)

    number_of_containers = test_store.GetNumberOfAttributeContainers(
        event_data_stream.CONTAINER_TYPE)
    self.assertEqual(number_of_containers, 1)

  finally:
    # Close the store even when an assertion fails, matching the
    # try/finally pattern used by the sibling tests.
    test_store.Close()
def __init__(self, output_mediator):
  """Initializes a field formatting helper.

  Args:
    output_mediator (OutputMediator): output mediator.
  """
  super(FieldFormattingHelper, self).__init__()
  self._callback_functions = {}
  # A throwaway container instance is used only to enumerate the event
  # data stream attribute names.
  self._event_data_stream_field_names = (
      events.EventDataStream().GetAttributeNames())
  self._event_tag_field_names = []
  self._output_mediator = output_mediator
  self._source_mappings = {}

  # Split the format callbacks: tag fields are tracked by name, all other
  # fields resolve to a bound callback method (or None when missing).
  for field_name, callback_name in self._FIELD_FORMAT_CALLBACKS.items():
    if callback_name == '_FormatTag':
      self._event_tag_field_names.append(field_name)
    else:
      self._callback_functions[field_name] = getattr(
          self, callback_name, None)
def testWriteExistingAttributeContainer(self):
  """Tests the _WriteExistingAttributeContainer function."""
  redis_client = self._CreateRedisClient()

  session = sessions.Session()
  task = tasks.Task(session_identifier=session.identifier)

  store = redis_store.RedisStore(storage_type=definitions.STORAGE_TYPE_TASK)
  store.Open(
      redis_client=redis_client, session_identifier=session.identifier,
      task_identifier=task.identifier)

  try:
    data_stream = events.EventDataStream()

    self.assertEqual(
        store.GetNumberOfAttributeContainers(data_stream.CONTAINER_TYPE), 0)

    # Updating a container that was never written raises IOError.
    with self.assertRaises(IOError):
      store._WriteExistingAttributeContainer(data_stream)

    store._WriteNewAttributeContainer(data_stream)

    self.assertEqual(
        store.GetNumberOfAttributeContainers(data_stream.CONTAINER_TYPE), 1)

    # Updating an existing container must not add another one.
    store._WriteExistingAttributeContainer(data_stream)

    self.assertEqual(
        store.GetNumberOfAttributeContainers(data_stream.CONTAINER_TYPE), 1)

  finally:
    store.Close()

    self._RemoveSessionData(redis_client, session.identifier)
def testReadAndWriteSerializedEventDataStream(self):
  """Test ReadSerialized and WriteSerialized of EventDataStream."""
  test_file = self._GetTestFilePath(['ímynd.dd'])

  volume_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location='/',
      parent=volume_path_spec)

  expected_event_data_stream = events.EventDataStream()
  expected_event_data_stream.md5_hash = 'e3df0d2abd2c27fbdadfb41a47442520'
  expected_event_data_stream.path_spec = path_spec

  # Serialize the container to JSON, then read it back.
  json_string = (
      json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
          expected_event_data_stream))
  self.assertIsNotNone(json_string)

  event_data_stream = (
      json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
          json_string))
  self.assertIsNotNone(event_data_stream)
  self.assertIsInstance(event_data_stream, events.EventDataStream)

  expected_event_data_stream_dict = {
      'md5_hash': 'e3df0d2abd2c27fbdadfb41a47442520',
      'path_spec': path_spec.comparable}

  event_data_stream_dict = event_data_stream.CopyToDict()

  # Compare path specifications via their comparable string form.
  path_spec = event_data_stream_dict.get('path_spec', None)
  if path_spec:
    event_data_stream_dict['path_spec'] = path_spec.comparable

  self.assertEqual(event_data_stream_dict, expected_event_data_stream_dict)
def testGetRedisHashName(self):
  """Tests the _GetRedisHashName function."""
  redis_client = self._CreateRedisClient()

  session = sessions.Session()
  task = tasks.Task(session_identifier=session.identifier)
  event_data_stream = events.EventDataStream()

  test_store = redis_store.RedisStore(
      storage_type=definitions.STORAGE_TYPE_TASK)
  test_store.Open(
      redis_client=redis_client, session_identifier=task.session_identifier,
      task_identifier=task.identifier)

  try:
    redis_hash_name = test_store._GetRedisHashName(
        event_data_stream.CONTAINER_TYPE)

    # The hash name is composed of session, task and container type.
    expected_redis_hash_name = '{0:s}-{1:s}-{2:s}'.format(
        task.session_identifier, task.identifier,
        event_data_stream.CONTAINER_TYPE)
    self.assertEqual(redis_hash_name, expected_redis_hash_name)

  finally:
    # The original test never closed the store; close it here so the open
    # store does not leak into subsequent tests, matching the sibling
    # Redis store tests.
    test_store.Close()
def testProduceEventWithEventData(self):
  """Tests the ProduceEventWithEventData method."""
  knowledge_base_object = knowledge_base.KnowledgeBase()
  parser_mediator = mediator.ParserMediator(knowledge_base_object)

  storage_writer = fake_writer.FakeStorageWriter()
  parser_mediator.SetStorageWriter(storage_writer)

  storage_writer.Open()

  event_data_stream = events.EventDataStream()
  parser_mediator.ProduceEventDataStream(event_data_stream)

  date_time = fake_time.FakeTime()
  event_with_timestamp = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_WRITTEN)
  event_with_timestamp.parser = 'test_parser'

  event_data = events.EventData()
  event_data.parser = 'test_parser'

  parser_mediator.ProduceEventWithEventData(event_with_timestamp, event_data)

  # Exactly one event and no warnings are expected.
  self.assertEqual(
      storage_writer.GetNumberOfAttributeContainers('event'), 1)
  self.assertEqual(
      storage_writer.GetNumberOfAttributeContainers('extraction_warning'), 0)
  self.assertEqual(
      storage_writer.GetNumberOfAttributeContainers('recovery_warning'), 0)

  # Producing an event that has no timestamp is invalid.
  event_without_timestamp = events.EventObject()
  event_without_timestamp.parser = 'test_parser'

  with self.assertRaises(errors.InvalidEvent):
    parser_mediator.ProduceEventWithEventData(
        event_without_timestamp, event_data)
def testGetAttributeContainerByIdentifier(self):
  """Tests retrieval of an attribute container by its identifier."""
  data_stream = events.EventDataStream()

  store = fake_store.FakeStore()
  store.Open()

  try:
    store.AddAttributeContainer(data_stream)
    identifier = data_stream.GetIdentifier()

    self.assertIsNotNone(store.GetAttributeContainerByIdentifier(
        data_stream.CONTAINER_TYPE, identifier))

    # An identifier that does not exist in the store yields None.
    identifier.sequence_number = 99
    self.assertIsNone(store.GetAttributeContainerByIdentifier(
        data_stream.CONTAINER_TYPE, identifier))

  finally:
    store.Close()
def testHasAttributeContainers(self):
  """Tests the HasAttributeContainers function."""
  event_data_stream = events.EventDataStream()

  with shared_test_lib.TempDirectory() as temp_directory:
    test_path = os.path.join(temp_directory, 'plaso.sqlite')
    test_store = sqlite_file.SQLiteStorageFile()
    test_store.Open(path=test_path, read_only=False)

    try:
      result = test_store.HasAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertFalse(result)

      test_store.AddAttributeContainer(event_data_stream)

      result = test_store.HasAttributeContainers(
          event_data_stream.CONTAINER_TYPE)
      self.assertTrue(result)

      # An unsupported container type raises ValueError.
      with self.assertRaises(ValueError):
        test_store.HasAttributeContainers('bogus')

    finally:
      # Close the store even when an assertion fails, matching the
      # try/finally pattern used by the sibling tests.
      test_store.Close()