def CreateEventFromValues(event_values):
  """Creates an event and event data from event values.

  Args:
    event_values (dict[str, str]): event values.

  Returns:
    tuple[EventObject, EventData, EventDataStream]: event, event data and
        event data stream for testing.
  """
  remaining_values = dict(event_values)

  event = events.EventObject()

  timestamp = remaining_values.pop('timestamp', None)
  if timestamp is not None:
    # Timestamps may be provided as date and time strings for readability.
    if isinstance(timestamp, str):
      timestamp = shared_test_lib.CopyTimestampFromString(timestamp)
    event.timestamp = timestamp

  timestamp_desc = remaining_values.pop('timestamp_desc', None)
  if timestamp_desc is not None:
    event.timestamp_desc = timestamp_desc

  event.date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=event.timestamp)

  # Path specification and hashes belong on the event data stream.
  event_data_stream = events.EventDataStream()
  for name in ('path_spec', 'md5_hash', 'sha256_hash'):
    value = remaining_values.pop(name, None)
    if value is not None:
      setattr(event_data_stream, name, value)

  # Whatever remains becomes event data attributes.
  event_data = events.EventData()
  event_data.CopyFromDict(remaining_values)

  return event, event_data, event_data_stream
def testHasAttributeContainers(self):
  """Tests the _HasAttributeContainers function."""
  event_data = events.EventData()

  with shared_test_lib.TempDirectory() as temp_directory:
    temp_file = os.path.join(temp_directory, 'plaso.sqlite')
    storage_file = sqlite_file.SQLiteStorageFile()
    storage_file.Open(path=temp_file, read_only=False)

    container_type = storage_file._CONTAINER_TYPE_EVENT_DATA

    # A freshly created store contains no event data containers.
    self.assertFalse(storage_file._HasAttributeContainers(container_type))

    storage_file._AddAttributeContainer(container_type, event_data)
    storage_file._WriteSerializedAttributeContainerList(container_type)

    # After the serialized list is written the container type is present.
    self.assertTrue(storage_file._HasAttributeContainers(container_type))

    # An unsupported container type raises ValueError.
    with self.assertRaises(ValueError):
      storage_file._HasAttributeContainers('bogus')

    storage_file.Close()
def testWriteSerializedAttributeContainerList(self):
  """Tests the _WriteSerializedAttributeContainerList function."""
  test_event_data = events.EventData()
  test_event = events.EventObject()

  with shared_test_lib.TempDirectory() as temp_directory:
    temp_file = os.path.join(temp_directory, 'plaso.sqlite')
    storage_file = sqlite_file.SQLiteStorageFile()
    storage_file.Open(path=temp_file, read_only=False)

    storage_file._AddAttributeContainer(
        storage_file._CONTAINER_TYPE_EVENT_DATA, test_event_data)
    storage_file._WriteSerializedAttributeContainerList(
        storage_file._CONTAINER_TYPE_EVENT_DATA)

    # The largest timestamp that fits a 64-bit signed integer can be written.
    test_event.timestamp = 0x7fffffffffffffff
    storage_file._AddSerializedEvent(test_event)
    storage_file._WriteSerializedAttributeContainerList(
        storage_file._CONTAINER_TYPE_EVENT)

    # A timestamp outside the 64-bit signed range cannot be written.
    test_event.timestamp = 0x8000000000000000
    storage_file._AddSerializedEvent(test_event)
    with self.assertRaises(OverflowError):
      storage_file._WriteSerializedAttributeContainerList(
          storage_file._CONTAINER_TYPE_EVENT)

    storage_file.Close()
def testProduceEventWithEventData(self):
  """Tests the ProduceEventWithEventData method."""
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter(session)
  knowledge_base_object = knowledge_base.KnowledgeBase()
  parser_mediator = mediator.ParserMediator(
      storage_writer, knowledge_base_object)

  storage_writer.Open()

  parser_mediator.ProduceEventDataStream(events.EventDataStream())

  event_data = events.EventData()
  event_data.parser = 'test_parser'

  # An event with a date and time value is produced without warnings.
  timestamped_event = time_events.DateTimeValuesEvent(
      fake_time.FakeTime(), definitions.TIME_DESCRIPTION_WRITTEN)
  timestamped_event.parser = 'test_parser'
  parser_mediator.ProduceEventWithEventData(timestamped_event, event_data)

  self.assertEqual(storage_writer.number_of_warnings, 0)
  self.assertEqual(storage_writer.number_of_events, 1)

  # An event without a timestamp is rejected as invalid.
  untimestamped_event = events.EventObject()
  untimestamped_event.parser = 'test_parser'
  with self.assertRaises(errors.InvalidEvent):
    parser_mediator.ProduceEventWithEventData(
        untimestamped_event, event_data)
def testGetAttributeContainers(self):
  """Tests the _GetAttributeContainers function."""
  event_data = events.EventData()

  with shared_test_lib.TempDirectory() as temp_directory:
    temp_file = os.path.join(temp_directory, 'plaso.sqlite')
    storage_file = sqlite_file.SQLiteStorageFile()
    storage_file.Open(path=temp_file, read_only=False)

    container_type = storage_file._CONTAINER_TYPE_EVENT_DATA

    # The store starts out with no event data containers.
    containers = list(storage_file._GetAttributeContainers(container_type))
    self.assertEqual(len(containers), 0)

    storage_file._AddAttributeContainer(container_type, event_data)
    storage_file._WriteSerializedAttributeContainerList(container_type)

    containers = list(storage_file._GetAttributeContainers(container_type))
    self.assertEqual(len(containers), 1)

    # An unsupported container type raises IOError.
    with self.assertRaises(IOError):
      list(storage_file._GetAttributeContainers('bogus'))

    storage_file.Close()
def testMatch(self):
  """Tests the Match function."""
  # Match against the event timestamp.
  test_filter = event_filter.EventObjectFilter()
  test_filter.CompileFilter('timestamp is DATETIME("2020-12-23T15:00:00")')

  event = events.EventObject()
  event.timestamp = 1608735600000000
  self.assertTrue(test_filter.Match(event, None, None, None))

  # Match against a path value on the event data.
  test_filter = event_filter.EventObjectFilter()
  test_filter.CompileFilter('filename contains PATH("etc/issue")')

  event_data = events.EventData()
  event_data.filename = '/usr/local/etc/issue'
  self.assertTrue(test_filter.Match(None, event_data, None, None))

  # PATH matching is per path segment, hence "issue.net" does not match.
  event_data.filename = '/etc/issue.net'
  self.assertFalse(test_filter.Match(None, event_data, None, None))
def CreateEventFromValues(event_values):
  """Creates an event and event data from event values.

  Args:
    event_values (dict[str, str]): event values.

  Returns:
    tuple[EventObject, EventData]: event and event data for testing.
  """
  copy_of_event_values = dict(event_values)

  event = events.EventObject()
  # dict.pop with a default replaces the original get-then-delete pattern,
  # which looked each key up twice; behavior is identical since a missing
  # key yields None either way.
  event.timestamp = copy_of_event_values.pop('timestamp', None)
  event.timestamp_desc = copy_of_event_values.pop('timestamp_desc', None)

  # The remaining values become event data attributes.
  event_data = events.EventData()
  event_data.CopyFromDict(copy_of_event_values)

  return event, event_data
def testCountStoredAttributeContainers(self):
  """Tests the _GetNumberOfAttributeContainers function."""
  # NOTE(review): the original docstring referred to
  # _CountStoredAttributeContainers but every call below is to
  # _GetNumberOfAttributeContainers; the docstring is corrected to match.
  event_data = events.EventData()

  with shared_test_lib.TempDirectory() as temp_directory:
    temp_file = os.path.join(temp_directory, 'plaso.sqlite')
    storage_file = sqlite_file.SQLiteStorageFile()
    storage_file.Open(path=temp_file, read_only=False)

    number_of_containers = storage_file._GetNumberOfAttributeContainers(
        storage_file._CONTAINER_TYPE_EVENT_DATA)
    self.assertEqual(number_of_containers, 0)

    storage_file._AddAttributeContainer(
        storage_file._CONTAINER_TYPE_EVENT_DATA, event_data)
    storage_file._WriteSerializedAttributeContainerList(
        storage_file._CONTAINER_TYPE_EVENT_DATA)

    number_of_containers = storage_file._GetNumberOfAttributeContainers(
        storage_file._CONTAINER_TYPE_EVENT_DATA)
    self.assertEqual(number_of_containers, 1)

    # An unsupported container type raises ValueError.
    with self.assertRaises(ValueError):
      storage_file._GetNumberOfAttributeContainers('bogus')

    # Test for a supported container type that does not have a table
    # present in the storage file.
    query = 'DROP TABLE {0:s}'.format(
        storage_file._CONTAINER_TYPE_EVENT_DATA)
    storage_file._cursor.execute(query)

    number_of_containers = storage_file._GetNumberOfAttributeContainers(
        storage_file._CONTAINER_TYPE_EVENT_DATA)
    self.assertEqual(number_of_containers, 0)

    storage_file.Close()
def testGetAttributeNames(self):
  """Tests the GetAttributeNames function."""
  attribute_container = events.EventData()

  expected_attribute_names = ['data_type', 'offset', 'parser', 'query']
  self.assertEqual(
      sorted(attribute_container.GetAttributeNames()),
      expected_attribute_names)
def testReadAndWriteSerializedEventData(self):
  """Test ReadSerialized and WriteSerialized of EventData."""
  test_file = self._GetTestFilePath(['ímynd.dd'])
  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file)
  tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location='/',
      parent=os_path_spec)

  expected_event = events.EventData()
  expected_event.data_type = 'test:event2'
  expected_event.parser = 'test_parser'
  expected_event.pathspec = tsk_path_spec
  expected_event.empty_string = ''
  expected_event.zero_integer = 0
  expected_event.integer = 34
  expected_event.float = -122.082203542683
  expected_event.string = 'Normal string'
  expected_event.unicode_string = 'And I am a unicorn.'
  expected_event.my_list = ['asf', 4234, 2, 54, 'asf']
  expected_event.a_tuple = ('some item', [234, 52, 15])
  expected_event.null_value = None

  json_string = (
      json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
          expected_event))
  self.assertIsNotNone(json_string)

  event_data = (
      json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
          json_string))
  self.assertIsNotNone(event_data)
  self.assertIsInstance(event_data, events.EventData)

  # Note that the None value is dropped during serialization.
  expected_data_event_dict = {
      'a_tuple': ('some item', [234, 52, 15]),
      'data_type': 'test:event2',
      'empty_string': '',
      'integer': 34,
      'float': -122.082203542683,
      'my_list': ['asf', 4234, 2, 54, 'asf'],
      'parser': 'test_parser',
      'pathspec': tsk_path_spec.comparable,
      'string': 'Normal string',
      'unicode_string': 'And I am a unicorn.',
      'zero_integer': 0}

  event_data_dict = event_data.CopyToDict()
  # Path specifications are compared via their comparable string form.
  deserialized_path_spec = event_data_dict.get('pathspec', None)
  if deserialized_path_spec:
    event_data_dict['pathspec'] = deserialized_path_spec.comparable

  self.assertEqual(event_data_dict, expected_data_event_dict)
def testGetAttributeNames(self):
  """Tests the GetAttributeNames function."""
  attribute_container = events.EventData()

  expected_attribute_names = [
      '_event_data_stream_row_identifier', 'data_type', 'parser']
  self.assertEqual(
      sorted(attribute_container.GetAttributeNames()),
      expected_attribute_names)
def testGetAttributes(self):
  """Tests GetAttributeValuesHash with unsupported attribute value types."""
  # NOTE(review): the original docstring said "Tests the GetAttributes
  # function" but the body exercises GetAttributeValuesHash; corrected.
  attribute_container = events.EventData()

  # Bytes values are not supported by event data attributes.
  with self.assertRaises(TypeError):
    attribute_container.error = b'bytes'
    attribute_container.GetAttributeValuesHash()

  # Dict values are not supported either.
  with self.assertRaises(TypeError):
    attribute_container.error = {'key': 'value'}
    attribute_container.GetAttributeValuesHash()
def testAddAddEventData(self):
  """Tests the AddEventData function."""
  # NOTE(review): the method name contains a duplicated "Add" and the
  # original docstring repeated it; the docstring is corrected to name
  # the function actually under test. The method name itself is kept so
  # test discovery and any external references remain stable.
  event_data = events.EventData()

  with shared_test_lib.TempDirectory() as temp_directory:
    temp_file = os.path.join(temp_directory, 'plaso.sqlite')
    storage_file = sqlite_file.SQLiteStorageFile()
    storage_file.Open(path=temp_file, read_only=False)

    storage_file.AddEventData(event_data)

    storage_file.Close()
def testAddAttributeContainer(self):
  """Tests the _AddAttributeContainer function."""
  event_data = events.EventData()

  with shared_test_lib.TempDirectory() as temp_directory:
    temp_file = os.path.join(temp_directory, 'plaso.sqlite')
    storage_file = sqlite_file.SQLiteStorageFile()
    storage_file.Open(path=temp_file, read_only=False)

    # Adding event data to a writable store should not raise.
    storage_file._AddAttributeContainer(
        storage_file._CONTAINER_TYPE_EVENT_DATA, event_data)

    storage_file.Close()
def GetEventData(self, data_type):
  """Retrieves the properties as event data.

  Args:
    data_type (str): event data type.

  Returns:
    EventData: event data.
  """
  event_data = events.EventData(data_type=data_type)
  # dict.items() is directly iterable; the iter() wrapper was redundant.
  for property_name, property_value in self._properties.items():
    setattr(event_data, property_name, property_value)

  return event_data
def GetEventData(self):
  """Retrieves the properties as event data.

  Returns:
    EventData: event data.
  """
  event_data = events.EventData(data_type=self._EVENT_DATA_TYPE)
  event_data.name = self._EVENT_DATA_NAME

  for name, value in self._properties.items():
    # Byte string values are not supported; store their representation.
    if isinstance(value, bytes):
      value = repr(value)
    setattr(event_data, name, value)

  return event_data
def GetEventData(self, data_type):
  """Retrieves the properties as event data.

  Args:
    data_type (str): event data type.

  Returns:
    EventData: event data.
  """
  event_data = events.EventData(data_type=data_type)
  # dict.items() is directly iterable; the iter() wrapper was redundant.
  # py2to3.BYTES_TYPE is kept since the surrounding code appears to
  # target both Python 2 and 3.
  for property_name, property_value in self._properties.items():
    # Byte string values are not supported; store their representation.
    if isinstance(property_value, py2to3.BYTES_TYPE):
      property_value = repr(property_value)
    setattr(event_data, property_name, property_value)

  return event_data
def testProduceEventWithEventData(self):
  """Tests the ProduceEventWithEventData method."""
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter(session)
  storage_writer.Open()

  parsers_mediator = self._CreateParserMediator(storage_writer)

  event_data = events.EventData()

  # An event carrying a timestamp can be produced.
  timestamped_event = events.EventObject()
  timestamped_event.timestamp = fake_time.FakeTime()
  parsers_mediator.ProduceEventWithEventData(timestamped_event, event_data)

  self.assertEqual(storage_writer.number_of_events, 1)

  # An event without a timestamp is rejected as invalid.
  untimestamped_event = events.EventObject()
  with self.assertRaises(errors.InvalidEvent):
    parsers_mediator.ProduceEventWithEventData(
        untimestamped_event, event_data)
def testReadAndWriteSerializedEventData(self):
  """Test ReadSerialized and WriteSerialized of EventData."""
  # Attribute values covering the supported serializable types.
  attribute_values = {
      'data_type': 'test:event2',
      'parser': 'test_parser',
      'empty_string': '',
      'zero_integer': 0,
      'integer': 34,
      'float': -122.082203542683,
      'string': 'Normal string',
      'unicode_string': 'And I am a unicorn.',
      'my_list': ['asf', 4234, 2, 54, 'asf'],
      'a_tuple': ('some item', [234, 52, 15]),
      'null_value': None}

  expected_event_data = events.EventData()
  for name, value in attribute_values.items():
    setattr(expected_event_data, name, value)

  json_string = (
      json_serializer.JSONAttributeContainerSerializer.WriteSerialized(
          expected_event_data))
  self.assertIsNotNone(json_string)

  event_data = (
      json_serializer.JSONAttributeContainerSerializer.ReadSerialized(
          json_string))
  self.assertIsNotNone(event_data)
  self.assertIsInstance(event_data, events.EventData)

  # Note that the None value is dropped during serialization.
  expected_event_data_dict = {
      'a_tuple': ('some item', [234, 52, 15]),
      'data_type': 'test:event2',
      'empty_string': '',
      'integer': 34,
      'float': -122.082203542683,
      'my_list': ['asf', 4234, 2, 54, 'asf'],
      'parser': 'test_parser',
      'string': 'Normal string',
      'unicode_string': 'And I am a unicorn.',
      'zero_integer': 0}

  self.assertEqual(event_data.CopyToDict(), expected_event_data_dict)
def testAddAttributeContainers(self):
  """Tests the _AddAttributeContainer method."""
  event_data = events.EventData()

  store = redis_store.RedisStore()
  redis_client = self._GetRedisClient()
  store.Open(redis_client=redis_client)

  container_type = event_data.CONTAINER_TYPE

  # The store starts out empty.
  self.assertEqual(
      store._GetNumberOfAttributeContainers(container_type), 0)

  store._AddAttributeContainer(store._CONTAINER_TYPE_EVENT_DATA, event_data)

  self.assertEqual(
      store._GetNumberOfAttributeContainers(container_type), 1)
  self.assertTrue(store._HasAttributeContainers(container_type))

  store.Close()
def testRemoveAttributeContainer(self):
  """Tests the RemoveAttributeContainer method."""
  event_data = events.EventData()

  store = redis_store.RedisStore()
  redis_client = self._GetRedisClient()
  store.Open(redis_client=redis_client)

  container_type = event_data.CONTAINER_TYPE

  store._AddAttributeContainer(store._CONTAINER_TYPE_EVENT_DATA, event_data)
  self.assertEqual(
      store._GetNumberOfAttributeContainers(container_type), 1)

  # Removing by identifier should leave the store empty again.
  identifier = event_data.GetIdentifier()
  store.RemoveAttributeContainer(
      store._CONTAINER_TYPE_EVENT_DATA, identifier)

  self.assertEqual(
      store._GetNumberOfAttributeContainers(container_type), 0)

  store.Close()
def testProduceEventWithEventData(self):
  """Tests the ProduceEventWithEventData method."""
  session = sessions.Session()
  storage_writer = fake_writer.FakeStorageWriter(session)
  storage_writer.Open()

  parsers_mediator = self._CreateParserMediator(storage_writer)

  event_data = events.EventData()

  # An event with a date and time value is produced without errors.
  timestamped_event = time_events.DateTimeValuesEvent(
      fake_time.FakeTime(), definitions.TIME_DESCRIPTION_WRITTEN)
  parsers_mediator.ProduceEventWithEventData(timestamped_event, event_data)

  self.assertEqual(storage_writer.number_of_errors, 0)
  self.assertEqual(storage_writer.number_of_events, 1)

  # An event without a timestamp is rejected as invalid.
  untimestamped_event = events.EventObject()
  with self.assertRaises(errors.InvalidEvent):
    parsers_mediator.ProduceEventWithEventData(
        untimestamped_event, event_data)
def testProduceEventWithEventData(self):
  """Tests the ProduceEventWithEventData method."""
  knowledge_base_object = knowledge_base.KnowledgeBase()
  parser_mediator = mediator.ParserMediator(knowledge_base_object)

  storage_writer = fake_writer.FakeStorageWriter()
  parser_mediator.SetStorageWriter(storage_writer)
  storage_writer.Open()

  parser_mediator.ProduceEventDataStream(events.EventDataStream())

  event_data = events.EventData()
  event_data.parser = 'test_parser'

  # An event with a date and time value is produced without warnings.
  timestamped_event = time_events.DateTimeValuesEvent(
      fake_time.FakeTime(), definitions.TIME_DESCRIPTION_WRITTEN)
  timestamped_event.parser = 'test_parser'
  parser_mediator.ProduceEventWithEventData(timestamped_event, event_data)

  self.assertEqual(
      storage_writer.GetNumberOfAttributeContainers('event'), 1)
  self.assertEqual(
      storage_writer.GetNumberOfAttributeContainers('extraction_warning'),
      0)
  self.assertEqual(
      storage_writer.GetNumberOfAttributeContainers('recovery_warning'), 0)

  # An event without a timestamp is rejected as invalid.
  untimestamped_event = events.EventObject()
  untimestamped_event.parser = 'test_parser'
  with self.assertRaises(errors.InvalidEvent):
    parser_mediator.ProduceEventWithEventData(
        untimestamped_event, event_data)
def testSetEventDataStreamIdentifier(self):
  """Tests the SetEventDataStreamIdentifier function."""
  attribute_container = events.EventData()

  # Setting a None identifier should be accepted without error.
  attribute_container.SetEventDataStreamIdentifier(None)
def testGetEventDataStreamIdentifier(self):
  """Tests the GetEventDataStreamIdentifier function."""
  attribute_container = events.EventData()

  # No identifier has been set, hence None is expected.
  self.assertIsNone(attribute_container.GetEventDataStreamIdentifier())