def testStorageWriter(self):
  """Test the storage writer."""
  event_objects = test_lib.CreateTestEventObjects()

  # The storage writer is normally run in a separate thread.
  # For the purpose of this test it has to be run in sequence,
  # hence the call to WriteEventObjects after all the event objects
  # have been queued up.

  # TODO: add upper queue limit.
  # A timeout is used to prevent the multi processing queue to close and
  # stop blocking the current process.
  item_queue = multi_process.MultiProcessingQueue(timeout=0.1)
  producer = queue.ItemQueueProducer(item_queue)
  producer.ProduceItems(event_objects)
  producer.SignalAbort()

  with tempfile.NamedTemporaryFile() as temp_file:
    storage_writer = writer.FileStorageWriter(item_queue, temp_file)
    storage_writer.WriteEventObjects()

    # The resulting file should be a ZIP archive with exactly the
    # expected set of members.
    zip_archive = zipfile.ZipFile(temp_file, 'r', zipfile.ZIP_DEFLATED)

    expected_member_names = [
        u'plaso_index.000001', u'plaso_meta.000001', u'plaso_proto.000001',
        u'plaso_timestamps.000001', u'serializer.txt']

    member_names = sorted(zip_archive.namelist())

    self.assertEqual(len(member_names), 5)
    self.assertEqual(member_names, expected_member_names)
def testStorageWriter(self):
  """Test the storage writer."""
  event_objects = test_lib.CreateTestEventObjects()
  session_start = sessions.SessionStart()
  preprocessing_object = event.PreprocessObject()

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, u'storage.plaso')
    storage_writer = zip_file.ZIPStorageFileWriter(
        storage_path, preprocessing_object)

    # Write a full session: start marker, all events, completion marker.
    storage_writer.Open()
    storage_writer.WriteSessionStart(session_start)
    for event_object in event_objects:
      storage_writer.AddEvent(event_object)
    storage_writer.WriteSessionCompletion()
    storage_writer.Close()

    # The resulting storage file should be a ZIP archive containing
    # exactly the expected members.
    zip_archive = zipfile.ZipFile(
        storage_path, mode='r', compression=zipfile.ZIP_DEFLATED)

    expected_member_names = sorted([
        u'event_data.000001', u'event_index.000001',
        u'event_timestamps.000001', u'information.dump', u'metadata.txt',
        u'session_completion.000001', u'session_start.000001'])

    member_names = sorted(zip_archive.namelist())

    self.assertEqual(len(member_names), 7)
    self.assertEqual(member_names, expected_member_names)
def testAddEventObjects(self):
  """Tests the AddEventObjects function."""
  events_to_store = test_lib.CreateTestEventObjects()

  with shared_test_lib.TempDirectory() as work_directory:
    storage_path = os.path.join(work_directory, u'plaso.db')
    test_store = zip_file.StorageFile(storage_path)
    # Add all events in a single batch call.
    test_store.AddEventObjects(events_to_store)
    test_store.Close()
def testAddEventObject(self):
  """Tests the AddEventObject function."""
  sample_events = test_lib.CreateTestEventObjects()

  with shared_test_lib.TempDirectory() as work_directory:
    storage_path = os.path.join(work_directory, u'storage.plaso')
    test_store = zip_file.StorageFile(storage_path)
    # Add the events one at a time, as AddEventObject expects.
    for sample_event in sample_events:
      test_store.AddEventObject(sample_event)
    test_store.Close()
def testStoreTagging(self):
  """Tests the StoreTagging function."""
  sample_events = test_lib.CreateTestEventObjects()
  sample_tags = self._CreateTestEventTags()

  with shared_test_lib.TempDirectory() as work_directory:
    storage_path = os.path.join(work_directory, u'plaso.db')
    test_store = zip_file.StorageFile(storage_path)
    test_store.AddEventObjects(sample_events)
    # Store the tags in two batches: all but the last tag first, then
    # the last tag on its own, to exercise a second StoreTagging call.
    test_store.StoreTagging(sample_tags[:-1])
    test_store.StoreTagging(sample_tags[-1:])
    test_store.Close()
def _CreateTestStorageFileWithTags(self, path):
  """Creates a storage file with event tags for testing.

  The storage file is written to the given path and closed; this
  helper returns nothing.

  Args:
    path: a string containing the path of the storage file.
  """
  # NOTE: the original docstring claimed a StorageFile object was
  # returned, but the function closes the file and returns None;
  # the docstring has been corrected to match the behavior.
  test_event_objects = test_lib.CreateTestEventObjects()
  test_event_tags = self._CreateTestEventTags()

  storage_file = zip_file.StorageFile(path)
  storage_file.AddEventObjects(test_event_objects)
  # Two StoreTagging calls: the bulk of the tags, then the last tag,
  # to exercise appending to existing tag data.
  storage_file.StoreTagging(test_event_tags[:-1])
  storage_file.StoreTagging(test_event_tags[-1:])
  storage_file.Close()
def testStorageWriter(self):
  """Test the storage writer."""
  event_objects = test_lib.CreateTestEventObjects()

  # The storage writer is normally run in a separate thread.
  # For the purpose of this test it has to be run in sequence,
  # hence the call to WriteEventObjects after all the event objects
  # have been queued up.

  # TODO: add upper queue limit.
  # A timeout is used to prevent the multi processing queue to close and
  # stop blocking the current process.
  item_queue = multi_process.MultiProcessingQueue(timeout=0.1)
  producer = plaso_queue.ItemQueueProducer(item_queue)
  producer.ProduceItems(event_objects)
  producer.SignalAbort()

  preprocessing_object = event.PreprocessObject()

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, u'plaso.db')
    storage_writer = zip_file.ZIPStorageFileWriter(
        item_queue, storage_path, preprocessing_object)
    storage_writer.WriteEventObjects()

    # The resulting storage file should be a ZIP archive containing
    # exactly the expected members.
    zip_archive = zipfile.ZipFile(
        storage_path, mode='r', compression=zipfile.ZIP_DEFLATED)

    expected_member_names = [
        u'information.dump', u'plaso_index.000001', u'plaso_proto.000001',
        u'plaso_timestamps.000001', u'serializer.txt']

    member_names = sorted(zip_archive.namelist())

    self.assertEqual(len(member_names), 5)
    self.assertEqual(member_names, expected_member_names)
def _CreateTestStorageFileWithTags(self, path):
  """Creates a storage file with event tags for testing.

  The storage file is written to the given path and closed; this
  helper returns nothing.

  Args:
    path: a string containing the path of the storage file.
  """
  # NOTE: the original docstring claimed a StorageFile object was
  # returned, but the function closes the file and returns None;
  # the docstring has been corrected to match the behavior.
  event_objects = test_lib.CreateTestEventObjects()
  event_tags = self._CreateTestEventTags()

  storage_file = zip_file.StorageFile(path)
  for event_object in event_objects:
    storage_file.AddEventObject(event_object)

  # Two StoreTagging calls: the bulk of the tags, then the last tag,
  # to exercise appending to existing tag data.
  storage_file.StoreTagging(event_tags[:-1])
  storage_file.StoreTagging(event_tags[-1:])

  preprocessing_object = event.PreprocessObject()
  storage_file.WritePreprocessObject(preprocessing_object)

  storage_file.Close()
def testStorage(self):
  """Test the storage object."""
  # End-to-end test: write events, tags and a group to a storage file,
  # then reopen it read-only and verify everything round-trips.
  test_event_objects = test_lib.CreateTestEventObjects()
  formatter_mediator = formatters_mediator.FormatterMediator()
  event_objects = []
  timestamps = []
  group_mock = GroupMock()
  tags = []
  tags_mock = []
  groups = []
  group_events = []
  same_events = []

  serializer = protobuf_serializer.ProtobufEventObjectSerializer

  with shared_test_lib.TempDirectory() as dirname:
    temp_file = os.path.join(dirname, 'plaso.db')
    store = storage.StorageFile(temp_file)
    store.AddEventObjects(test_event_objects)

    # Add tagging.
    tag_1 = event.EventTag()
    tag_1.store_index = 0
    tag_1.store_number = 1
    tag_1.comment = 'My comment'
    tag_1.color = 'blue'
    tags_mock.append(tag_1)

    tag_2 = event.EventTag()
    tag_2.store_index = 1
    tag_2.store_number = 1
    tag_2.tags = ['Malware']
    tag_2.color = 'red'
    tags_mock.append(tag_2)

    tag_3 = event.EventTag()
    tag_3.store_number = 1
    tag_3.store_index = 2
    tag_3.comment = 'This is interesting'
    tag_3.tags = ['Malware', 'Benign']
    tag_3.color = 'red'
    tags_mock.append(tag_3)

    store.StoreTagging(tags_mock)

    # Add additional tagging, second round.
    # tag_4 re-tags the same event as tag_2 (store 1, index 1); the
    # read-back assertions below expect the new tag to be merged with
    # the previously stored tag data for that event.
    tag_4 = event.EventTag()
    tag_4.store_index = 1
    tag_4.store_number = 1
    tag_4.tags = ['Interesting']
    store.StoreTagging([tag_4])

    group_mock.AddGroup(
        'Malicious', [(1, 1), (1, 2)], desc='Events that are malicious',
        color='red', first=1334940286000000, last=1334961526929596,
        cat='Malware')
    store.StoreGrouping(group_mock)
    store.Close()

    # Reopen the same file read-only and verify the stored data.
    read_store = storage.StorageFile(temp_file, read_only=True)

    self.assertTrue(read_store.HasTagging())
    self.assertTrue(read_store.HasGrouping())

    for event_object in read_store.GetEntries(1):
      event_objects.append(event_object)
      timestamps.append(event_object.timestamp)
      # NOTE(review): both branches assert the identical value; the
      # else branch was presumably meant to check a different
      # timestamp_desc — confirm the intended expectation.
      if event_object.data_type == 'windows:registry:key_value':
        self.assertEqual(
            event_object.timestamp_desc,
            eventdata.EventTimestamp.WRITTEN_TIME)
      else:
        self.assertEqual(
            event_object.timestamp_desc,
            eventdata.EventTimestamp.WRITTEN_TIME)

    for tag in read_store.GetTagging():
      event_object = read_store.GetTaggedEvent(tag)
      tags.append(event_object)

    groups = list(read_store.GetGrouping())
    self.assertEqual(len(groups), 1)
    group_events = list(read_store.GetEventsFromGroup(groups[0]))

    # Read the same events that were put in the group, just to compare
    # against.
    event_object = read_store.GetEventObject(1, 1)
    serialized_event_object = serializer.WriteSerialized(event_object)
    same_events.append(serialized_event_object)

    event_object = read_store.GetEventObject(1, 2)
    serialized_event_object = serializer.WriteSerialized(event_object)
    same_events.append(serialized_event_object)

    self.assertEqual(len(event_objects), 4)
    self.assertEqual(len(tags), 4)

    expected_timestamp = timelib.Timestamp.CopyFromString(
        u'2009-04-05 12:27:39')
    self.assertEqual(tags[0].timestamp, expected_timestamp)
    self.assertEqual(tags[0].store_number, 1)
    self.assertEqual(tags[0].store_index, 0)
    self.assertEqual(tags[0].tag.comment, u'My comment')
    self.assertEqual(tags[0].tag.color, u'blue')

    msg, _ = formatters_manager.FormattersManager.GetMessageStrings(
        formatter_mediator, tags[0])
    self.assertEqual(msg[0:10], u'This is a ')

    self.assertEqual(tags[1].tag.tags[0], 'Malware')
    msg, _ = formatters_manager.FormattersManager.GetMessageStrings(
        formatter_mediator, tags[1])
    self.assertEqual(msg[0:15], u'[\\HKCU\\Windows\\')

    self.assertEqual(tags[2].tag.comment, u'This is interesting')
    self.assertEqual(tags[2].tag.tags[0], 'Malware')
    self.assertEqual(tags[2].tag.tags[1], 'Benign')
    self.assertEqual(tags[2].parser, 'UNKNOWN')

    # Test the newly added fourth tag, which should include data from
    # the first version as well.
    self.assertEqual(tags[3].tag.tags[0], 'Interesting')
    self.assertEqual(tags[3].tag.tags[1], 'Malware')

    expected_timestamps = [
        1238934459000000, 1334940286000000, 1334961526929596,
        1335966206929596]
    self.assertEqual(timestamps, expected_timestamps)

    self.assertEqual(groups[0].name, u'Malicious')
    self.assertEqual(groups[0].category, u'Malware')
    self.assertEqual(groups[0].color, u'red')
    self.assertEqual(groups[0].description, u'Events that are malicious')
    self.assertEqual(groups[0].first_timestamp, 1334940286000000)
    self.assertEqual(groups[0].last_timestamp, 1334961526929596)

    self.assertEqual(len(group_events), 2)
    self.assertEqual(group_events[0].timestamp, 1334940286000000)
    self.assertEqual(group_events[1].timestamp, 1334961526929596)

    # The serialized group events should match the directly read events
    # serialized above.
    proto_group_events = []
    for group_event in group_events:
      serialized_event_object = serializer.WriteSerialized(group_event)
      proto_group_events.append(serialized_event_object)

    self.assertEqual(same_events, proto_group_events)