def testAddAttributeContainer(self):
  """Tests the AddAttributeContainer function."""
  data_stream = events.EventDataStream()

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, 'plaso.sqlite')
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path, read_only=False)

    # A freshly opened store holds no containers of this type.
    container_count = store.GetNumberOfAttributeContainers(
        data_stream.CONTAINER_TYPE)
    self.assertEqual(container_count, 0)

    store.AddAttributeContainer(data_stream)

    # Adding one container raises the count to one.
    container_count = store.GetNumberOfAttributeContainers(
        data_stream.CONTAINER_TYPE)
    self.assertEqual(container_count, 1)

    store.Close()

    # Adding to a closed store is refused.
    with self.assertRaises(IOError):
      store.AddAttributeContainer(data_stream)
def testExtractFileEntry(self):
  """Tests the _ExtractFileEntry function."""
  test_file_path = self._GetTestFilePath(['ímynd.dd'])
  self._SkipIfPathNotExists(test_file_path)

  output_writer = test_lib.TestOutputWriter(encoding='utf-8')
  test_tool = image_export_tool.ImageExportTool()

  parent_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, inode=16,
      location='/a_directory/another_file', parent=parent_path_spec)

  missing_file_path = self._GetTestFilePath(['does_not_exist'])
  missing_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=missing_file_path)

  with shared_test_lib.TempDirectory() as temp_directory:
    # Extraction of an existing file entry.
    test_tool._ExtractFileEntry(
        tsk_path_spec, temp_directory, output_writer)

    # Extraction of a non-existent file entry.
    test_tool._ExtractFileEntry(
        missing_path_spec, temp_directory, output_writer)
def testGetAttributeContainerByIndex(self):
  """Tests the GetAttributeContainerByIndex function."""
  data_stream = events.EventDataStream()

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, 'plaso.sqlite')
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path, read_only=False)

    # Index 0 of an empty store yields no container.
    retrieved_container = store.GetAttributeContainerByIndex(
        data_stream.CONTAINER_TYPE, 0)
    self.assertIsNone(retrieved_container)

    store.AddAttributeContainer(data_stream)

    # After adding, index 0 yields the stored container.
    retrieved_container = store.GetAttributeContainerByIndex(
        data_stream.CONTAINER_TYPE, 0)
    self.assertIsNotNone(retrieved_container)

    # An unknown container type is refused.
    with self.assertRaises(IOError):
      store.GetAttributeContainerByIndex('bogus', 0)

    store.Close()
def testGetStatus(self):
  """Tests the _GetStatus function."""
  with shared_test_lib.TempDirectory() as temp_directory:
    configuration = configurations.ProcessingConfiguration()
    configuration.task_storage_path = temp_directory

    process = extraction_process.ExtractionWorkerProcess(
        None, None, None, configuration, name='TestWorker')

    # Status is available before a parser mediator is attached.
    status = process._GetStatus()
    self.assertIsNotNone(status)
    self.assertEqual(status['identifier'], 'TestWorker')
    self.assertEqual(status['last_activity_timestamp'], 0.0)

    task_storage_writer = self._CreateStorageWriter()
    knowledge_base = self._CreateKnowledgeBase()
    process._parser_mediator = self._CreateParserMediator(
        task_storage_writer, knowledge_base)

    # Status is unchanged after attaching a parser mediator.
    status = process._GetStatus()
    self.assertIsNotNone(status)
    self.assertEqual(status['identifier'], 'TestWorker')
    self.assertEqual(status['last_activity_timestamp'], 0.0)
def testProcessSourcesExtractWithExtensionsFilter(self):
  """Tests the ProcessSources function with an extensions filter."""
  output_writer = test_lib.TestOutputWriter(encoding='utf-8')
  test_tool = image_export_tool.ImageExportTool(output_writer=output_writer)

  options = test_lib.TestOptions()
  options.artifact_definitions_path = self._GetTestFilePath(['artifacts'])
  options.extensions_string = 'txt'
  options.image = self._GetTestFilePath(['image.qcow2'])
  options.quiet = True

  with shared_test_lib.TempDirectory() as temp_directory:
    options.path = temp_directory

    test_tool.ParseOptions(options)
    test_tool.ProcessSources()

    # Only the single .txt file should have been exported.
    expected_paths = sorted([
        os.path.join(temp_directory, 'passwords.txt')])

    exported_paths = self._RecursiveList(temp_directory)
    self.assertEqual(sorted(exported_paths), expected_paths)
def testExtractPathSpecsFileSystem(self):
  """Tests the ExtractPathSpecs function on the file system."""
  source_files = [
      self._GetTestFilePath(['syslog.bz2']),
      self._GetTestFilePath(['syslog.tgz']),
      self._GetTestFilePath(['syslog.zip']),
      self._GetTestFilePath(['wtmp.1'])]

  with shared_test_lib.TempDirectory() as temp_directory:
    # Stage the test files in a directory the extractor can walk.
    for source_file in source_files:
      shutil.copy(source_file, temp_directory)

    source_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_OS, location=temp_directory)

    resolver_context = context.Context()
    test_extractor = extractors.PathSpecExtractor()
    path_specs = list(test_extractor.ExtractPathSpecs(
        [source_path_spec], resolver_context=resolver_context))

    # One path specification per staged file.
    self.assertEqual(len(path_specs), 4)
def testGetEventTagByIdentifier(self):
  """Tests the GetEventTagByIdentifier function."""
  test_index = event_tag_index.EventTagIndex()

  with shared_test_lib.TempDirectory() as temp_directory:
    test_path = os.path.join(temp_directory, 'storage.plaso')
    self._CreateTestStorageFileWithTags(test_path)

    storage_file = sqlite_file.SQLiteStorageFile()
    storage_file.Open(path=test_path)

    # A tagged event yields its event tag.
    event_identifier = identifiers.SQLTableIdentifier('event', 1)
    event_tag = test_index.GetEventTagByIdentifier(
        storage_file, event_identifier)
    self.assertIsNotNone(event_tag)
    self.assertEqual(event_tag.comment, 'My comment')

    # An identifier without a tag yields None.
    event_identifier = identifiers.SQLTableIdentifier('event', 99)
    event_tag = test_index.GetEventTagByIdentifier(
        storage_file, event_identifier)
    self.assertIsNone(event_tag)

    storage_file.Close()
def testStartAndStopProfiling(self):
  """Tests the _StartProfiling and _StopProfiling functions."""
  with shared_test_lib.TempDirectory() as temp_directory:
    configuration = configurations.ProcessingConfiguration()
    configuration.profiling.directory = temp_directory
    configuration.profiling.profilers = set([
        'memory', 'parsers', 'processing', 'serializers', 'storage',
        'task_queue'])

    process = worker_process.WorkerProcess(
        None, None, None, None, None, configuration, name='TestWorker')
    process._extraction_worker = TestEventExtractionWorker()

    # Starting with no configuration is a no-op; starting with a real
    # configuration and stopping should both succeed.
    process._StartProfiling(None)
    process._StartProfiling(configuration.profiling)
    process._StopProfiling()
def testProcessSourcesPartitionedImage(self):
  """Tests the ProcessSources function on a multi partition image."""
  test_source = self._GetTestFilePath([u'multi_partition_image.vmdk'])

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_file_path = os.path.join(temp_directory, u'test.plaso')

    options = cli_test_lib.TestOptions()
    # TODO: refactor to partitions.
    options.partition_number = u'all'
    options.output = storage_file_path
    options.quiet = True
    options.single_process = True
    options.status_view_mode = u'none'
    options.source = test_source

    self._test_tool.ParseOptions(options)
    self._test_tool.ProcessSources()

    output = self._output_writer.ReadOutput()
    # TODO: print summary and compare that against output.
    _ = output
def testGetSortedEvents(self):
  """Tests the GetSortedEvents function."""
  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, 'plaso.sqlite')

    # Write phase: populate a new store with the test events.
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path, read_only=False)

    for event, event_data in containers_test_lib.CreateEventsFromValues(
        self._TEST_EVENTS):
      store.AddEventData(event_data)

      event.SetEventDataIdentifier(event_data.GetIdentifier())
      store.AddEvent(event)

    store.Close()

    # Read phase: reopen the store and retrieve the sorted events.
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path)

    sorted_events = list(store.GetSortedEvents())
    self.assertEqual(len(sorted_events), 4)

    store.Close()
def testWriteFileEntry(self):
  """Tests the WriteFileEntry function."""
  with test_lib.TempDirectory() as temp_directory:
    saved_stdout = sys.stdout
    output_path = os.path.join(temp_directory, 'hashes.txt')

    # Redirect stdout into a file so the writer's output can be captured.
    with io.open(output_path, mode='wt', encoding='utf-8') as file_object:
      sys.stdout = file_object

      output_writer = list_file_entries.StdoutWriter()
      output_writer.Open()
      output_writer.WriteFileEntry('/password.txt')
      output_writer.Close()

    sys.stdout = saved_stdout

    with io.open(output_path, mode='rb') as file_object:
      output = file_object.read()

  expected_output = '/password.txt'.encode('utf-8')
  self.assertEqual(output.rstrip(), expected_output)
def testMergeAttributeContainersWithDeserializationError(self):
  """Tests MergeAttributeContainers with a deserialization error."""
  session = sessions.Session()

  with shared_test_lib.TempDirectory() as temp_directory:
    task_storage_path = os.path.join(temp_directory, 'task.sqlite')
    self._CreateTaskStorageFile(
        session, task_storage_path,
        self._TEST_EVENTS_WITH_DESERIALIZATION_ERROR)

    session_storage_path = os.path.join(temp_directory, 'plaso.sqlite')
    storage_writer = writer.SQLiteStorageFileWriter(
        session, session_storage_path)

    merge_test_reader = merge_reader.SQLiteStorageMergeReader(
        storage_writer, task_storage_path)

    storage_writer.Open()

    # The merge completes despite the deserialization error.
    result = merge_test_reader.MergeAttributeContainers()
    self.assertTrue(result)

    storage_writer.Close()
def testHasAttributeContainers(self):
  """Tests the HasAttributeContainers function."""
  data_stream = events.EventDataStream()

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, 'plaso.sqlite')
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path, read_only=False)

    # No containers of this type before one is added.
    self.assertFalse(store.HasAttributeContainers(
        data_stream.CONTAINER_TYPE))

    store.AddAttributeContainer(data_stream)

    # The type is present after adding a container.
    self.assertTrue(store.HasAttributeContainers(
        data_stream.CONTAINER_TYPE))

    # An unknown container type is reported as absent.
    self.assertFalse(store.HasAttributeContainers('bogus'))

    store.Close()
def testGetAttributeContainerByIdentifier(self):
  """Tests the GetAttributeContainerByIdentifier function."""
  data_stream = events.EventDataStream()

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, 'plaso.sqlite')
    store = sqlite_file.SQLiteStorageFile()
    store.Open(path=storage_path, read_only=False)

    store.AddAttributeContainer(data_stream)
    identifier = data_stream.GetIdentifier()

    # The stored container is retrievable by its identifier.
    retrieved_container = store.GetAttributeContainerByIdentifier(
        data_stream.CONTAINER_TYPE, identifier)
    self.assertIsNotNone(retrieved_container)

    # A non-existent sequence number yields None.
    identifier.sequence_number = 99

    retrieved_container = store.GetAttributeContainerByIdentifier(
        data_stream.CONTAINER_TYPE, identifier)
    self.assertIsNone(retrieved_container)

    store.Close()
def testProcessSources(self):
  """Tests the ProcessSources function."""
  session = sessions.Session()
  test_front_end = extraction_frontend.ExtractionFrontend()

  test_file = self._GetTestFilePath([u'ímynd.dd'])
  volume_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/',
      parent=volume_path_spec)

  source_type = dfvfs_definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_file_path = os.path.join(temp_directory, u'storage.plaso')

    storage_writer = storage_zip_file.ZIPStorageFileWriter(
        session, storage_file_path)
    test_front_end.ProcessSources(
        session, storage_writer, [path_spec], source_type)

    storage_file = storage_zip_file.ZIPStorageFile()
    try:
      storage_file.Open(path=storage_file_path)
    except IOError:
      self.fail(u'Unable to open storage file after processing.')

    # Make sure we can read events from the storage.
    extracted_events = list(storage_file.GetEvents())
    self.assertNotEqual(len(extracted_events), 0)

    first_event = extracted_events[0]
    self.assertEqual(first_event.data_type, u'fs:stat')
    self.assertEqual(first_event.filename, u'/lost+found')
def testExtractEventsFromSourcesOnVSSImage(self):
  """Tests the ExtractEventsFromSources function on VSS image."""
  test_file_path = self._GetTestFilePath(['vsstest.qcow2'])
  self._SkipIfPathNotExists(test_file_path)

  options = self._CreateExtractionOptions(test_file_path)
  options.vss_stores = 'all'

  output_writer = test_lib.TestOutputWriter(encoding=self._OUTPUT_ENCODING)
  tool = log2timeline_tool.Log2TimelineTool(output_writer=output_writer)

  with shared_test_lib.TempDirectory() as temp_directory:
    options.storage_file = os.path.join(temp_directory, 'storage.plaso')
    options.storage_format = definitions.STORAGE_FORMAT_SQLITE
    options.task_storage_format = definitions.STORAGE_FORMAT_SQLITE

    tool.ParseOptions(options)
    tool.ExtractEventsFromSources()

    expected_output = [
        '',
        'Source path\t\t: {0:s}'.format(options.source),
        'Source type\t\t: storage media image',
        'Processing time\t\t: 00:00:00',
        '',
        'Processing started.',
        'Processing completed.',
        '',
        'Number of warnings generated while extracting events: 3.',
        '',
        'Use pinfo to inspect warnings in more detail.',
        '',
        '']

    output = output_writer.ReadOutput()
    self._CheckOutput(output, expected_output)
def testParseOptions(self):
  """Tests the ParseOptions function.

  Checks that BadConfigOption is raised when the output file or source
  path is missing, that parsing succeeds when both are present, and that
  an already existing output file is refused.
  """
  output_writer = test_lib.TestOutputWriter(encoding=u'utf-8')
  test_tool = psteal_tool.PstealTool(output_writer=output_writer)

  options = test_lib.TestOptions()
  options.artifact_definitions_path = self._GetTestFilePath([u'artifacts'])

  # Test when the output file is missing.
  # Note: assertRaisesRegexp is a deprecated alias that was removed in
  # Python 3.12; assertRaisesRegex is the supported name.
  expected_error = (u'Output format: dynamic requires an output file')
  with self.assertRaisesRegex(errors.BadConfigOption, expected_error):
    test_tool.ParseOptions(options)

  options.write = u'dynamic.out'

  # Test when the source is missing.
  expected_error = u'Missing source path.'
  with self.assertRaisesRegex(errors.BadConfigOption, expected_error):
    test_tool.ParseOptions(options)

  with shared_test_lib.TempDirectory() as temp_directory:
    options.source = self._GetTestFilePath([u'testdir'])
    options.write = os.path.join(temp_directory, u'dynamic.out')

    # Test when both source and output are specified.
    test_tool.ParseOptions(options)

    with open(options.write, 'w') as file_object:
      file_object.write(u'bogus')

    # Test when output file already exists.
    # Escape \ otherwise assertRaisesRegex can error with:
    # error: bogus escape: u'\\1'
    expected_error = u'Output file already exists: {0:s}.'.format(
        options.write.replace(u'\\', u'\\\\'))
    with self.assertRaisesRegex(errors.BadConfigOption, expected_error):
      test_tool.ParseOptions(options)
def testExtractEventsFromSourceVSSImage(self):
  """Tests the ExtractEventsFromSources function on an image with VSS."""
  output_writer = test_lib.TestOutputWriter(encoding=u'utf-8')
  test_tool = psteal_tool.PstealTool(output_writer=output_writer)

  options = test_lib.TestOptions()
  options.artifact_definitions_path = self._GetTestFilePath([u'artifacts'])
  options.quiet = True
  options.single_process = True
  options.status_view_mode = u'none'
  options.source = self._GetTestFilePath([u'vsstest.qcow2'])
  options.vss_stores = u'all'

  with shared_test_lib.TempDirectory() as temp_directory:
    options.write = os.path.join(temp_directory, u'unused_output.txt')
    options.storage_file = os.path.join(temp_directory, u'storage.plaso')

    test_tool.ParseOptions(options)

    test_tool.ExtractEventsFromSources()

    # Build the expected output as text: bytes objects have no format()
    # method in Python 3, so the former b'...'.format(...) construction
    # raised AttributeError before the comparison could run.
    expected_output = [
        u'',
        u'Source path\t: {0:s}'.format(options.source),
        u'Source type\t: storage media image',
        u'',
        u'Processing started.',
        u'Processing completed.',
        u'',
        u'Number of errors encountered while extracting events: 1.',
        u'',
        u'Use pinfo to inspect errors in more detail.',
        u'',
        u'']

    output = output_writer.ReadOutput()
    self.assertEqual(output.split(u'\n'), expected_output)
def testInternalExportEventsDeduplicate(self):
  """Tests the _ExportEvents function with deduplication."""
  knowledge_base_object = knowledge_base.KnowledgeBase()
  output_writer = cli_test_lib.TestOutputWriter()

  formatter_mediator = formatters_mediator.FormatterMediator()
  output_mediator_object = output_mediator.OutputMediator(
      knowledge_base_object, formatter_mediator)

  output_module = TestOutputModule(output_mediator_object)
  output_module.SetOutputWriter(output_writer)

  test_engine = psort.PsortMultiProcessEngine()

  formatters_manager.FormattersManager.RegisterFormatter(
      TestEventFormatter)

  with shared_test_lib.TempDirectory() as temp_directory:
    temp_file = os.path.join(temp_directory, 'storage.plaso')
    self._CreateTestStorageFile(temp_file)

    storage_reader = storage_zip_file.ZIPStorageFileReader(temp_file)
    storage_reader.ReadPreprocessingInformation(knowledge_base_object)

    test_engine._ExportEvents(storage_reader, output_module)

  formatters_manager.FormattersManager.DeregisterFormatter(
      TestEventFormatter)

  # Removed dead code: the previous version split ReadOutput() into a
  # `lines` list that no assertion ever used.
  self.assertEqual(len(output_module.events), 15)
  self.assertEqual(len(output_module.macb_groups), 3)
def testOutput(self):
  """Tests that events copied to a pstorage dump match the original."""
  with shared_test_lib.TempDirectory() as dirname:
    storage_file = os.path.join(dirname, u'plaso.plaso')

    # Copy events to pstorage dump.
    with storage.StorageFile(self.test_filename, read_only=True) as store:
      output_mediator = self._CreateOutputMediator(storage_object=store)
      formatter = pstorage.PlasoStorageOutputModule(output_mediator)
      formatter.SetFilehandle(storage_file)

      with interface.EventBuffer(
          formatter, check_dedups=False) as output_buffer:
        event_object = store.GetSortedEntry()
        while event_object:
          output_buffer.Append(event_object)
          event_object = store.GetSortedEntry()

    # Make sure original and dump have the same events.
    original = storage.StorageFile(self.test_filename, read_only=True)
    dump = storage.StorageFile(storage_file, read_only=True)

    # Drain each store independently: the previous version advanced both
    # in one loop keyed on the original, which crashed with an
    # AttributeError (EqualityString on None) when the dump ran short
    # and only caught a longer dump via assertFalse.
    original_list = []
    event_object_original = original.GetSortedEntry()
    while event_object_original:
      original_list.append(event_object_original.EqualityString())
      event_object_original = original.GetSortedEntry()

    dump_list = []
    event_object_dump = dump.GetSortedEntry()
    while event_object_dump:
      dump_list.append(event_object_dump.EqualityString())
      event_object_dump = dump.GetSortedEntry()

    # An explicit length check reports a mismatch clearly; zip below
    # would otherwise silently truncate to the shorter list.
    self.assertEqual(len(original_list), len(dump_list))

    for original_str, dump_str in zip(
        sorted(original_list), sorted(dump_list)):
      self.assertEqual(original_str, dump_str)
def testProcessSources(self):
  """Tests the PreprocessSources and ProcessSources function."""
  artifacts_path = shared_test_lib.GetTestFilePath(['artifacts'])
  self._SkipIfPathNotExists(artifacts_path)

  registry = artifacts_registry.ArtifactDefinitionsRegistry()
  reader = artifacts_reader.YamlArtifactsReader()
  registry.ReadFromDirectory(reader, artifacts_path)

  engine = task_engine.TaskMultiProcessEngine(
      maximum_number_of_tasks=100)

  test_file_path = self._GetTestFilePath(['ímynd.dd'])
  self._SkipIfPathNotExists(test_file_path)

  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  source_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location='/',
      parent=os_path_spec)

  engine.PreprocessSources(registry, [source_path_spec])

  session = sessions.Session()

  configuration = configurations.ProcessingConfiguration()
  configuration.parser_filter_expression = 'filestat'
  configuration.task_storage_format = definitions.STORAGE_FORMAT_SQLITE

  with shared_test_lib.TempDirectory() as temp_directory:
    storage_path = os.path.join(temp_directory, 'storage.plaso')
    session_writer = sqlite_writer.SQLiteStorageFileWriter(
        session, storage_path)

    engine.ProcessSources(
        session, [source_path_spec], session_writer, configuration)
def testInternalExportEvents(self):
  """Tests the _ExportEvents function."""
  knowledge_base_object = knowledge_base.KnowledgeBase()

  formatter_mediator = formatters_mediator.FormatterMediator()
  output_mediator_object = output_mediator.OutputMediator(
      knowledge_base_object, formatter_mediator)

  output_module = TestOutputModule(output_mediator_object)

  test_engine = psort.PsortMultiProcessEngine()

  formatters_manager.FormattersManager.RegisterFormatter(
      formatters_test_lib.TestEventFormatter)

  try:
    with shared_test_lib.TempDirectory() as temp_directory:
      temp_file = os.path.join(temp_directory, 'storage.plaso')
      self._CreateTestStorageFile(temp_file)
      self._ReadSessionConfiguration(temp_file, knowledge_base_object)

      storage_reader = (
          storage_factory.StorageFactory.CreateStorageReaderForFile(
              temp_file))
      storage_reader.ReadSystemConfiguration(knowledge_base_object)

      test_engine._ExportEvents(
          storage_reader, output_module, deduplicate_events=False)

  finally:
    # The formatter must be deregistered even if the export fails.
    formatters_manager.FormattersManager.DeregisterFormatter(
        formatters_test_lib.TestEventFormatter)

  self.assertEqual(len(output_module.events), 17)
  self.assertEqual(len(output_module.macb_groups), 3)
def testWriteHeader(self):
  """Tests the WriteHeader function."""
  expected_header = [
      'datetime', 'timestamp_desc', 'source', 'source_long',
      'message', 'parser', 'display_name', 'tag']

  with shared_test_lib.TempDirectory() as temp_directory:
    output_mediator = self._CreateOutputMediator()
    output_module = xlsx.XLSXOutputModule(output_mediator)

    output_path = os.path.join(temp_directory, 'xlsx.out')
    output_module.Open(path=output_path)
    output_module.WriteHeader()
    output_module.WriteFooter()
    output_module.Close()

    try:
      rows = self._GetSheetRows(output_path)
    except ValueError as exception:
      self.fail(exception)

    # The first sheet row must be the column header.
    self.assertEqual(expected_header, rows[0])
def testExtractEventsFromSourceSingleFile(self):
  """Tests the ExtractEventsFromSources function on a single file."""
  test_artifacts_path = self._GetTestFilePath(['artifacts'])
  self._SkipIfPathNotExists(test_artifacts_path)

  test_file_path = self._GetTestFilePath(['System.evtx'])
  self._SkipIfPathNotExists(test_file_path)

  output_writer = test_lib.TestOutputWriter(encoding='utf-8')
  tool = psteal_tool.PstealTool(output_writer=output_writer)

  options = test_lib.TestOptions()
  options.artifact_definitions_path = test_artifacts_path
  options.quiet = True
  options.status_view_mode = 'none'
  options.source = test_file_path

  with shared_test_lib.TempDirectory() as temp_directory:
    options.log_file = os.path.join(temp_directory, 'output.log')
    options.storage_file = os.path.join(temp_directory, 'storage.plaso')
    options.write = os.path.join(temp_directory, 'output.txt')

    tool.ParseOptions(options)
    tool.ExtractEventsFromSources()

    expected_output = [
        '',
        'Source path\t\t: {0:s}'.format(options.source),
        'Source type\t\t: single file',
        'Processing time\t\t: 00:00:00',
        '',
        'Processing started.',
        'Processing completed.',
        '',
        '']

    output = output_writer.ReadOutput()
    self._CheckOutput(output, expected_output)
def testProcessSourcesExtractWithArtifactsGroupFilter(self):
  """Tests the ProcessSources function with a group artifacts filter file."""
  test_artifacts_path = self._GetTestFilePath(['artifacts'])
  self._SkipIfPathNotExists(test_artifacts_path)

  test_file_path = self._GetTestFilePath(['image.qcow2'])
  self._SkipIfPathNotExists(test_file_path)

  output_writer = test_lib.TestOutputWriter(encoding='utf-8')
  tool = image_export_tool.ImageExportTool(output_writer=output_writer)

  options = test_lib.TestOptions()
  options.artifact_definitions_path = test_artifacts_path
  options.image = test_file_path
  options.quiet = True
  options.artifact_filter_string = 'TestGroupExport'

  with shared_test_lib.TempDirectory() as temp_directory:
    options.path = temp_directory

    tool.ParseOptions(options)
    tool.ProcessSources()

    # Every path the group artifact definition should export.
    expected_paths = sorted([
        os.path.join(temp_directory, 'a_directory'),
        os.path.join(temp_directory, 'a_directory', 'another_file'),
        os.path.join(temp_directory, 'a_directory', 'a_file'),
        os.path.join(temp_directory, 'passwords.txt'),
        os.path.join(temp_directory, 'hashes.json')])

    exported_paths = self._RecursiveList(temp_directory)
    self.assertEqual(sorted(exported_paths), expected_paths)
def testWriteStringTables(self):
  """Tests the _WriteStringTables function."""
  message_resource_file = resource_file.MessageResourceFile(
      'C:\\Windows\\System32\\wrc_test.dll')

  test_file_path = self._GetTestFilePath(['wrc_test.dll'])
  with open(test_file_path, 'rb') as file_object:
    message_resource_file.OpenFileObject(file_object)

    database_writer = database.MessageFileSqlite3DatabaseWriter(
        message_resource_file)

    with shared_test_lib.TempDirectory() as temp_directory:
      # Use a distinct name for the database path so it does not shadow
      # the test DLL path above.
      database_path = os.path.join(temp_directory, 'message_file.db')
      database_writer.Open(database_path)

      database_writer._WriteMessageFile(message_resource_file)
      database_writer._WriteStringTables()

      database_writer.Close()

    message_resource_file.Close()
def testExtractEventsFromSourcesWithFilestat(self):
  """Tests the ExtractEventsFromSources function with filestat parser."""
  output_writer = test_lib.TestOutputWriter(encoding=self._OUTPUT_ENCODING)
  tool = log2timeline_tool.Log2TimelineTool(output_writer=output_writer)

  source_path = self._GetTestFilePath(['test_pe.exe'])
  options = self._CreateExtractionOptions(source_path)
  options.parsers = 'filestat,pe'

  with shared_test_lib.TempDirectory() as temp_directory:
    options.storage_file = os.path.join(temp_directory, 'storage.plaso')
    options.storage_format = definitions.STORAGE_FORMAT_SQLITE
    options.task_storage_format = definitions.STORAGE_FORMAT_SQLITE

    tool.ParseOptions(options)
    tool.ExtractEventsFromSources()

    storage_file = sqlite_file.SQLiteStorageFile()
    try:
      storage_file.Open(path=options.storage_file, read_only=True)
    except IOError as exception:
      self.fail((
          'Unable to open storage file after processing with error: '
          '{0!s}.').format(exception))

    # There should be 3 filestat and 3 pe parser generated events.
    # Typically there are 3 filestat events, but there can be 4 on platforms
    # that support os.stat_result st_birthtime.
    expected_event_counters = {
        'fs:stat': [3, 4],
        'pe:delay_import:import_time': 1,
        'pe:import:import_time': 1,
        'pe:compilation:compilation_time': 1}

    self.CheckEventCounters(storage_file, expected_event_counters)
def testProcessSourcesExtractWithSignaturesFilter(self):
  """Tests the ProcessSources function with a signatures filter."""
  output_writer = test_lib.TestOutputWriter(encoding='utf-8')
  tool = image_export_tool.ImageExportTool(output_writer=output_writer)

  options = test_lib.TestOptions()
  options.artifact_definitions_path = self._GetTestFilePath(['artifacts'])
  options.image = self._GetTestFilePath(['syslog_image.dd'])
  options.quiet = True
  options.signature_identifiers = 'gzip'

  with shared_test_lib.TempDirectory() as temp_directory:
    options.path = temp_directory

    tool.ParseOptions(options)
    tool.ProcessSources()

    # Only the gzip-signature file (and its containing directory) should
    # have been exported.
    expected_paths = sorted([
        os.path.join(temp_directory, 'logs'),
        os.path.join(temp_directory, 'logs', 'sys.tgz')])

    exported_paths = self._RecursiveList(temp_directory)
    self.assertEqual(sorted(exported_paths), expected_paths)
def testProcessSourcesExtractWithDateTimeFilter(self):
  """Tests the ProcessSources function with a date time filter."""
  output_writer = test_lib.TestOutputWriter(encoding='utf-8')
  tool = image_export_tool.ImageExportTool(output_writer=output_writer)

  options = test_lib.TestOptions()
  options.artifact_definitions_path = self._GetTestFilePath(['artifacts'])
  options.date_filters = ['ctime,2012-05-25 15:59:00,2012-05-25 15:59:20']
  options.image = self._GetTestFilePath(['image.qcow2'])
  options.quiet = True

  with shared_test_lib.TempDirectory() as temp_directory:
    options.path = temp_directory

    tool.ParseOptions(options)
    tool.ProcessSources()

    # Only the entries whose ctime falls inside the filter window.
    expected_paths = sorted([
        os.path.join(temp_directory, 'a_directory'),
        os.path.join(temp_directory, 'a_directory', 'a_file')])

    exported_paths = self._RecursiveList(temp_directory)
    self.assertEqual(sorted(exported_paths), expected_paths)
def testMergeAttributeContainers(self):
  """Tests the MergeAttributeContainers function."""
  session = sessions.Session()

  with shared_test_lib.TempDirectory() as temp_directory:
    task_storage_path = os.path.join(temp_directory, 'task.sqlite')
    self._CreateTaskStorageFile(task_storage_path, self._TEST_EVENTS)

    session_storage_path = os.path.join(temp_directory, 'plaso.sqlite')
    storage_writer = sqlite_writer.SQLiteStorageFileWriter(
        session_storage_path)
    task_storage_reader = sqlite_reader.SQLiteStorageFileReader(
        task_storage_path)
    test_reader = merge_reader.StorageMergeReader(
        session, storage_writer, task_storage_reader)

    # Open the session writer at the session storage path: the previous
    # version opened it at task_storage_path, which would merge the task
    # store into itself instead of into the session store.
    storage_writer.Open(path=session_storage_path)

    result = test_reader.MergeAttributeContainers()
    self.assertTrue(result)

    storage_writer.Close()