def _CreateOutputModule(self):
  """Creates the default output module.

  A linear output module requires an output file and is connected to a
  file-backed output writer; other module types need no writer here.

  Raises:
    BadConfigOption: when the output filename has not been set or the
        output file already exists.
  """
  self._output_module = self._analysis_front_end.CreateOutputModule(
      self._output_format, preferred_encoding=self.preferred_encoding,
      timezone=self._timezone)

  if not isinstance(
      self._output_module, output_interface.LinearOutputModule):
    return

  if not self._output_filename:
    raise errors.BadConfigOption(
        u'Output format: {0:s} requires an output file.'.format(
            self._output_format))

  if self._output_filename and os.path.exists(self._output_filename):
    raise errors.BadConfigOption(
        u'Output file already exists: {0:s}. Aborting.'.format(
            self._output_filename))

  file_object = open(self._output_filename, u'wb')
  writer = cli_tools.FileObjectOutputWriter(file_object)
  self._output_module.SetOutputWriter(writer)
def _CreateOutputModule(self, options):
  """Creates the output module.

  Args:
    options (argparse.Namespace): command line arguments.

  Returns:
    OutputModule: output module.

  Raises:
    RuntimeError: if the preferred language identifier is not supported or
        if the output module cannot be created.
  """
  formatter_mediator = formatters_mediator.FormatterMediator(
      data_location=self._data_location)

  try:
    formatter_mediator.SetPreferredLanguageIdentifier(
        self._preferred_language)
  except (KeyError, TypeError) as exception:
    raise RuntimeError(exception)

  mediator = output_mediator.OutputMediator(
      self._knowledge_base, formatter_mediator,
      preferred_encoding=self.preferred_encoding)
  mediator.SetTimezone(self._preferred_time_zone)

  try:
    output_module = output_manager.OutputManager.NewOutputModule(
        self._output_format, mediator)

  except (KeyError, ValueError) as exception:
    raise RuntimeError(
        'Unable to create output module with error: {0!s}'.format(
            exception))

  # Linear output modules write to a file-backed output writer.
  # NOTE(review): the file is opened here without an existence check —
  # presumably validated earlier during option parsing; confirm against
  # the caller.
  if output_manager.OutputManager.IsLinearOutputModule(self._output_format):
    output_file_object = open(self._output_filename, 'wb')
    output_writer = tools.FileObjectOutputWriter(output_file_object)
    output_module.SetOutputWriter(output_writer)

  helpers_manager.ArgumentHelperManager.ParseOptions(options, output_module)

  # Check if there are parameters that have not been defined and need to
  # in order for the output module to continue. Prompt user to supply
  # those that may be missing.
  missing_parameters = output_module.GetMissingArguments()
  while missing_parameters:
    for parameter in missing_parameters:
      value = self._PromptUserForInput(
          'Missing parameter {0:s} for output module'.format(parameter))
      if value is None:
        logger.warning(
            'Unable to set the missing parameter for: {0:s}'.format(
                parameter))
        continue

      setattr(options, parameter, value)

    # Re-parse the options so newly supplied values reach the module.
    helpers_manager.ArgumentHelperManager.ParseOptions(
        options, output_module)
    missing_parameters = output_module.GetMissingArguments()

  return output_module
def ParseOptions(self, options):
  """Parses the options.

  Args:
    options (argparse.Namespace): command line arguments.

  Raises:
    BadConfigOption: if the options are invalid.
  """
  self._ParseInformationalOptions(options)

  self._verbose = getattr(options, 'verbose', False)

  self._sections = getattr(options, 'sections', '')

  # 'list' is a reserved value that requests listing available sections.
  self.list_sections = self._sections == 'list'

  self.show_troubleshooting = getattr(options, 'show_troubleshooting', False)
  # No further validation is needed when only listing sections or showing
  # troubleshooting information.
  if self.list_sections or self.show_troubleshooting:
    return

  # 'all' is kept as a string sentinel; any other value is treated as a
  # comma separated list of section names.
  if self._sections != 'all':
    self._sections = self._sections.split(',')

  self._output_filename = getattr(options, 'write', None)

  argument_helper_names = ['process_resources', 'storage_file']
  helpers_manager.ArgumentHelperManager.ParseOptions(
      options, self, names=argument_helper_names)

  # TODO: move check into _CheckStorageFile.
  if not self._storage_file_path:
    raise errors.BadConfigOption('Missing storage file option.')

  if not os.path.isfile(self._storage_file_path):
    raise errors.BadConfigOption(
        'No such storage file: {0:s}.'.format(self._storage_file_path))

  compare_storage_file_path = self.ParseStringOption(
      options, 'compare_storage_file')
  if compare_storage_file_path:
    if not os.path.isfile(compare_storage_file_path):
      raise errors.BadConfigOption(
          'No such storage file: {0:s}.'.format(compare_storage_file_path))

    self._compare_storage_file_path = compare_storage_file_path
    self.compare_storage_information = True

  self._output_format = self.ParseStringOption(options, 'output_format')

  if self._output_filename:
    if os.path.exists(self._output_filename):
      raise errors.BadConfigOption(
          'Output file already exists: {0:s}.'.format(self._output_filename))
    output_file_object = open(self._output_filename, 'wb')
    self._output_writer = tools.FileObjectOutputWriter(output_file_object)

  self._EnforceProcessMemoryLimit(self._process_memory_limit)
def ParseOptions(self, options):
  """Parses the options.

  Args:
    options (argparse.Namespace): command line arguments.

  Raises:
    BadConfigOption: if the options are invalid.
  """
  self._ParseInformationalOptions(options)

  # Map the informational flags to a logging level; debug wins over quiet.
  if self._debug_mode:
    logging_level = logging.DEBUG
  elif self._quiet_mode:
    logging_level = logging.WARNING
  else:
    logging_level = logging.INFO

  self._ConfigureLogging(log_level=logging_level)

  self._verbose = getattr(options, u'verbose', False)

  self._output_filename = getattr(options, u'write', None)

  helpers_manager.ArgumentHelperManager.ParseOptions(
      options, self, names=[u'storage_file'])

  # TODO: move check into _CheckStorageFile.
  if not self._storage_file_path:
    raise errors.BadConfigOption(u'Missing storage file option.')

  if not os.path.isfile(self._storage_file_path):
    raise errors.BadConfigOption(
        u'No such storage file: {0:s}.'.format(
            self._storage_file_path))

  compare_storage_file_path = self.ParseStringOption(
      options, u'compare_storage_file')
  if compare_storage_file_path:
    if not os.path.isfile(compare_storage_file_path):
      raise errors.BadConfigOption(
          u'No such storage file: {0:s}.'.format(
              compare_storage_file_path))

    self._compare_storage_file_path = compare_storage_file_path
    self.compare_storage_information = True

  self._output_format = self.ParseStringOption(options, u'output_format')

  if self._output_filename:
    if os.path.exists(self._output_filename):
      raise errors.BadConfigOption(
          u'Output file already exists: {0:s}.'.format(
              self._output_filename))
    output_file_object = open(self._output_filename, u'wb')
    self._output_writer = tools.FileObjectOutputWriter(
        output_file_object)
def _ParseOutputModuleOptions(self, options):
  """Parses the output module options.

  Args:
    options (argparse.Namespace): command line arguments.

  Raises:
    BadConfigOption: if the options are invalid.
    RuntimeError: if the preferred language identifier is not supported or
        if the output module cannot be created.
  """
  preferred_time_zone = self._preferred_time_zone or u'UTC'

  formatter_mediator = formatters_mediator.FormatterMediator(
      data_location=self._data_location)

  try:
    formatter_mediator.SetPreferredLanguageIdentifier(
        self._preferred_language)
  except (KeyError, TypeError) as exception:
    raise RuntimeError(exception)

  output_mediator_object = output_mediator.OutputMediator(
      self._knowledge_base, formatter_mediator,
      preferred_encoding=self.preferred_encoding)
  output_mediator_object.SetTimezone(preferred_time_zone)

  try:
    self._output_module = output_manager.OutputManager.NewOutputModule(
        self._output_format, output_mediator_object)

  # NewOutputModule raises KeyError or ValueError for an unknown or
  # misconfigured output format (as handled by the sibling
  # _CreateOutputModule methods); catch these in addition to IOError so
  # the failure is reported instead of propagating unhandled.
  except (IOError, KeyError, ValueError) as exception:
    # Use the !s conversion: formatting an exception instance with the
    # :s format spec raises TypeError on Python 3 and masks the original
    # error.
    raise RuntimeError(
        u'Unable to create output module with error: {0!s}'.format(
            exception))

  if not self._output_module:
    raise RuntimeError(u'Missing output module.')

  # Linear output modules require an output file.
  if isinstance(self._output_module, output_interface.LinearOutputModule):
    if not self._output_filename:
      raise errors.BadConfigOption(
          (u'Output format: {0:s} requires an output file').format(
              self._output_format))

    if os.path.exists(self._output_filename):
      raise errors.BadConfigOption(
          u'Output file already exists: {0:s}.'.format(
              self._output_filename))

    output_file_object = open(self._output_filename, u'wb')
    output_writer = tools.FileObjectOutputWriter(output_file_object)
    self._output_module.SetOutputWriter(output_writer)

  helpers_manager.ArgumentHelperManager.ParseOptions(
      options, self._output_module)
def testWriteUtf8(self):
  """Tests the Write function with UTF-8 encoding."""
  stream = io.BytesIO()
  writer = tools.FileObjectOutputWriter(stream)

  # Plain ASCII text.
  writer.Write('A first string\n')
  self.assertEqual(self._ReadOutput(stream), b'A first string\n')

  # Unicode string with non-ASCII characters.
  writer.Write('þriðja string\n')
  self.assertEqual(
      self._ReadOutput(stream), b'\xc3\xberi\xc3\xb0ja string\n')
def testWriteAscii(self):
  """Tests the Write function with ASCII encoding."""
  stream = io.BytesIO()
  writer = tools.FileObjectOutputWriter(stream, encoding='ascii')

  # Plain ASCII text.
  writer.Write('A first string\n')
  self.assertEqual(self._ReadOutput(stream), b'A first string\n')

  # Unicode string with non-ASCII characters; the expected output shows
  # unencodable characters replaced by '?'.
  writer.Write('þriðja string\n')
  self.assertEqual(self._ReadOutput(stream), b'?ri?ja string\n')
def testPrintTSKPartitionIdentifiersOverview(self):
  """Tests the _PrintTSKPartitionIdentifiersOverview function."""
  test_file_path = self._GetTestFilePath(['tsk_volume_system.raw'])
  self._SkipIfPathNotExists(test_file_path)

  # Build an OS -> RAW -> TSK partition path specification chain for the
  # test image.
  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  test_raw_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_RAW, parent=test_os_path_spec)
  test_tsk_partition_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION,
      parent=test_raw_path_spec)

  volume_system = tsk_volume_system.TSKVolumeSystem()
  volume_system.Open(test_tsk_partition_path_spec)

  # Capture the mediator output in an in-memory stream.
  file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(file_object)

  test_mediator = storage_media_tool.StorageMediaToolMediator(
      output_writer=test_output_writer)

  test_mediator._PrintTSKPartitionIdentifiersOverview(
      volume_system, ['p1', 'p2'])

  file_object.seek(0, os.SEEK_SET)
  output_data = file_object.read()

  expected_output_data = [
      b'The following partitions were found:',
      b'',
      b'Identifier Offset (in bytes) Size (in bytes)',
      (b'p1 512 (0x00000200) 175.0KiB / 179.2kB '
       b'(179200 B)'),
      b'p2 180224 (0x0002c000) 1.2MiB / 1.3MB (1294336 B)',
      b'',
      b'']

  # On non-Windows consoles the header row is wrapped in ANSI bold escape
  # sequences.
  if not win32console:
    # Using join here since Python 3 does not support format of bytes.
    expected_output_data[2] = b''.join([
        b'\x1b[1m', expected_output_data[2], b'\x1b[0m'])

  self.assertEqual(output_data.split(b'\n'), expected_output_data)
def testPrintAPFSVolumeIdentifiersOverview(self):
  """Tests the _PrintAPFSVolumeIdentifiersOverview function."""
  test_file_path = self._GetTestFilePath(['apfs.dmg'])
  self._SkipIfPathNotExists(test_file_path)

  # Build an OS -> RAW -> GPT partition -> APFS container path
  # specification chain for the test image.
  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  test_raw_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_RAW, parent=test_os_path_spec)
  test_tsk_partition_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.PREFERRED_GPT_BACK_END, location='/p1',
      parent=test_raw_path_spec)
  test_apfs_container_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_APFS_CONTAINER, location='/',
      parent=test_tsk_partition_path_spec)

  volume_system = apfs_volume_system.APFSVolumeSystem()
  volume_system.Open(test_apfs_container_path_spec)

  # Capture the mediator output in an in-memory stream.
  file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(file_object)

  test_mediator = storage_media_tool.StorageMediaToolMediator(
      output_writer=test_output_writer)

  test_mediator._PrintAPFSVolumeIdentifiersOverview(volume_system, ['apfs1'])

  file_object.seek(0, os.SEEK_SET)
  output_data = file_object.read()

  expected_output_data = [
      b'The following Apple File System (APFS) volumes were found:',
      b'',
      b'Identifier Name',
      b'apfs1 SingleVolume',
      b'',
      b'']

  # On non-Windows consoles the header row is wrapped in ANSI bold escape
  # sequences.
  if not win32console:
    # Using join here since Python 3 does not support format of bytes.
    expected_output_data[2] = b''.join([
        b'\x1b[1m', expected_output_data[2], b'\x1b[0m'])

  self.assertEqual(output_data.split(b'\n'), expected_output_data)
def testPrintLVMVolumeIdentifiersOverview(self):
  """Tests the _PrintLVMVolumeIdentifiersOverview function."""
  test_file_path = self._GetTestFilePath(['lvm.raw'])
  self._SkipIfPathNotExists(test_file_path)

  # Build an OS -> RAW -> LVM path specification chain for the test image.
  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  test_raw_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_RAW, parent=test_os_path_spec)
  test_lvm_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_LVM, location='/',
      parent=test_raw_path_spec)

  volume_system = lvm_volume_system.LVMVolumeSystem()
  volume_system.Open(test_lvm_path_spec)

  # Capture the mediator output in an in-memory stream.
  file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(file_object)

  test_mediator = storage_media_tool.StorageMediaToolMediator(
      output_writer=test_output_writer)

  test_mediator._PrintLVMVolumeIdentifiersOverview(
      volume_system, ['lvm1', 'lvm2'])

  file_object.seek(0, os.SEEK_SET)
  output_data = file_object.read()

  expected_output_data = [
      b'The following Logical Volume Manager (LVM) volumes were found:',
      b'',
      b'Identifier',
      b'lvm1',
      b'lvm2',
      b'',
      b'']

  # On non-Windows consoles the header row is wrapped in ANSI bold escape
  # sequences.
  if not win32console:
    # Using join here since Python 3 does not support format of bytes.
    expected_output_data[2] = b''.join([
        b'\x1b[1m', expected_output_data[2], b'\x1b[0m'])

  self.assertEqual(output_data.split(b'\n'), expected_output_data)
def testPrintVSSStoreIdentifiersOverview(self):
  """Tests the _PrintVSSStoreIdentifiersOverview function."""
  test_file_path = self._GetTestFilePath(['vsstest.qcow2'])
  self._SkipIfPathNotExists(test_file_path)

  # Build an OS -> QCOW -> VSHADOW path specification chain for the test
  # image.
  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  test_qcow_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_QCOW, parent=test_os_path_spec)
  test_vss_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_VSHADOW, parent=test_qcow_path_spec)

  volume_system = vshadow_volume_system.VShadowVolumeSystem()
  volume_system.Open(test_vss_path_spec)

  # Capture the mediator output in an in-memory stream.
  file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(file_object)

  test_mediator = storage_media_tool.StorageMediaToolMediator(
      output_writer=test_output_writer)

  test_mediator._PrintVSSStoreIdentifiersOverview(
      volume_system, ['vss1', 'vss2'])

  file_object.seek(0, os.SEEK_SET)
  output_data = file_object.read()

  expected_output_data = [
      b'The following Volume Shadow Snapshots (VSS) were found:',
      b'',
      b'Identifier Creation Time',
      b'vss1 2013-12-03 06:35:09.7363787',
      b'vss2 2013-12-03 06:37:48.9190583',
      b'',
      b'']

  # On non-Windows consoles the header row is wrapped in ANSI bold escape
  # sequences.
  if not win32console:
    # Using join here since Python 3 does not support format of bytes.
    expected_output_data[2] = b''.join([
        b'\x1b[1m', expected_output_data[2], b'\x1b[0m'])

  self.assertEqual(output_data.split(b'\n'), expected_output_data)
def ProcessStorage(self, options):
  """Opens a storage file and processes the events within.

  Optionally runs analysis plugins in separate processes and feeds them
  events via multiprocessing queues while output is being produced.

  Args:
    options: the command line arguments (instance of argparse.Namespace).

  Returns:
    A counter (instance of collections.Counter) of processed events.

  Raises:
    RuntimeError: if a non-recoverable situation is encountered.
  """
  counter = None

  # An optional time slice limits output to events around a specific
  # timestamp.
  slice_option = getattr(options, u'slice', None)
  if slice_option:
    timezone = getattr(options, u'timezone', u'UTC')
    if timezone == u'UTC':
      zone = pytz.UTC
    else:
      zone = pytz.timezone(timezone)

    timestamp = timelib.Timestamp.FromTimeString(slice_option, timezone=zone)

    # Convert number of minutes to microseconds.
    range_operator = self._slice_size * 60 * 1000000

    # Set the time range.
    pfilter.TimeRangeCache.SetLowerTimestamp(timestamp - range_operator)
    pfilter.TimeRangeCache.SetUpperTimestamp(timestamp + range_operator)

  # Analysis plugins write tags/reports back, hence the storage must be
  # opened writable when plugins are requested.
  analysis_plugins = getattr(options, u'analysis_plugins', u'')
  if analysis_plugins:
    read_only = False
  else:
    read_only = True

  analysis_plugins_output_format = getattr(
      options, u'windows-services-output', u'text')

  try:
    storage_file = self.OpenStorageFile(read_only=read_only)
  except IOError as exception:
    # NOTE(review): formatting an exception with the :s format spec raises
    # TypeError on Python 3; this is Python 2 era code.
    raise RuntimeError(
        u'Unable to open storage file: {0:s} with error: {1:s}.'.format(
            self._storage_file_path, exception))

  with storage_file:
    storage_file.SetStoreLimit(self._filter_object)

    formatter_mediator = self.GetFormatterMediator()

    try:
      formatter_mediator.SetPreferredLanguageIdentifier(
          self._preferred_language)
    except (KeyError, TypeError) as exception:
      raise RuntimeError(exception)

    output_mediator_object = output_mediator.OutputMediator(
        formatter_mediator, storage_file, config=options)

    kwargs = {}
    # TODO: refactor this to use CLI argument helpers.
    # Route output depending on the output format: some formats take a
    # filename, some need no writer at all, the rest get a writer.
    if self._output_format in [u'pstorage', u'sql4n6']:
      kwargs[u'filehandle'] = self._output_filename
    elif self._output_format not in [u'elastic', u'timesketch']:
      if self._output_filename:
        self._output_file_object = open(self._output_filename, 'wb')
        kwargs[u'output_writer'] = cli_tools.FileObjectOutputWriter(
            self._output_file_object)
      else:
        kwargs[u'output_writer'] = self._output_writer

    try:
      output_module = output_manager.OutputManager.NewOutputModule(
          self._output_format, output_mediator_object, **kwargs)
    except IOError as exception:
      raise RuntimeError(
          u'Unable to create output module with error: {0:s}'.format(
              exception))

    if not output_module:
      raise RuntimeError(u'Missing output module.')

    if analysis_plugins:
      logging.info(u'Starting analysis plugins.')
      # Within all preprocessing objects, try to get the last one that has
      # time zone information stored in it, the highest chance of it
      # containing the information we are seeking (defaulting to the last
      # one).
      pre_objs = storage_file.GetStorageInformation()
      pre_obj = pre_objs[-1]
      for obj in pre_objs:
        if getattr(obj, u'time_zone_str', u''):
          pre_obj = obj

      # Fill in the collection information.
      pre_obj.collection_information = {}
      encoding = getattr(pre_obj, u'preferred_encoding', None)
      if encoding:
        cmd_line = u' '.join(sys.argv)
        try:
          pre_obj.collection_information[u'cmd_line'] = cmd_line.decode(
              encoding)
        except UnicodeDecodeError:
          pass
      pre_obj.collection_information[u'file_processed'] = (
          self._storage_file_path)
      pre_obj.collection_information[u'method'] = u'Running Analysis Plugins'
      pre_obj.collection_information[u'plugins'] = analysis_plugins
      time_of_run = timelib.Timestamp.GetNow()
      pre_obj.collection_information[u'time_of_run'] = time_of_run

      pre_obj.counter = collections.Counter()

      # Assign the preprocessing object to the storage.
      # This is normally done in the construction of the storage object,
      # however we cannot do that here since the preprocessing object is
      # stored inside the storage file, so we need to open it first to
      # be able to read it in, before we make changes to it. Thus we need
      # to access this protected member of the class.
      # pylint: disable=protected-access
      storage_file._pre_obj = pre_obj

      # Start queues and load up plugins.
      # TODO: add upper queue limit.
      analysis_output_queue = multi_process.MultiProcessingQueue()
      event_queue_producers = []
      event_queues = []
      analysis_plugins_list = [
          name.strip() for name in analysis_plugins.split(u',')]

      # One event queue (and producer) per requested plugin.
      for _ in xrange(0, len(analysis_plugins_list)):
        # TODO: add upper queue limit.
        analysis_plugin_queue = multi_process.MultiProcessingQueue()
        event_queues.append(analysis_plugin_queue)
        event_queue_producers.append(
            queue.ItemQueueProducer(event_queues[-1]))

      knowledge_base_object = knowledge_base.KnowledgeBase()

      # NOTE: analysis_plugins is rebound here from the comma separated
      # option string to the list of loaded plugin objects.
      analysis_plugins = analysis_manager.AnalysisPluginManager.LoadPlugins(
          analysis_plugins_list, event_queues, options=options)

      # Now we need to start all the plugins.
      for analysis_plugin in analysis_plugins:
        analysis_report_queue_producer = queue.ItemQueueProducer(
            analysis_output_queue)
        analysis_mediator_object = analysis_mediator.AnalysisMediator(
            analysis_report_queue_producer, knowledge_base_object,
            output_format=analysis_plugins_output_format)
        analysis_process = multiprocessing.Process(
            name=u'Analysis {0:s}'.format(analysis_plugin.plugin_name),
            target=analysis_plugin.RunPlugin,
            args=(analysis_mediator_object,))
        self._analysis_processes.append(analysis_process)

        analysis_process.start()
        logging.info(u'Plugin: [{0:s}] started.'.format(
            analysis_plugin.plugin_name))
    else:
      event_queue_producers = []

    deduplicate_events = getattr(options, u'dedup', True)
    output_buffer = output_interface.EventBuffer(
        output_module, deduplicate_events)
    with output_buffer:
      counter = self.ProcessOutput(
          storage_file, output_buffer, my_filter=self._filter_object,
          filter_buffer=self._filter_buffer,
          analysis_queues=event_queue_producers)

    for information in storage_file.GetStorageInformation():
      if hasattr(information, u'counter'):
        counter[u'Stored Events'] += information.counter[u'total']

    if not getattr(options, u'quiet', False):
      logging.info(u'Output processing is done.')

    # Get all reports and tags from analysis plugins.
    if analysis_plugins:
      logging.info(u'Processing data from analysis plugins.')
      for event_queue_producer in event_queue_producers:
        event_queue_producer.SignalEndOfInput()

      # Wait for all analysis plugins to complete.
      for number, analysis_process in enumerate(self._analysis_processes):
        logging.debug(
            u'Waiting for analysis plugin: {0:d} to complete.'.format(
                number))
        if analysis_process.is_alive():
          analysis_process.join(10)
        else:
          logging.warning(u'Plugin {0:d} already stopped.'.format(number))
          analysis_process.terminate()
      logging.debug(u'All analysis plugins are now stopped.')

      # Close the output queue.
      analysis_output_queue.SignalEndOfInput()

      # Go over each output.
      analysis_queue_consumer = PsortAnalysisReportQueueConsumer(
          analysis_output_queue, storage_file, self._filter_expression,
          self.preferred_encoding)
      analysis_queue_consumer.ConsumeItems()

      if analysis_queue_consumer.tags:
        storage_file.StoreTagging(analysis_queue_consumer.tags)

      # TODO: analysis_queue_consumer.anomalies:

      for item, value in analysis_queue_consumer.counter.iteritems():
        counter[item] = value

  if self._output_file_object:
    self._output_file_object.close()
    self._output_file_object = None

  if self._filter_object and not counter[u'Limited By']:
    counter[u'Filter By Date'] = (
        counter[u'Stored Events'] - counter[u'Events Included'] -
        counter[u'Events Filtered Out'])

  return counter
def _ProcessStorage(self):
  """Processes a plaso storage file.

  Raises:
    BadConfigOption: when a configuration parameter fails validation.
    RuntimeError: if a non-recoverable situation is encountered.
  """
  # Analysis plugins write back to the storage; open writable when any
  # were requested.
  if self._analysis_plugins:
    read_only = False
  else:
    read_only = True

  try:
    storage_file = self._front_end.OpenStorage(
        self._storage_file_path, read_only=read_only)
  except IOError as exception:
    # NOTE(review): formatting an exception with the :s format spec raises
    # TypeError on Python 3; this is Python 2 era code.
    raise RuntimeError(
        u'Unable to open storage file: {0:s} with error: {1:s}.'.format(
            self._storage_file_path, exception))

  output_module = self._front_end.GetOutputModule(
      storage_file, preferred_encoding=self.preferred_encoding,
      timezone=self._timezone)

  # Linear output modules need a writer: file-backed when an output
  # filename was given, otherwise stdout.
  if isinstance(output_module, output_interface.LinearOutputModule):
    if self._output_filename:
      output_file_object = open(self._output_filename, u'wb')
      output_writer = cli_tools.FileObjectOutputWriter(output_file_object)
    else:
      output_writer = cli_tools.StdoutOutputWriter()
    output_module.SetOutputWriter(output_writer)

  helpers_manager.ArgumentHelperManager.ParseOptions(
      self._options, output_module)

  # Check if there are parameters that have not been defined and need to
  # in order for the output module to continue. Prompt user to supply
  # those that may be missing.
  missing_parameters = output_module.GetMissingArguments()
  while missing_parameters:
    # TODO: refactor this.
    configuration_object = PsortOptions()
    setattr(configuration_object, u'output_format', output_module.NAME)
    for parameter in missing_parameters:
      value = self._PromptUserForInput(
          u'Missing parameter {0:s} for output module'.format(parameter))
      if value is None:
        logging.warning(
            u'Unable to set the missing parameter for: {0:s}'.format(
                parameter))
        continue

      setattr(configuration_object, parameter, value)

    helpers_manager.ArgumentHelperManager.ParseOptions(
        configuration_object, output_module)
    missing_parameters = output_module.GetMissingArguments()

  # TODO: fix or remove this comment.
  # Get ANALYSIS PLUGINS AND CONFIGURE!
  get_plugins_and_producers = self._front_end.GetAnalysisPluginsAndEventQueues
  analysis_plugins, event_queue_producers = get_plugins_and_producers(
      self._analysis_plugins)

  for analysis_plugin in analysis_plugins:
    helpers_manager.ArgumentHelperManager.ParseOptions(
        self._options, analysis_plugin)

  # Ensure the storage file is closed even when processing fails.
  try:
    counter = self._front_end.ProcessStorage(
        output_module, storage_file, self._storage_file_path,
        analysis_plugins, event_queue_producers,
        command_line_arguments=self._command_line_arguments,
        deduplicate_events=self._deduplicate_events,
        preferred_encoding=self.preferred_encoding,
        time_slice=self._time_slice,
        use_time_slicer=self._use_time_slicer)
  finally:
    storage_file.Close()

  if not self._quiet_mode:
    self._output_writer.Write(u'Processing completed.\n')

    table_view = cli_views.ViewsFactory.GetTableView(
        self._views_format_type, title=u'Counter')
    for element, count in counter.most_common():
      table_view.AddRow([element, count])
    table_view.Write(self._output_writer)
def ProcessStorage(self):
  """Processes a plaso storage file.

  Raises:
    BadConfigOption: when a configuration parameter fails validation.
    RuntimeError: if a non-recoverable situation is encountered.
  """
  preferred_time_zone = self._preferred_time_zone or u'UTC'

  output_module = self._front_end.CreateOutputModule(
      self._output_format, preferred_encoding=self.preferred_encoding,
      timezone=preferred_time_zone)

  # Linear output modules require a dedicated, not yet existing, output
  # file.
  if isinstance(output_module, output_interface.LinearOutputModule):
    if not self._output_filename:
      raise errors.BadConfigOption((
          u'Output format: {0:s} requires an output file').format(
              self._output_format))

    if self._output_filename and os.path.exists(self._output_filename):
      raise errors.BadConfigOption((
          u'Output file already exists: {0:s}. Aborting.').format(
              self._output_filename))

    output_file_object = open(self._output_filename, u'wb')
    output_writer = cli_tools.FileObjectOutputWriter(output_file_object)
    output_module.SetOutputWriter(output_writer)

  helpers_manager.ArgumentHelperManager.ParseOptions(
      self._options, output_module)

  # Check if there are parameters that have not been defined and need to
  # in order for the output module to continue. Prompt user to supply
  # those that may be missing.
  missing_parameters = output_module.GetMissingArguments()
  while missing_parameters:
    # TODO: refactor this.
    configuration_object = PsortOptions()
    setattr(configuration_object, u'output_format', output_module.NAME)
    for parameter in missing_parameters:
      value = self._PromptUserForInput(
          u'Missing parameter {0:s} for output module'.format(parameter))
      if value is None:
        logging.warning(
            u'Unable to set the missing parameter for: {0:s}'.format(
                parameter))
        continue

      setattr(configuration_object, parameter, value)

    helpers_manager.ArgumentHelperManager.ParseOptions(
        configuration_object, output_module)
    missing_parameters = output_module.GetMissingArguments()

  analysis_plugins = self._front_end.GetAnalysisPlugins(
      self._analysis_plugins)
  for analysis_plugin in analysis_plugins:
    helpers_manager.ArgumentHelperManager.ParseOptions(
        self._options, analysis_plugin)

  # Select the status update callback matching the configured view mode.
  if self._status_view_mode == u'linear':
    status_update_callback = self._PrintStatusUpdateStream
  elif self._status_view_mode == u'window':
    status_update_callback = self._PrintStatusUpdate
  else:
    status_update_callback = None

  session = self._front_end.CreateSession(
      command_line_arguments=self._command_line_arguments,
      preferred_encoding=self.preferred_encoding)

  storage_reader = self._front_end.CreateStorageReader(
      self._storage_file_path)
  self._number_of_analysis_reports = (
      storage_reader.GetNumberOfAnalysisReports())
  storage_reader.Close()

  configuration = configurations.ProcessingConfiguration()
  configuration.data_location = self._options.data_location

  if analysis_plugins:
    storage_writer = self._front_end.CreateStorageWriter(
        session, self._storage_file_path)
    # TODO: handle errors.BadConfigOption
    self._front_end.AnalyzeEvents(
        storage_writer, analysis_plugins, configuration,
        status_update_callback=status_update_callback)

  counter = collections.Counter()
  # The 'null' output format suppresses event export entirely.
  if self._output_format != u'null':
    storage_reader = self._front_end.CreateStorageReader(
        self._storage_file_path)

    events_counter = self._front_end.ExportEvents(
        storage_reader, output_module, configuration,
        deduplicate_events=self._deduplicate_events,
        status_update_callback=status_update_callback,
        time_slice=self._time_slice,
        use_time_slicer=self._use_time_slicer)

    counter += events_counter

  for item, value in iter(session.analysis_reports_counter.items()):
    counter[item] = value

  if self._quiet_mode:
    return

  self._output_writer.Write(u'Processing completed.\n')

  table_view = cli_views.ViewsFactory.GetTableView(
      self._views_format_type, title=u'Counter')
  for element, count in counter.most_common():
    if not element:
      element = u'N/A'
    table_view.AddRow([element, count])
  table_view.Write(self._output_writer)

  storage_reader = self._front_end.CreateStorageReader(
      self._storage_file_path)
  self._PrintAnalysisReportsDetails(storage_reader)
def _ProcessStorage(self):
  """Processes a plaso storage file.

  Raises:
    RuntimeError: if a non-recoverable situation is encountered.
  """
  # Determine an optional time slice from the command line options.
  time_slice = None
  if self._time_slice_event_time_string is not None or self._use_time_slicer:
    time_slice = self._front_end.GetTimeSlice(
        self._time_slice_event_time_string,
        duration=self._time_slice_duration, timezone=self._timezone)

  # Analysis plugins write back to the storage; open writable when any
  # were requested.
  if self._analysis_plugins:
    read_only = False
  else:
    read_only = True

  try:
    storage_file = self._front_end.OpenStorage(
        self._storage_file_path, read_only=read_only)
  except IOError as exception:
    # NOTE(review): formatting an exception with the :s format spec raises
    # TypeError on Python 3; this is Python 2 era code.
    raise RuntimeError(
        u'Unable to open storage file: {0:s} with error: {1:s}.'.format(
            self._storage_file_path, exception))

  output_module = self._front_end.GetOutputModule(
      storage_file, preferred_encoding=self.preferred_encoding,
      timezone=self._timezone)

  # Linear output modules need a writer: file-backed when an output
  # filename was given, otherwise stdout.
  if isinstance(output_module, output_interface.LinearOutputModule):
    if self._output_filename:
      output_file_object = open(self._output_filename, u'wb')
      output_writer = cli_tools.FileObjectOutputWriter(output_file_object)
    else:
      output_writer = cli_tools.StdoutOutputWriter()
    output_module.SetOutputWriter(output_writer)

  # TODO: To set the filter we need to have the filter object. This may
  # be better handled in an argument helper, but ATM the argument helper
  # does not have access to the actual filter object.
  if hasattr(output_module, u'SetFieldsFilter') and self._filter_object:
    output_module.SetFieldsFilter(self._filter_object)

  try:
    helpers_manager.ArgumentHelperManager.ParseOptions(
        self._options, output_module)
  except errors.BadConfigOption as exception:
    raise RuntimeError(exception)

  # Check if there are parameters that have not been defined and need to
  # in order for the output module to continue. Prompt user to supply
  # those that may be missing.
  missing_parameters = output_module.GetMissingArguments()
  while missing_parameters:
    # TODO: refactor this.
    configuration_object = PsortOptions()
    setattr(configuration_object, u'output_format', output_module.NAME)
    for parameter in missing_parameters:
      value = self._PromptUserForInput(
          u'Missing parameter {0:s} for output module'.format(parameter))
      if value is None:
        logging.warning(
            u'Unable to set the missing parameter for: {0:s}'.format(
                parameter))
        continue

      setattr(configuration_object, parameter, value)

    helpers_manager.ArgumentHelperManager.ParseOptions(
        configuration_object, output_module)
    missing_parameters = output_module.GetMissingArguments()

  # Get ANALYSIS PLUGINS AND CONFIGURE!
  get_plugins_and_producers = self._front_end.GetAnalysisPluginsAndEventQueues
  analysis_plugins, event_queue_producers = get_plugins_and_producers(
      self._analysis_plugins)

  for analysis_plugin in analysis_plugins:
    helpers_manager.ArgumentHelperManager.ParseOptions(
        self._options, analysis_plugin)

  counter = self._front_end.ProcessStorage(
      output_module, storage_file, analysis_plugins, event_queue_producers,
      deduplicate_events=self._deduplicate_events,
      preferred_encoding=self.preferred_encoding,
      time_slice=time_slice, use_time_slicer=self._use_time_slicer)

  if not self._quiet_mode:
    self.PrintHeader(u'Counter')
    for element, count in counter.most_common():
      self.PrintColumnValue(element, u'{0:d}'.format(count))
def _CreateOutputModule(self, options):
  """Creates the output module.

  Args:
    options (argparse.Namespace): command line arguments.

  Returns:
    OutputModule: output module.

  Raises:
    RuntimeError: if the output module cannot be created or if required
        output module parameters are missing while running unattended.
  """
  format_mediator = formatters_mediator.FormatterMediator(
      data_location=self._data_location)

  try:
    format_mediator.SetPreferredLanguageIdentifier(self._preferred_language)
  except (KeyError, TypeError) as exception:
    raise RuntimeError(exception)

  module_mediator = output_mediator.OutputMediator(
      self._knowledge_base, format_mediator,
      data_location=self._data_location,
      preferred_encoding=self.preferred_encoding)
  module_mediator.SetTimezone(self._output_time_zone)

  try:
    output_module = output_manager.OutputManager.NewOutputModule(
        self._output_format, module_mediator)
  except (KeyError, ValueError) as exception:
    raise RuntimeError(
        'Unable to create output module with error: {0!s}'.format(exception))

  # Linear output modules write to a file and need an explicit writer.
  is_linear = output_manager.OutputManager.IsLinearOutputModule(
      self._output_format)
  if is_linear:
    file_object = open(self._output_filename, 'wb')
    writer = tools.FileObjectOutputWriter(file_object)
    output_module.SetOutputWriter(writer)

  helpers_manager.ArgumentHelperManager.ParseOptions(options, output_module)

  # Determine which parameters the output module still needs; when running
  # unattended there is no way to prompt for them, so give up immediately.
  missing_parameters = output_module.GetMissingArguments()
  if missing_parameters and self._unattended_mode:
    raise RuntimeError(
        'Unable to create output module missing parameters: {0:s}'.format(
            ', '.join(missing_parameters)))

  # Otherwise keep prompting the user until every parameter is supplied.
  while missing_parameters:
    self._PromptUserForMissingOutputModuleParameters(
        options, missing_parameters)

    helpers_manager.ArgumentHelperManager.ParseOptions(options, output_module)
    missing_parameters = output_module.GetMissingArguments()

  return output_module
def testPromptUserForVSSStoreIdentifiers(self):
  """Tests the _PromptUserForVSSStoreIdentifiers function."""
  test_path = self._GetTestFilePath(['vsstest.qcow2'])
  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)
  test_qcow_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_QCOW, parent=test_os_path_spec)
  test_vss_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_VSHADOW, parent=test_qcow_path_spec)

  volume_system = vshadow_volume_system.VShadowVolumeSystem()
  volume_system.Open(test_vss_path_spec)

  # Test selection of a single store by index.
  input_file_object = io.BytesIO(b'2\n')
  test_input_reader = tools.FileObjectInputReader(input_file_object)

  output_file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(output_file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      input_reader=test_input_reader, output_writer=test_output_writer)

  volume_identifiers = test_tool._PromptUserForVSSStoreIdentifiers(
      volume_system, ['vss1', 'vss2'])

  self.assertEqual(volume_identifiers, ['vss2'])

  # Test selection of a single store by identifier.
  input_file_object = io.BytesIO(b'vss2\n')
  test_input_reader = tools.FileObjectInputReader(input_file_object)

  output_file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(output_file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      input_reader=test_input_reader, output_writer=test_output_writer)

  volume_identifiers = test_tool._PromptUserForVSSStoreIdentifiers(
      volume_system, ['vss1', 'vss2'])

  self.assertEqual(volume_identifiers, ['vss2'])

  # Test selection of a single store with invalid input on first attempt.
  input_file_object = io.BytesIO(b'bogus\nvss2\n')
  test_input_reader = tools.FileObjectInputReader(input_file_object)

  output_file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(output_file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      input_reader=test_input_reader, output_writer=test_output_writer)

  volume_identifiers = test_tool._PromptUserForVSSStoreIdentifiers(
      volume_system, ['vss1', 'vss2'])

  self.assertEqual(volume_identifiers, ['vss2'])

  # Test selection of all stores.
  input_file_object = io.BytesIO(b'all\n')
  test_input_reader = tools.FileObjectInputReader(input_file_object)

  output_file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(output_file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      input_reader=test_input_reader, output_writer=test_output_writer)

  volume_identifiers = test_tool._PromptUserForVSSStoreIdentifiers(
      volume_system, ['vss1', 'vss2'])

  self.assertEqual(volume_identifiers, ['vss1', 'vss2'])

  # Test selection of no stores.
  input_file_object = io.BytesIO(b'\n')
  test_input_reader = tools.FileObjectInputReader(input_file_object)

  output_file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(output_file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      input_reader=test_input_reader, output_writer=test_output_writer)

  volume_identifiers = test_tool._PromptUserForVSSStoreIdentifiers(
      volume_system, ['vss1', 'vss2'])

  self.assertEqual(volume_identifiers, [])
def testPromptUserForPartitionIdentifiers(self):
  """Tests the _PromptUserForPartitionIdentifiers function."""
  test_path = self._GetTestFilePath(['tsk_volume_system.raw'])
  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)
  test_raw_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_RAW, parent=test_os_path_spec)
  test_tsk_partition_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION,
      parent=test_raw_path_spec)

  volume_system = tsk_volume_system.TSKVolumeSystem()
  volume_system.Open(test_tsk_partition_path_spec)

  # Test selection of a single partition by index.
  input_file_object = io.BytesIO(b'2\n')
  test_input_reader = tools.FileObjectInputReader(input_file_object)

  output_file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(output_file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      input_reader=test_input_reader, output_writer=test_output_writer)

  volume_identifiers = test_tool._PromptUserForPartitionIdentifiers(
      volume_system, ['p1', 'p2'])

  self.assertEqual(volume_identifiers, ['p2'])

  # Test selection of a single partition by identifier.
  input_file_object = io.BytesIO(b'p2\n')
  test_input_reader = tools.FileObjectInputReader(input_file_object)

  output_file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(output_file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      input_reader=test_input_reader, output_writer=test_output_writer)

  volume_identifiers = test_tool._PromptUserForPartitionIdentifiers(
      volume_system, ['p1', 'p2'])

  self.assertEqual(volume_identifiers, ['p2'])

  # Test selection of a single partition with invalid input on first attempt.
  input_file_object = io.BytesIO(b'bogus\np2\n')
  test_input_reader = tools.FileObjectInputReader(input_file_object)

  output_file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(output_file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      input_reader=test_input_reader, output_writer=test_output_writer)

  volume_identifiers = test_tool._PromptUserForPartitionIdentifiers(
      volume_system, ['p1', 'p2'])

  self.assertEqual(volume_identifiers, ['p2'])

  # Test selection of all partitions.
  input_file_object = io.BytesIO(b'all\n')
  test_input_reader = tools.FileObjectInputReader(input_file_object)

  output_file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(output_file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      input_reader=test_input_reader, output_writer=test_output_writer)

  volume_identifiers = test_tool._PromptUserForPartitionIdentifiers(
      volume_system, ['p1', 'p2'])

  self.assertEqual(volume_identifiers, ['p1', 'p2'])
def testPromptUserForAPFSVolumeIdentifiers(self):
  """Tests the _PromptUserForAPFSVolumeIdentifiers function."""
  test_path = self._GetTestFilePath(['apfs.dmg'])
  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_path)
  test_raw_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_RAW, parent=test_os_path_spec)
  test_tsk_partition_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION, location='/p1',
      parent=test_raw_path_spec)
  test_apfs_container_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_APFS_CONTAINER, location='/',
      parent=test_tsk_partition_path_spec)

  volume_system = apfs_volume_system.APFSVolumeSystem()
  volume_system.Open(test_apfs_container_path_spec)

  def _RunPrompt(user_input):
    """Runs the prompt with canned byte input and returns the selection."""
    prompt_input_reader = tools.FileObjectInputReader(io.BytesIO(user_input))
    prompt_output_writer = tools.FileObjectOutputWriter(io.BytesIO())
    prompt_tool = storage_media_tool.StorageMediaTool(
        input_reader=prompt_input_reader, output_writer=prompt_output_writer)
    return prompt_tool._PromptUserForAPFSVolumeIdentifiers(
        volume_system, ['apfs1'])

  # Test selection of a single volume by index.
  self.assertEqual(_RunPrompt(b'1\n'), ['apfs1'])

  # Test selection of a single volume by identifier.
  self.assertEqual(_RunPrompt(b'apfs1\n'), ['apfs1'])

  # Test selection of a single volume with invalid input on first attempt.
  self.assertEqual(_RunPrompt(b'bogus\napfs1\n'), ['apfs1'])

  # Test selection of all volumes.
  self.assertEqual(_RunPrompt(b'all\n'), ['apfs1'])

  # Test selection of no volumes.
  self.assertEqual(_RunPrompt(b'\n'), [])
def testGetLVMVolumeIdentifiers(self):
  """Tests the GetLVMVolumeIdentifiers function."""
  test_file_path = self._GetTestFilePath(['lvm.raw'])
  self._SkipIfPathNotExists(test_file_path)

  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  test_raw_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_RAW, parent=test_os_path_spec)
  test_lvm_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_LVM, location='/',
      parent=test_raw_path_spec)

  volume_system = lvm_volume_system.LVMVolumeSystem()
  volume_system.Open(test_lvm_path_spec)

  def _RunPrompt(user_input, volume_identifiers):
    """Runs the mediator with canned byte input and returns the selection."""
    prompt_input_reader = tools.FileObjectInputReader(io.BytesIO(user_input))
    prompt_output_writer = tools.FileObjectOutputWriter(io.BytesIO())
    prompt_mediator = storage_media_tool.StorageMediaToolMediator(
        input_reader=prompt_input_reader, output_writer=prompt_output_writer)
    return prompt_mediator.GetLVMVolumeIdentifiers(
        volume_system, volume_identifiers)

  # Test selection of a single volume by index.
  self.assertEqual(_RunPrompt(b'1\n', ['lvm1']), ['lvm1'])

  # Test selection of a single volume by identifier.
  self.assertEqual(_RunPrompt(b'lvm1\n', ['lvm1']), ['lvm1'])

  # Test selection of a single volume with invalid input on first attempt.
  self.assertEqual(_RunPrompt(b'bogus\nlvm1\n', ['lvm1']), ['lvm1'])

  # Test selection of all volumes.
  self.assertEqual(
      _RunPrompt(b'all\n', ['lvm1', 'lvm2']), ['lvm1', 'lvm2'])

  # Test selection of no volumes.
  self.assertEqual(_RunPrompt(b'\n', ['lvm1']), [])