def testProcessSourceExtractWithFilter(self):
  """Tests extract with a filter file."""
  front_end = image_export.ImageExportFrontend()

  # Write the collection filter the front-end should apply.
  filter_path = os.path.join(self._temp_directory, u'filter.txt')
  with open(filter_path, 'wb') as filter_file:
    filter_file.write(b'/a_directory/.+_file\n')

  options = frontend.Options()
  options.image = self._GetTestFilePath([u'image.qcow2'])
  options.path = self._temp_directory
  options.filter = filter_path

  front_end.ParseOptions(options, source_option=u'image')
  front_end.ProcessSource(options)

  # The filter file itself lives in the output directory, so it shows up
  # in the recursive listing alongside the extracted files.
  directory = os.path.join(self._temp_directory, u'a_directory')
  expected_files = sorted([
      filter_path,
      directory,
      os.path.join(directory, u'another_file'),
      os.path.join(directory, u'a_file')])

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def testProcessSourcesExtractWithFilter(self):
  """Tests the ProcessSources function with a filter file."""
  front_end = image_export.ImageExportFrontend()

  # Write the collection filter the front-end should apply.
  filter_file = os.path.join(self._temp_directory, u'filter.txt')
  with open(filter_file, 'wb') as file_object:
    file_object.write(b'/a_directory/.+_file\n')

  # Build the OS -> QCOW -> TSK path specification chain by rebinding.
  source_path = self._GetTestFilePath([u'image.qcow2'])
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_QCOW, parent=path_spec)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/', parent=path_spec)

  front_end.ProcessSources(
      [path_spec], self._temp_directory, filter_file=filter_file)

  directory = os.path.join(self._temp_directory, u'a_directory')
  expected_files = sorted([
      filter_file,
      directory,
      os.path.join(directory, u'another_file'),
      os.path.join(directory, u'a_file')])

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def testProcessSourceExtractWithDateTimeFilter(self):
  """Tests extract with a date time filter."""
  front_end = image_export.ImageExportFrontend()

  options = frontend.Options()
  options.image = self._GetTestFilePath([u'image.qcow2'])
  options.path = self._temp_directory
  options.include_duplicates = True
  options.date_filters = [
      u'ctime, 2012-05-25 15:59:00, 2012-05-25 15:59:20']

  front_end.ParseOptions(options, source_option=u'image')
  front_end.PrintFilterCollection()
  front_end.ProcessSource(options)

  # Only the file whose ctime falls inside the range should be extracted.
  directory = os.path.join(self._temp_directory, u'a_directory')
  expected_files = sorted([directory, os.path.join(directory, u'a_file')])

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def testProcessSourceExtractWithDateTimeFilter(self):
  """Tests extract with a date time filter."""
  front_end = image_export.ImageExportFrontend()
  source_path = self._GetTestFilePath([u'image.qcow2'])

  front_end.ParseDateFilters([
      u'ctime, 2012-05-25 15:59:00, 2012-05-25 15:59:20'])

  # Check that the parsed filter is printed back as expected.
  output_writer = test_lib.StringIOOutputWriter()
  front_end.PrintFilterCollection(output_writer)
  self.assertEqual(output_writer.GetValue(), (
      u'Filters:\n'
      u'\tctime between 2012-05-25T15:59:00+00:00 and '
      u'2012-05-25T15:59:20+00:00\n'))

  front_end.ScanSource(source_path)
  front_end.ProcessSource(self._temp_directory)

  # Only the file whose ctime falls inside the range should be extracted.
  directory = os.path.join(self._temp_directory, u'a_directory')
  expected_files = sorted([directory, os.path.join(directory, u'a_file')])

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def testProcessSourcesExtractWithSignaturesFilter(self):
  """Tests the ProcessSources function with a signatures filter."""
  output_writer = cli_test_lib.TestOutputWriter(encoding=u'utf-8')
  front_end = image_export.ImageExportFrontend()
  front_end.ParseSignatureIdentifiers(self._DATA_PATH, u'gzip')

  # Build the OS -> TSK path specification chain by rebinding.
  source_path = self._GetTestFilePath([u'syslog_image.dd'])
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/', parent=path_spec)

  with shared_test_lib.TempDirectory() as temp_directory:
    front_end.ProcessSources([path_spec], temp_directory, output_writer)

    logs_directory = os.path.join(temp_directory, u'logs')
    expected_files = sorted([
        logs_directory, os.path.join(logs_directory, u'sys.tgz')])

    self.assertEqual(
        sorted(self._RecursiveList(temp_directory)), expected_files)
def testProcessSourcesExtractWithDateTimeFilter(self):
  """Tests the ProcessSources function with a date time filter."""
  front_end = image_export.ImageExportFrontend()
  front_end.ParseDateFilters([
      u'ctime, 2012-05-25 15:59:00, 2012-05-25 15:59:20'])

  # TODO: move to corresponding CLI test.
  output_writer = test_lib.StringIOOutputWriter()
  front_end.PrintFilterCollection(output_writer)
  self.assertEqual(output_writer.GetValue(), (
      u'Filters:\n'
      u'\tctime between 2012-05-25T15:59:00+00:00 and '
      u'2012-05-25T15:59:20+00:00\n'))

  # Build the OS -> QCOW -> TSK path specification chain by rebinding.
  source_path = self._GetTestFilePath([u'image.qcow2'])
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_QCOW, parent=path_spec)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/', parent=path_spec)

  front_end.ProcessSources([path_spec], self._temp_directory)

  # Only the file whose ctime falls inside the range should be extracted.
  directory = os.path.join(self._temp_directory, u'a_directory')
  expected_files = sorted([directory, os.path.join(directory, u'a_file')])

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def testProcessSourcesExtractWithExtensionsFilter(self):
  """Tests the ProcessSources function with an extensions filter."""
  output_writer = cli_test_lib.TestOutputWriter(encoding=u'utf-8')
  front_end = image_export.ImageExportFrontend()
  front_end.ParseExtensionsString(u'txt')

  # Build the OS -> QCOW -> TSK path specification chain by rebinding.
  source_path = self._GetTestFilePath([u'image.qcow2'])
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_QCOW, parent=path_spec)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/', parent=path_spec)

  with shared_test_lib.TempDirectory() as temp_directory:
    front_end.ProcessSources([path_spec], temp_directory, output_writer)

    expected_files = [os.path.join(temp_directory, u'passwords.txt')]

    self.assertEqual(
        sorted(self._RecursiveList(temp_directory)), expected_files)
def testProcessSourcesExtractWithNamesFilter(self):
  """Tests the ProcessSources function with a names filter."""
  front_end = image_export.ImageExportFrontend()
  front_end.ParseNamesString(u'another_file')

  # Build the OS -> QCOW -> TSK path specification chain by rebinding.
  source_path = self._GetTestFilePath([u'image.qcow2'])
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_QCOW, parent=path_spec)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location=u'/', parent=path_spec)

  front_end.ProcessSources([path_spec], self._temp_directory)

  directory = os.path.join(self._temp_directory, u'a_directory')
  expected_files = sorted([
      directory, os.path.join(directory, u'another_file')])

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def testCalculateDigestHash(self):
  """Tests the _CalculateDigestHash function."""
  front_end = image_export.ImageExportFrontend()
  source_path = self._GetTestFilePath([u'ímynd.dd'])

  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)

  # A regular file should produce the known digest value.
  file_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, inode=16,
      location=u'/a_directory/another_file', parent=os_path_spec)

  file_entry = path_spec_resolver.Resolver.OpenFileEntry(file_path_spec)
  digest_hash = front_end._CalculateDigestHash(file_entry, u'')
  self.assertEqual(digest_hash, (
      u'c7fbc0e821c0871805a99584c6a384533909f68a6bbe9a2a687d28d9f3b10c16'))

  # Hashing a directory entry is expected to raise IOError.
  directory_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, inode=12,
      location=u'/a_directory', parent=os_path_spec)

  file_entry = path_spec_resolver.Resolver.OpenFileEntry(directory_path_spec)
  with self.assertRaises(IOError):
    front_end._CalculateDigestHash(file_entry, u'')
def testHasFilters(self):
  """Tests the HasFilters function."""
  # Without any filter configured.
  front_end = image_export.ImageExportFrontend()
  self.assertFalse(front_end.HasFilters())

  # With a date filter.
  front_end = image_export.ImageExportFrontend()
  front_end.ParseDateFilters(
      [u'ctime, 2012-05-25 15:59:00, 2012-05-25 15:59:20'])
  self.assertTrue(front_end.HasFilters())

  # With an extensions filter.
  front_end = image_export.ImageExportFrontend()
  front_end.ParseExtensionsString(u'txt')
  self.assertTrue(front_end.HasFilters())

  # With a names filter.
  front_end = image_export.ImageExportFrontend()
  front_end.ParseNamesString(u'another_file')
  self.assertTrue(front_end.HasFilters())

  # With a signatures filter.
  front_end = image_export.ImageExportFrontend()
  front_end.ParseSignatureIdentifiers(self._DATA_PATH, u'gzip')
  self.assertTrue(front_end.HasFilters())
def testParseSignatureIdentifiers(self):
  """Tests the ParseSignatureIdentifiers function."""
  front_end = image_export.ImageExportFrontend()

  front_end.ParseSignatureIdentifiers(self._DATA_PATH, u'gzip')

  # A missing data location should raise a ValueError.
  with self.assertRaises(ValueError):
    front_end.ParseSignatureIdentifiers(None, u'gzip')

  # A non-existing data location should raise an IOError.
  with self.assertRaises(IOError):
    bogus_path = os.path.join(os.sep, u'bogus')
    front_end.ParseSignatureIdentifiers(bogus_path, u'gzip')
def testParseDateFilters(self):
  """Tests the ParseDateFilters function."""
  front_end = image_export.ImageExportFrontend()

  front_end.ParseDateFilters(
      [u'ctime, 2012-05-25 15:59:00, 2012-05-25 15:59:20'])

  # A filter definition with a missing date time value should raise
  # a ValueError.
  with self.assertRaises(ValueError):
    front_end.ParseDateFilters([u'ctime, 2012-05-25 15:59:00'])

  # A badly formatted date time value should raise a ValueError.
  with self.assertRaises(ValueError):
    front_end.ParseDateFilters(
        [u'ctime, 2012-05-25 15:59:00, 2012-05-A5 15:59:20'])
def testProcessSourceExtractWithNamesFilter(self):
  """Tests extract with a names filter."""
  front_end = image_export.ImageExportFrontend()
  source_path = self._GetTestFilePath([u'image.qcow2'])

  front_end.ParseNamesString(u'another_file')
  front_end.ScanSource(source_path)
  front_end.ProcessSource(self._temp_directory)

  directory = os.path.join(self._temp_directory, u'a_directory')
  expected_files = sorted([
      directory, os.path.join(directory, u'another_file')])

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def testProcessSourceExtractWithSignaturesFilter(self):
  """Tests extract with a signatures filter."""
  front_end = image_export.ImageExportFrontend()
  source_path = self._GetTestFilePath([u'syslog_image.dd'])

  front_end.ParseSignatureIdentifiers(self._DATA_PATH, u'gzip')
  front_end.ScanSource(source_path)
  front_end.ProcessSource(self._temp_directory)

  logs_directory = os.path.join(self._temp_directory, u'logs')
  expected_files = sorted([
      logs_directory, os.path.join(logs_directory, u'sys.tgz')])

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def __init__(self, input_reader=None, output_writer=None):
  """Initializes the CLI tool object.

  Args:
    input_reader (Optional[InputReader]): input reader, where None
        indicates that the stdin input reader should be used.
    output_writer (Optional[OutputWriter]): output writer, where None
        indicates that the stdout output writer should be used.
  """
  super(ImageExportTool, self).__init__(
      input_reader=input_reader, output_writer=output_writer)
  # Front-end that performs the actual export work.
  self._front_end = image_export.ImageExportFrontend()
  self._destination_path = None
  self._filter_file = None
  self._skip_duplicates = True
  self.has_filters = False
  self.list_signature_identifiers = False
def testExtractFileEntry(self):
  """Tests the _ExtractFileEntry function."""
  output_writer = cli_test_lib.TestOutputWriter(encoding=u'utf-8')
  front_end = image_export.ImageExportFrontend()

  # Build the OS -> TSK path specification chain by rebinding.
  source_path = self._GetTestFilePath([u'ímynd.dd'])
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, inode=16,
      location=u'/a_directory/another_file', parent=path_spec)

  with shared_test_lib.TempDirectory() as temp_directory:
    front_end._ExtractFileEntry(path_spec, temp_directory, output_writer)
def testWriteFileEntry(self):
  """Tests the _WriteFileEntry function."""
  front_end = image_export.ImageExportFrontend()

  # Build the OS -> TSK path specification chain by rebinding.
  source_path = self._GetTestFilePath([u'ímynd.dd'])
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=source_path)
  path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, inode=16,
      location=u'/a_directory/another_file', parent=path_spec)

  file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)

  with shared_test_lib.TempDirectory() as temp_directory:
    destination_path = os.path.join(temp_directory, u'another_file')
    front_end._WriteFileEntry(file_entry, u'', destination_path)
def testProcessSourceExtractWithExtensionsFilter(self):
  """Tests extract with an extensions filter."""
  front_end = image_export.ImageExportFrontend()

  options = frontend.Options()
  options.image = self._GetTestFilePath([u'image.qcow2'])
  options.path = self._temp_directory
  options.extensions_string = u'txt'

  front_end.ParseOptions(options, source_option=u'image')
  front_end.ProcessSource(options)

  expected_files = [os.path.join(self._temp_directory, u'passwords.txt')]

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def testProcessSourceExtractWithExtensions(self):
  """Tests extract with extensions process source functionality."""
  test_front_end = image_export.ImageExportFrontend()

  options = test_lib.Options()
  options.image = self._GetTestFilePath([u'image.qcow2'])
  options.path = self._temp_directory
  options.extension_string = u'txt'

  test_front_end.ParseOptions(options, source_option='image')
  test_front_end.ProcessSource(options)

  expected_text_files = sorted([
      os.path.join(self._temp_directory, u'passwords.txt')])

  text_files = glob.glob(os.path.join(self._temp_directory, u'*'))

  # Use assertEqual; assertEquals is a deprecated alias.
  self.assertEqual(sorted(text_files), expected_text_files)
def __init__(self, input_reader=None, output_writer=None):
  """Initializes the CLI tool object.

  Args:
    input_reader: the input reader (instance of InputReader).
                  The default is None which indicates the use
                  of the stdin input reader.
    output_writer: the output writer (instance of OutputWriter).
                   The default is None which indicates the use
                   of the stdout output writer.
  """
  super(ImageExportTool, self).__init__(
      input_reader=input_reader, output_writer=output_writer)
  # Front-end that performs the actual export work.
  self._front_end = image_export.ImageExportFrontend()
  self._destination_path = None
  self._filter_file = None
  self._remove_duplicates = True
  self.has_filters = False
  self.list_signature_identifiers = False
def testProcessSourceExtractWithSignaturesFilter(self):
  """Tests extract with a signatures filter."""
  front_end = image_export.ImageExportFrontend()

  options = frontend.Options()
  options.image = self._GetTestFilePath([u'syslog_image.dd'])
  options.path = self._temp_directory
  options.data_location = self._DATA_PATH
  options.signature_identifiers = u'gzip'

  front_end.ParseOptions(options, source_option=u'image')
  front_end.ProcessSource(options)

  logs_directory = os.path.join(self._temp_directory, u'logs')
  expected_files = sorted([
      logs_directory, os.path.join(logs_directory, u'sys.tgz')])

  self.assertEqual(
      sorted(self._RecursiveList(self._temp_directory)), expected_files)
def testProcessSourceExtractWithDateFilter(self):
  """Tests extract with file filter and date filter functionality."""
  test_front_end = image_export.ImageExportFrontend()

  options = test_lib.Options()
  options.image = self._GetTestFilePath([u'image.qcow2'])
  options.path = self._temp_directory
  options.include_duplicates = True

  options.filter = os.path.join(self._temp_directory, u'filter.txt')
  with open(options.filter, 'wb') as file_object:
    # Write a bytes literal since the file is opened in binary mode.
    file_object.write(b'/a_directory/.+_file\n')

  test_front_end.ParseOptions(options, source_option='image')

  # Set the date filter.
  filter_start = '2012-05-25 15:59:00'
  filter_end = '2012-05-25 15:59:20'

  date_filter_object = image_export.DateFilter()
  date_filter_object.Add(
      filter_start=filter_start, filter_end=filter_end,
      filter_type='ctime')
  image_export.FileSaver.SetDateFilter(date_filter_object)

  test_front_end.ProcessSource(options)

  expected_text_files = sorted([
      os.path.join(self._temp_directory, u'a_directory', u'a_file')])

  text_files = glob.glob(os.path.join(
      self._temp_directory, u'a_directory', u'*'))

  # Use assertEqual; assertEquals is a deprecated alias.
  self.assertEqual(sorted(text_files), expected_text_files)

  # We need to reset the date filter to not affect other tests.
  # pylint: disable=protected-access
  # TODO: Remove this once filtering has been moved to the front end object.
  image_export.FileSaver._date_filter = None
def testProcessSourceExtractWithFilter(self):
  """Tests extract with filter process source functionality."""
  test_front_end = image_export.ImageExportFrontend()

  options = test_lib.Options()
  options.image = self._GetTestFilePath([u'image.qcow2'])
  options.path = self._temp_directory

  options.filter = os.path.join(self._temp_directory, u'filter.txt')
  with open(options.filter, 'wb') as file_object:
    # Write a bytes literal since the file is opened in binary mode.
    file_object.write(b'/a_directory/.+_file\n')

  test_front_end.ParseOptions(options, source_option='image')
  test_front_end.ProcessSource(options)

  expected_text_files = sorted([
      os.path.join(self._temp_directory, u'a_directory', u'another_file'),
      os.path.join(self._temp_directory, u'a_directory', u'a_file')])

  text_files = glob.glob(os.path.join(
      self._temp_directory, u'a_directory', u'*'))

  # Use assertEqual; assertEquals is a deprecated alias.
  self.assertEqual(sorted(text_files), expected_text_files)
def testParseExtensionsString(self):
  """Tests the ParseExtensionsString function."""
  front_end = image_export.ImageExportFrontend()
  front_end.ParseExtensionsString(u'txt')
def testParseNamesString(self):
  """Tests the ParseNamesString function."""
  front_end = image_export.ImageExportFrontend()
  front_end.ParseNamesString(u'another_file')
def Main():
  """The main function, running the show."""
  front_end = image_export.ImageExportFrontend()

  # Set up the command line argument parser.
  # NOTE(review): several help strings below read garbled (e.g. the
  # --date-filter text around "requires at minimum start or end to time of
  # the boundary") — presumably mangled in an earlier edit; confirm against
  # upstream before shipping.
  arg_parser = argparse.ArgumentParser(
      description=(
          u'This is a simple collector designed to export files inside an '
          u'image, both within a regular RAW image as well as inside a VSS. '
          u'The tool uses a collection filter that uses the same syntax as a '
          u'targeted plaso filter.'),
      epilog=u'And that\'s how you export files, plaso style.')

  arg_parser.add_argument(
      u'-d', u'--debug', dest=u'debug', action=u'store_true', default=False,
      help=u'Turn on debugging information.')

  arg_parser.add_argument(
      u'-w', u'--write', action=u'store', dest=u'path', type=unicode,
      metavar=u'PATH', default=u'.', help=(
          u'The directory in which extracted files should be stored in.'))

  arg_parser.add_argument(
      u'-f', u'--filter', action=u'store', dest=u'filter', type=unicode,
      metavar=u'FILTER_FILE', help=(
          u'Full path to the file that contains the collection filter, '
          u'the file can use variables that are defined in preprocesing, '
          u'just like any other log2timeline/plaso collection filter.'))

  arg_parser.add_argument(
      u'--data', action=u'store', dest=u'data_location', type=unicode,
      metavar=u'PATH', default=None, help=u'the location of the data files.')

  arg_parser.add_argument(
      u'--date-filter', u'--date_filter', action=u'append', type=unicode,
      dest=u'date_filters', metavar=u'TYPE_START_END', default=None, help=(
          u'Filter based on file entry date and time ranges. This parameter '
          u'is formatted as "TIME_VALUE,START_DATE_TIME,END_DATE_TIME" where '
          u'TIME_VALUE defines which file entry timestamp the filter applies '
          u'to e.g. atime, ctime, crtime, bkup, etc. START_DATE_TIME and '
          u'END_DATE_TIME define respectively the start and end of the date '
          u'time range. A date time range requires at minimum start or end to '
          u'time of the boundary and END defines the end time. Both timestamps '
          u'be set. The date time values are formatted as: YYYY-MM-DD '
          u'hh:mm:ss.######[+-]##:## Where # are numeric digits ranging from '
          u'0 to 9 and the seconds fraction can be either 3 or 6 digits. The '
          u'time of day, seconds fraction and timezone offset are optional. '
          u'The default timezone is UTC. E.g. "atime, 2013-01-01 23:12:14, '
          u'2013-02-23". This parameter can be repeated as needed to add '
          u'additional date date boundaries, eg: once for atime, once for '
          u'crtime, etc.'))

  arg_parser.add_argument(
      u'-x', u'--extensions', dest=u'extensions_string', action=u'store',
      type=unicode, metavar=u'EXTENSIONS', help=(
          u'Filter based on file name extensions. This option accepts '
          u'multiple multiple comma separated values e.g. "csv,docx,pst".'))

  arg_parser.add_argument(
      u'--names', dest=u'names_string', action=u'store', type=str,
      metavar=u'NAMES', help=(
          u'If the purpose is to find all files given a certain names '
          u'this options should be used. This option accepts a comma separated '
          u'string denoting all file names, eg: -x "NTUSER.DAT,UsrClass.dat".'))

  arg_parser.add_argument(
      u'--signatures', dest=u'signature_identifiers', action=u'store',
      type=unicode, metavar=u'IDENTIFIERS', help=(
          u'Filter based on file format signature identifiers. This option '
          u'accepts multiple comma separated values e.g. "esedb,lnk". '
          u'Use "list" to show an overview of the supported file format '
          u'signatures.'))

  arg_parser.add_argument(
      u'--include_duplicates', dest=u'include_duplicates',
      action=u'store_true', default=False, help=(
          u'By default if VSS is turned on all files saved will have their '
          u'MD5 sum calculated and compared to other files already saved '
          u'with the same inode value. If the MD5 sum is the same the file '
          u'does not get saved again. This option turns off that behavior '
          u'so that all files will get stored, even if they are duplicates.'))

  # Image and VSS options are contributed by the front-end.
  front_end.AddImageOptions(arg_parser)
  front_end.AddVssProcessingOptions(arg_parser)

  arg_parser.add_argument(
      u'image', nargs='?', action=u'store', metavar=u'IMAGE', default=None,
      type=unicode, help=(
          u'The full path to the image file that we are about to extract files '
          u'from, it should be a raw image or another image that plaso '
          u'supports.'))

  options = arg_parser.parse_args()

  # Configure logging before anything else emits messages.
  format_str = u'%(asctime)s [%(levelname)s] %(message)s'
  if options.debug:
    logging.basicConfig(level=logging.DEBUG, format=format_str)
  else:
    logging.basicConfig(level=logging.INFO, format=format_str)

  if not getattr(options, u'data_location', None):
    # Determine if we are running from the source directory.
    options.data_location = os.path.dirname(__file__)
    options.data_location = os.path.dirname(options.data_location)
    options.data_location = os.path.join(options.data_location, u'data')

    if not os.path.exists(options.data_location):
      # Otherwise determine if there is shared plaso data location.
      options.data_location = os.path.join(sys.prefix, u'share', u'plaso')

    if not os.path.exists(options.data_location):
      logging.warning(u'Unable to automatically determine data location.')
      options.data_location = None

  # "--signatures list" prints the supported identifiers and exits.
  if getattr(options, u'signature_identifiers', u'') == u'list':
    front_end.ListSignatureIdentifiers(options)
    return True

  # Warn when no filter is set, since that means every file is exported.
  has_filter = False
  if getattr(options, u'date_filters', []):
    has_filter = True
  if getattr(options, u'extensions_string', u''):
    has_filter = True
  if getattr(options, u'filter', u''):
    has_filter = True
  if getattr(options, u'signature_identifiers', u''):
    has_filter = True

  if not has_filter:
    logging.warning(u'No filter defined exporting all files.')

  try:
    front_end.ParseOptions(options, source_option='image')
  except errors.BadConfigOption as exception:
    arg_parser.print_help()
    print u''
    logging.error(u'{0:s}'.format(exception))
    return False

  # TODO: print more status information like PrintOptions.
  front_end.PrintFilterCollection()

  try:
    front_end.ProcessSource(options)
    logging.info(u'Processing completed.')
  except (KeyboardInterrupt, errors.UserAbort):
    logging.warning(u'Aborted by user.')
    return False
  except errors.SourceScannerError as exception:
    logging.warning((
        u'Unable to scan for a supported filesystem with error: {0:s}\n'
        u'Most likely the image format is not supported by the '
        u'tool.').format(exception))
    return False

  return True