def GetEventHeader(event_object, descriptions, exclude_timestamp):
  """Returns a list of strings that contains a header for the event.

  Args:
    event_object: An event object (instance of event.EventObject).
    descriptions: A list of strings describing the value of the header
                  timestamp.
    exclude_timestamp: A boolean. If it is set to True the method will not
                       include the timestamp in the header.

  Returns:
    A list of strings containing header information for the event.
  """
  format_string = GetFormatString(event_object)

  # Create the strings to return.
  header_lines = [u'Key information.']

  if not exclude_timestamp:
    # The timestamp is the same for every description line, so convert once.
    iso_timestamp = timelib.Timestamp.CopyToIsoFormat(event_object.timestamp)
    header_lines.extend([
        format_string.format(description, iso_timestamp)
        for description in descriptions])

  if hasattr(event_object, 'keyname'):
    header_lines.append(
        format_string.format(u'Key Path', event_object.keyname))

  if event_object.timestamp_desc != eventdata.EventTimestamp.WRITTEN_TIME:
    header_lines.append(
        format_string.format(u'Description', event_object.timestamp_desc))

  header_lines.append(frontend_utils.FormatHeader(u'Data', u'-'))

  return header_lines
def GetListOfAllPlugins(self): """Returns information about the supported plugins.""" return_strings = [] # TODO: replace frontend_utils.FormatHeader by frontend function. return_strings.append(frontend_utils.FormatHeader(u'Supported Plugins')) all_plugins = parsers_manager.ParsersManager.GetWindowsRegistryPlugins() return_strings.append(frontend_utils.FormatHeader(u'Key Plugins')) for plugin_obj in all_plugins.GetAllKeyPlugins(): return_strings.append(frontend_utils.FormatOutputString( plugin_obj.NAME[self.PLUGIN_UNIQUE_NAME_START:], plugin_obj.DESCRIPTION)) return_strings.append(frontend_utils.FormatHeader(u'Value Plugins')) for plugin_obj in all_plugins.GetAllValuePlugins(): return_strings.append(frontend_utils.FormatOutputString( plugin_obj.NAME[self.PLUGIN_UNIQUE_NAME_START:], plugin_obj.DESCRIPTION)) return u'\n'.join(return_strings)
def PrintHeader(options): """Print header information, including library versions.""" print frontend_utils.FormatHeader('File Parsed') print u'{:>20s}'.format(options.file_to_parse) print frontend_utils.FormatHeader('Versions') print frontend_utils.FormatOutputString('plaso engine', plaso.GetVersion()) print frontend_utils.FormatOutputString('pyevt', pyevt.get_version()) print frontend_utils.FormatOutputString('pyevtx', pyevtx.get_version()) print frontend_utils.FormatOutputString('pylnk', pylnk.get_version()) print frontend_utils.FormatOutputString('pymsiecf', pymsiecf.get_version()) print frontend_utils.FormatOutputString('pyregf', pyregf.get_version()) if options.filter: print frontend_utils.FormatHeader('Filter Used') print frontend_utils.FormatOutputString('Filter String', options.filter) if options.parsers: print frontend_utils.FormatHeader('Parser Filter Used') print frontend_utils.FormatOutputString('Parser String', options.parsers)
def GetStats(profiler): """Print verbose information from profiler and return a stats object.""" stats = pstats.Stats(profiler, stream=sys.stdout) print frontend_utils.FormatHeader('Profiler') print '\n{:-^20}'.format(' Top 10 Time Spent ') stats.sort_stats('cumulative') stats.print_stats(10) print '\n{:-^20}'.format(' Sorted By Function Calls ') stats.sort_stats('calls') stats.print_stats() return stats
def ParseCurrentKey(self, line): """Parse the current key.""" if 'true' in line.lower(): verbose = True elif '-v' in line.lower(): verbose = True else: verbose = False if not IsLoaded(): return current_hive = preg.PregCache.hive_storage.loaded_hive if not current_hive: return # Clear the last results from parse key. preg.PregCache.events_from_last_parse = [] print_strings = preg.ParseKey(key=current_hive.GetCurrentRegistryKey(), hive_helper=current_hive, shell_helper=preg.PregCache.shell_helper, verbose=verbose) self.output_writer.write(u'\n'.join(print_strings)) # Print out a hex dump of all binary values. if verbose: header_shown = False for value in current_hive.GetCurrentRegistryKey().GetValues(): if value.DataIsBinaryData(): if not header_shown: header_shown = True print frontend_utils.FormatHeader('Hex Dump') # Print '-' 80 times. self.output_writer.write(u'-' * 80) self.output_writer.write(u'\n') self.output_writer.write( frontend_utils.FormatOutputString( 'Attribute', value.name)) self.output_writer.write(u'-' * 80) self.output_writer.write(u'\n') self.output_writer.write( frontend_utils.OutputWriter.GetHexDump(value.data)) self.output_writer.write(u'\n') self.output_writer.write(u'+-' * 40) self.output_writer.write(u'\n') self.output_writer.flush()
def GetEventBody(event_object, file_entry=None, show_hex=False):
  """Returns a list of strings containing information from an event.

  Args:
    event_object: An event object (instance of event.EventObject).
    file_entry: An optional file entry object (instance of dfvfs.FileEntry)
                that the event originated from. Default is None.
    show_hex: A boolean, if set to True hex dump of the value is included in
              the output. The default value is False.

  Returns:
    A list of strings containing the event body.
  """
  format_string = GetFormatString(event_object)

  body_lines = []

  timestamp_description = getattr(
      event_object, 'timestamp_desc', eventdata.EventTimestamp.WRITTEN_TIME)
  if timestamp_description != eventdata.EventTimestamp.WRITTEN_TIME:
    body_lines.append(u'<{0:s}>'.format(timestamp_description))

  if hasattr(event_object, 'regvalue'):
    attributes = event_object.regvalue
  else:
    # TODO: Add a function for this to avoid repeating code.
    keys = event_object.GetAttributes().difference(
        event_object.COMPARE_EXCLUDE)
    keys.discard('offset')
    keys.discard('timestamp_desc')
    attributes = dict((key, getattr(event_object, key)) for key in keys)

  body_lines.extend([
      format_string.format(attribute, value)
      for attribute, value in attributes.items()])

  if show_hex and file_entry:
    event_object.pathspec = file_entry.path_spec
    body_lines.append(frontend_utils.FormatHeader(
        u'Hex Output From Event.', '-'))
    body_lines.append(
        frontend_utils.OutputWriter.GetEventDataHexDump(event_object))

  return body_lines
def ProcessStorage(options): """Process a storage file and produce profile results. Args: options: the command line arguments (instance of argparse.Namespace). Returns: The profiling statistics or None on error. """ storage_parameters = options.storage.split() storage_parameters.append(options.file_to_parse) if options.filter: storage_parameters.append(options.filter) front_end = psort.PsortFrontend() try: front_end.ParseOptions(options) except errors.BadConfigOption as exception: logging.error(u'{0:s}'.format(exception)) return if options.verbose: # TODO: why not move this functionality into psort? profiler = cProfile.Profile() profiler.enable() else: time_start = time.time() # Call psort and process output. _ = front_end.ParseStorage(options) if options.verbose: profiler.disable() else: time_end = time.time() if options.verbose: return GetStats(profiler) else: print frontend_utils.FormatHeader('Time Used') print u'{:>20f}s'.format(time_end - time_start)
def ListPluginInformation(self): """Lists all plugin and parser information.""" plugin_list = self._GetPluginData() return_string_pieces = [] return_string_pieces.append( u'{:=^80}'.format(u' log2timeline/plaso information. ')) for header, data in plugin_list.items(): # TODO: Using the frontend utils here instead of "self.PrintHeader" # since the desired output here is a string that can be sent later # to an output writer. Change this entire function so it can utilize # PrintHeader or something similar. return_string_pieces.append(frontend_utils.FormatHeader(header)) for entry_header, entry_data in sorted(data): return_string_pieces.append( frontend_utils.FormatOutputString(entry_header, entry_data)) return_string_pieces.append(u'') self._output_writer.Write(u'\n'.join(return_string_pieces))
def ProcessStorage(options): """Process a storage file and produce profile results. Args: options: the command line arguments (instance of argparse.Namespace). Returns: The profiling statistics or None on error. """ storage_parameters = options.storage.split() storage_parameters.append(options.file_to_parse) if options.filter: storage_parameters.append(options.filter) if options.verbose: # TODO: why not move this functionality into psort? profiler = cProfile.Profile() profiler.enable() else: time_start = time.time() # Call psort and process output. return_value = psort.Main(storage_parameters) if options.verbose: profiler.disable() else: time_end = time.time() if return_value: print u'Parsed storage file.' else: print u'It appears the storage file may not have processed correctly.' if options.verbose: return GetStats(profiler) else: print frontend_utils.FormatHeader('Time Used') print u'{:>20f}s'.format(time_end - time_start)
def Main(arguments=None): """Start the tool.""" multiprocessing.freeze_support() front_end = psort.PsortFrontend() arg_parser = argparse.ArgumentParser( description=(u'PSORT - Application to read, filter and process ' u'output from a plaso storage file.'), add_help=False) tool_group = arg_parser.add_argument_group(u'Optional arguments For psort') output_group = arg_parser.add_argument_group( u'Optional arguments for output modules') analysis_group = arg_parser.add_argument_group( u'Optional arguments for analysis modules') tool_group.add_argument(u'-d', u'--debug', action=u'store_true', dest=u'debug', default=False, help=u'Fall back to debug shell if psort fails.') tool_group.add_argument(u'-q', u'--quiet', action=u'store_true', dest=u'quiet', default=False, help=u'Do not print a summary after processing.') tool_group.add_argument(u'-h', u'--help', action=u'help', help=u'Show this help message and exit.') tool_group.add_argument( u'-a', u'--include_all', action=u'store_false', dest=u'dedup', default=True, help=( u'By default the psort removes duplicate entries from the output. ' u'This parameter changes that behavior so all events are included.' )) tool_group.add_argument( u'-o', u'--output_format', u'--output-format', metavar=u'FORMAT', dest=u'output_format', default=u'dynamic', help=(u'The output format. 
Use "-o list" to see a list of available ' u'output formats.')) tool_group.add_argument( u'--analysis', metavar=u'PLUGIN_LIST', dest=u'analysis_plugins', default=u'', action=u'store', type=unicode, help=(u'A comma separated list of analysis plugin names to be loaded ' u'or "--analysis list" to see a list of available plugins.')) tool_group.add_argument(u'--data', metavar=u'PATH', dest=u'data_location', default=u'', action=u'store', type=unicode, help=u'The location of the analysis data.') tool_group.add_argument( u'--language', metavar=u'LANGUAGE', dest=u'preferred_language', default=u'en-US', type=unicode, help=( u'The preferred language identifier for Windows Event Log message ' u'strings. Use "--language list" to see a list of available ' u'language identifiers. Note that formatting will fall back on ' u'en-US (LCID 0x0409) if the preferred language is not available ' u'in the database of message string templates.')) tool_group.add_argument( u'-z', u'--zone', metavar=u'TIMEZONE', dest=u'timezone', default=u'UTC', type=unicode, help=(u'The timezone in which to represent the date and time values. ' u'Use "-z list" to see a list of available timezones.')) tool_group.add_argument(u'-w', u'--write', metavar=u'OUTPUTFILE', dest=u'write', help=u'Output filename, defaults to stdout.') tool_group.add_argument( u'--slice', metavar=u'DATE', dest=u'slice', type=str, default=u'', action=u'store', help= (u'Create a time slice around a certain date. This parameter, if ' u'defined will display all events that happened X minutes before and ' u'after the defined date. X is controlled by the parameter ' u'--slice_size but defaults to 5 minutes.')) tool_group.add_argument( u'--slicer', dest=u'slicer', action=u'store_true', default=False, help= (u'Create a time slice around every filter match. This parameter, if ' u'defined will save all X events before and after a filter match has ' u'been made. 
X is defined by the --slice_size parameter.')) tool_group.add_argument( u'--slice_size', dest=u'slice_size', type=int, default=5, action=u'store', help=( u'Defines the slice size. In the case of a regular time slice it ' u'defines the number of minutes the slice size should be. In the ' u'case of the --slicer it determines the number of events before ' u'and after a filter match has been made that will be included in ' u'the result set. The default value is 5]. See --slice or --slicer ' u'for more details about this option.')) tool_group.add_argument( u'-v', u'--version', dest=u'version', action=u'version', version=u'log2timeline - psort version {0:s}'.format( plaso.GetVersion()), help=u'Show the current version of psort.') front_end.AddStorageFileOptions(tool_group) tool_group.add_argument( u'filter', nargs=u'?', action=u'store', metavar=u'FILTER', default=None, type=unicode, help=(u'A filter that can be used to filter the dataset before it ' u'is written into storage. More information about the filters' u' and it\'s usage can be found here: http://plaso.kiddaland.' u'net/usage/filters')) if arguments is None: arguments = sys.argv[1:] # Add the output module options. if u'-o' in arguments: argument_index = arguments.index(u'-o') + 1 elif u'--output_format' in arguments: argument_index = arguments.index(u'--output_format') + 1 elif u'--output-format' in arguments: argument_index = arguments.index(u'--output-format') + 1 else: argument_index = 0 if argument_index > 0: module_names = arguments[argument_index] front_end.AddOutputModuleOptions(output_group, [module_names]) # Add the analysis plugin options. if u'--analysis' in arguments: argument_index = arguments.index(u'--analysis') + 1 # Get the names of the analysis plugins that should be loaded. 
plugin_names = arguments[argument_index] try: front_end.AddAnalysisPluginOptions(analysis_group, plugin_names) except errors.BadConfigOption as exception: arg_parser.print_help() print u'' logging.error(u'{0:s}'.format(exception)) return False options = arg_parser.parse_args(args=arguments) format_str = u'[%(levelname)s] %(message)s' if getattr(options, u'debug', False): logging.basicConfig(level=logging.DEBUG, format=format_str) else: logging.basicConfig(level=logging.INFO, format=format_str) have_list_option = False if options.analysis_plugins == u'list': front_end.ListAnalysisPlugins() have_list_option = True if options.output_format == u'list': front_end.ListOutputModules() have_list_option = True if options.preferred_language == u'list': front_end.ListLanguageIdentifiers() have_list_option = True if options.timezone == u'list': front_end.ListTimeZones() have_list_option = True if have_list_option: return True if not getattr(options, u'data_location', None): # Determine if we are running from the source directory. options.data_location = os.path.dirname(__file__) options.data_location = os.path.dirname(options.data_location) options.data_location = os.path.join(options.data_location, u'data') if not os.path.exists(options.data_location): # Otherwise determine if there is shared plaso data location. options.data_location = os.path.join(sys.prefix, u'share', u'plaso') if not os.path.exists(options.data_location): logging.warning( u'Unable to automatically determine data location.') options.data_location = None try: front_end.ParseOptions(options) except errors.BadConfigOption as exception: arg_parser.print_help() print u'' logging.error(u'{0:s}'.format(exception)) return False if front_end.preferred_encoding == u'ascii': logging.warning( u'The preferred encoding of your system is ASCII, which is not optimal ' u'for the typically non-ASCII characters that need to be parsed and ' u'processed. 
The tool will most likely crash and die, perhaps in a way ' u'that may not be recoverable. A five second delay is introduced to ' u'give you time to cancel the runtime and reconfigure your preferred ' u'encoding, otherwise continue at own risk.') time.sleep(5) try: counter = front_end.ProcessStorage(options) if not options.quiet: logging.info(frontend_utils.FormatHeader(u'Counter')) for element, count in counter.most_common(): logging.info(frontend_utils.FormatOutputString(element, count)) except IOError as exception: # Piping results to "|head" for instance causes an IOError. if u'Broken pipe' not in exception: logging.error( u'Processing stopped early: {0:s}.'.format(exception)) except KeyboardInterrupt: pass # Catching every remaining exception in case we are debugging. except Exception as exception: if not options.debug: raise logging.error(u'{0:s}'.format(exception)) pdb.post_mortem() return True
def ProcessFile(options): """Process a file and produce profile results.""" if options.proto_file and os.path.isfile(options.proto_file): with open(options.proto_file) as fh: proto_string = fh.read() proto = transmission_pb2.PathSpec() try: text_format.Merge(proto_string, proto) except text_format.ParseError as exception: logging.error( u'Unable to parse file, error: {}'.format(exception)) sys.exit(1) serializer = protobuf_serializer.ProtobufPathSpecSerializer path_spec = serializer.ReadSerializedObject(proto) else: path_spec = path_spec_factory.Factory.NewPathSpec( definitions.TYPE_INDICATOR_OS, location=options.file_to_parse) file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec) if file_entry is None: logging.error(u'Unable to open file: {0:s}'.format( options.file_to_parse)) sys.exit(1) # Set few options the engine expects to be there. # TODO: Can we rather set this directly in argparse? options.single_process = True options.debug = False options.text_prepend = u'' # Set up the engine. collection_queue = queue.SingleThreadedQueue() storage_queue = queue.SingleThreadedQueue() parse_error_queue = queue.SingleThreadedQueue() engine_object = engine.Engine(collection_queue, storage_queue, parse_error_queue) # Create a worker. worker_object = engine_object.CreateExtractionWorker('0') # TODO: add support for parser_filter_string. worker_object.InitalizeParserObjects() if options.verbose: profiler = cProfile.Profile() profiler.enable() else: time_start = time.time() worker_object.ParseFileEntry(file_entry) if options.verbose: profiler.disable() else: time_end = time.time() engine_object.SignalEndOfInputStorageQueue() event_object_consumer = PprofEventObjectQueueConsumer(storage_queue) event_object_consumer.ConsumeEventObjects() if not options.verbose: print frontend_utils.FormatHeader('Time Used') print u'{:>20f}s'.format(time_end - time_start) print frontend_utils.FormatHeader('Parsers Loaded') # Accessing protected member. 
# pylint: disable=protected-access plugins = [] for parser_object in sorted(worker_object._parser_objects): print frontend_utils.FormatOutputString('', parser_object.NAME) parser_plugins = getattr(parser_object, '_plugins', []) plugins.extend(parser_plugins) print frontend_utils.FormatHeader('Plugins Loaded') for plugin in sorted(plugins): if isinstance(plugin, basestring): print frontend_utils.FormatOutputString('', plugin) else: plugin_string = getattr(plugin, 'NAME', u'N/A') print frontend_utils.FormatOutputString('', plugin_string) print frontend_utils.FormatHeader('Parsers Used') for parser in sorted(event_object_consumer.parsers): print frontend_utils.FormatOutputString('', parser) print frontend_utils.FormatHeader('Plugins Used') for plugin in sorted(event_object_consumer.plugins): print frontend_utils.FormatOutputString('', plugin) print frontend_utils.FormatHeader('Counter') for key, value in event_object_consumer.counter.most_common(): print frontend_utils.FormatOutputString(key, value) if options.verbose: return GetStats(profiler)
def RunModeConsole(front_end, options):
  """Open up an iPython console.

  Args:
    front_end: the preg front-end object used by the shell helper.
    options: the command line arguments (instance of argparse.Namespace).
  """
  namespace = {}

  # Width used to align the command descriptions in the banner.
  function_name_length = 23

  banners = []
  banners.append(
      frontend_utils.FormatHeader(
          u'Welcome to PREG - home of the Plaso Windows Registry Parsing.'))
  banners.append(u'')
  banners.append(u'Some of the commands that are available for use are:')
  banners.append(u'')
  banners.append(
      frontend_utils.FormatOutputString(
          u'cd key',
          u'Navigate the Registry like a directory structure.',
          function_name_length))
  banners.append(
      frontend_utils.FormatOutputString(
          u'ls [-v]',
          (u'List all subkeys and values of a Registry key. If called as '
           u'ls True then values of keys will be included in the output.'),
          function_name_length))
  banners.append(
      frontend_utils.FormatOutputString(
          u'parse -[v]', u'Parse the current key using all plugins.',
          function_name_length))
  banners.append(
      frontend_utils.FormatOutputString(
          u'pwd',
          u'Print the working "directory" or the path of the current key.',
          function_name_length))
  banners.append(
      frontend_utils.FormatOutputString(u'plugin [-h] plugin_name', (
          u'Run a particular key-based plugin on the loaded hive. The '
          u'correct Registry key will be loaded, opened and then parsed.'),
          function_name_length))
  banners.append(
      frontend_utils.FormatOutputString(
          u'get_value value_name',
          (u'Get a value from the currently loaded Registry key.')))
  banners.append(
      frontend_utils.FormatOutputString(
          u'get_value_data value_name',
          (u'Get a value data from a value stored in the currently loaded '
           u'Registry key.')))
  banners.append(
      frontend_utils.FormatOutputString(
          u'get_key', u'Return the currently loaded Registry key.'))

  banners.append(u'')

  # Build the global cache and prepare the tool.
  hive_storage = preg.PregStorage()
  shell_helper = preg.PregHelper(options, front_end, hive_storage)
  parser_mediator = shell_helper.BuildParserMediator()

  # The magic commands read state from this module level cache.
  preg.PregCache.parser_mediator = parser_mediator
  preg.PregCache.shell_helper = shell_helper
  preg.PregCache.hive_storage = hive_storage

  registry_types = getattr(options, 'regfile', None)
  if isinstance(registry_types, basestring):
    registry_types = registry_types.split(u',')
  if not registry_types:
    # Default set of hive types to scan for when none were requested.
    registry_types = [
        'NTUSER', 'USRCLASS', 'SOFTWARE', 'SYSTEM', 'SAM', 'SECURITY']
  preg.PregCache.shell_helper.Scan(registry_types)

  if len(preg.PregCache.hive_storage) == 1:
    # Exactly one hive found: open it immediately.
    preg.PregCache.hive_storage.SetOpenHive(0)
    hive_helper = preg.PregCache.hive_storage.loaded_hive
    banners.append(u'Opening hive: {0:s} [{1:s}]'.format(
        hive_helper.path, hive_helper.collector_name))
    ConsoleConfig.SetPrompt(hive_path=hive_helper.path)

  loaded_hive = preg.PregCache.hive_storage.loaded_hive

  if loaded_hive and loaded_hive.name != u'N/A':
    banners.append(u'Registry hive: {0:s} is available and loaded.'.format(
        loaded_hive.name))
  else:
    banners.append(u'More than one Registry file ready for use.')
    banners.append(u'')
    banners.append(preg.PregCache.hive_storage.ListHives())
    banners.append(u'')
    banners.append(
        (u'Use "hive open INDEX" to load a hive and "hive list" to see a '
         u'list of available hives.'))

  banners.append(u'')
  banners.append(u'Happy command line console fu-ing.')

  # Adding variables in scope.
  namespace.update(globals())
  namespace.update({
      'get_current_key': GetCurrentKey,
      'get_key': GetCurrentKey,
      'get_value': GetValue,
      'get_value_data': GetValueData,
      'number_of_hives': GetTotalNumberOfLoadedHives,
      'range_of_hives': GetRangeForAllLoadedHives,
      'options': options})

  ipshell_config = ConsoleConfig.GetConfig()

  if loaded_hive:
    ConsoleConfig.SetPrompt(
        hive_path=loaded_hive.name, config=ipshell_config)
  else:
    ConsoleConfig.SetPrompt(
        hive_path=u'NO HIVE LOADED', config=ipshell_config)

  # Starting the shell.
  ipshell = InteractiveShellEmbed(
      user_ns=namespace, config=ipshell_config,
      banner1=u'\n'.join(banners), exit_msg='')
  ipshell.confirm_exit = False
  # Adding "magic" functions.
  ipshell.register_magics(MyMagics)
  # Set autocall to two, making parenthesis not necessary when calling
  # function names (although they can be used and are necessary sometimes,
  # like in variable assignments, etc).
  ipshell.autocall = 2
  # Registering command completion for the magic commands.
  ipshell.set_hook('complete_command', CdCompleter, str_key='%cd')
  ipshell.set_hook('complete_command', VerboseCompleter, str_key='%ls')
  ipshell.set_hook('complete_command', VerboseCompleter, str_key='%parse')
  ipshell.set_hook('complete_command', PluginCompleter, str_key='%plugin')

  ipshell()
def ParseWithPlugin(self, line): """Parse a Registry key using a specific plugin.""" if not IsLoaded(): print u'No hive loaded, unable to parse.' return current_hive = preg.PregCache.hive_storage.loaded_hive if not current_hive: return if not line: print u'No plugin name added.' return plugin_name = line if '-h' in line: items = line.split() if len(items) != 2: print u'Wrong usage: plugin [-h] PluginName' return if items[0] == '-h': plugin_name = items[1] else: plugin_name = items[0] if not plugin_name.startswith('winreg'): plugin_name = u'winreg_{0:s}'.format(plugin_name) hive_type = current_hive.type plugins_list = parsers_manager.ParsersManager.GetWindowsRegistryPlugins( ) plugin_found = False for plugin_cls in plugins_list.GetKeyPlugins(hive_type): plugin = plugin_cls(reg_cache=current_hive.reg_cache) if plugin.plugin_name == plugin_name: # If we found the correct plugin. plugin_found = True break if not plugin_found: print u'No plugin named: {0:s} available for Registry type {1:s}'.format( plugin_name, hive_type) return if not hasattr(plugin, 'REG_KEYS'): print u'Plugin: {0:s} has no key information.'.format(line) return if '-h' in line: print frontend_utils.FormatHeader(plugin_name) print frontend_utils.FormatOutputString('Description', plugin.__doc__) print u'' for registry_key in plugin.expanded_keys: print frontend_utils.FormatOutputString( 'Registry Key', registry_key) return if not plugin.expanded_keys: plugin.ExpandKeys(preg.PregCache.parser_mediator) # Clear the last results from parse key. preg.PregCache.events_from_last_parse = [] # Defining outside of for loop for optimization. get_key_by_path = current_hive.GetKeyByPath for registry_key in plugin.expanded_keys: key = get_key_by_path(registry_key) if not key: print u'Key: {0:s} not found'.format(registry_key) continue # Move the current location to the key to be parsed. self.ChangeDirectory(registry_key) # Parse the key. 
print_strings = preg.ParseKey( key=current_hive.GetCurrentRegistryKey(), hive_helper=current_hive, shell_helper=preg.PregCache.shell_helper, verbose=False, use_plugins=[plugin_name]) self.output_writer.write(u'\n'.join(print_strings)) self.output_writer.flush()