def run(self):
    """Correlate malfind hits with netscan output.

    Runs NetScan to collect network artifacts, runs Malfind (optionally
    restricted to the configured PID) to find injected processes, then
    re-lists only the malfind-flagged PIDs and hands both result sets to
    ``self._generator`` for correlation.

    Returns:
        renderers.TreeGrid with process, PE/hash, and network columns.
    """
    # Build and run the NetScan plugin first; its raw generator output is
    # consumed later by self._generator.
    automagics = automagic.choose_automagic(
        automagic.available(self._context), netscan.NetScan)
    plugin_netscan = plugins.construct_plugin(self.context, automagics,
                                              netscan.NetScan,
                                              self.config_path,
                                              self._progress_callback,
                                              self._file_consumer)
    output_netscan = plugin_netscan._generator()

    # Restrict malfind to the user-supplied PID, if any (None = all).
    filter_func = pslist.PsList.create_pid_filter(
        [self.config.get('pid', None)])
    automagics = automagic.choose_automagic(
        automagic.available(self._context), malfind.Malfind)
    plugin_malfind = plugins.construct_plugin(self.context, automagics,
                                              malfind.Malfind,
                                              self.config_path,
                                              self._progress_callback,
                                              self._file_consumer)
    output_malfind = plugin_malfind._generator(
        pslist.PsList.list_processes(
            context=self.context,
            layer_name=self.config['primary'],
            symbol_table=self.config['nt_symbols'],
            filter_func=filter_func))

    # Collect the PID (first column) of every row malfind reported,
    # normalising unreadable/unparsable cells to the "N/A" sentinel first.
    # NOTE(review): if the PID cell itself is unreadable, int("N/A") would
    # raise — same as the original behavior.
    malfind_pids = []
    for _depth, row_data in output_malfind:
        row_data = [
            "N/A" if isinstance(
                cell, (renderers.UnreadableValue, renderers.UnparsableValue))
            else cell for cell in row_data
        ]
        malfind_pids.append(int(row_data[0]))
    filter_func = pslist.PsList.create_pid_filter(malfind_pids)

    return renderers.TreeGrid(
        [("PID", int), ("PPID", int), ("Process", str), ("Name", str),
         ("Path", str), ("PEHeader", str), ("MD5", str),
         ("VirusTotal", str), ("LocalAddr", str), ("ForeignAddr", str),
         ("State", str), ("SuspiciousIP", str)],
        self._generator(
            pslist.PsList.list_processes(
                context=self.context,
                layer_name=self.config['primary'],
                symbol_table=self.config['nt_symbols'],
                filter_func=filter_func), output_netscan))
def init(self, plugin_class, memdump):
    """Module which initialize all volatility 3 internals

    @param plugin_class: plugin class. Ex. windows.pslist.PsList
    @param memdump: path to memdump. Ex. file:///home/vol3/memory.dmp
    @return: Volatility3 interface.
    """
    volatility.framework.require_interface_version(1, 0, 0)
    # Set the PARALLELISM
    #constants.PARALLELISM = constants.Parallelism.Multiprocessing
    #constants.PARALLELISM = constants.Parallelism.Threading
    constants.PARALLELISM = constants.Parallelism.Off
    # Do the initialization
    self.context = contexts.Context()  # Construct a blank context
    # Will not log as console's default level is WARNING
    failures = framework.import_files(volatility.plugins, True)
    self.automagics = automagic.available(self.context)
    # Initialize the list of plugins in case the plugin needs it
    plugin_list = framework.list_plugins()
    # BUG FIX: the documented 'memdump' argument was previously ignored in
    # favour of a pre-existing self.memdump attribute; store it so the path
    # passed to this call is the one the LayerStacker actually uses.
    self.memdump = memdump
    self.context.config[
        'automagic.LayerStacker.single_location'] = self.memdump
    self.automagics = automagic.choose_automagic(self.automagics,
                                                plugin_class)
    volatility_interface = plugins.construct_plugin(
        self.context, self.automagics, plugin_class, self.base_config_path,
        None, None)
    return volatility_interface
def extract_syscall_table(self, event):
    """Run the windows.ssdt.SSDT plugin and record the NT syscall table.

    The parsed table is pushed to neo4j when enabled; in debug mode every
    syscall is additionally written to the debug log.
    """
    ctx = event.context
    available_magics = event.automagics
    plugin_registry = event.plugin_list
    self.logger.info('Extracting the NT syscall table')
    try:
        ssdt_plugin = plugin_registry['windows.ssdt.SSDT']
    except KeyError as e:
        raise RuntimeError("Plugin not found") from e
    chosen_magics = automagic.choose_automagic(available_magics, ssdt_plugin)
    constructed = plugins.construct_plugin(ctx, chosen_magics, ssdt_plugin,
                                           BASE_CONFIG_PATH, None, None)
    # Render the plugin's TreeGrid to JSON, then parse it into our model.
    renderer = JsonRenderer()
    renderer.render(constructed.run())
    sdt = self.parse_ssdt_output(renderer.get_result())
    if self.neo4j_enabled:
        self.insert_neo4j_db(sdt)
    if self.debug:
        # Dump every syscall of every table to the debug log.
        for table_name, table in sdt.items():
            self.logger.debug('Displaying table %s', table_name)
            for syscall in table:
                self.logger.debug('[%s]: %s %s', syscall['Index'],
                                  syscall['Symbol'],
                                  hex(syscall['Address']))
def configure_plugin(self, plugin_name, **kwargs):
    """Configure and return a plugin

    Args:
        plugin_name: name of the plugin to configure, as a string.
        **kwargs: configuration options passed to the plugin

    Returns:
        The instantiated and configure volatility plugin.
    """
    plugin = self.plugins[plugin_name]
    # Push every option into the context config under the plugin's path.
    for option, option_value in kwargs.items():
        option_path = path_join("plugins", plugin.__name__, option)
        self.vol_ctx.config[option_path] = option_value
    # Narrow the available automagics down to the ones this plugin needs.
    chosen_automagics = automagic.choose_automagic(
        automagic.available(self.vol_ctx), plugin)
    # Build and return the plugin instance.
    return volplugins.construct_plugin(
        self.vol_ctx,
        chosen_automagics,
        plugin,
        "plugins",
        MuteProgress(),
        FileHandlerInterface,
    )
def run(self):
    """Run malfind over all processes, collect the PIDs it flags, and feed
    them (as strings) to the event-log generator, including corrupt
    results."""
    # Build and run the Malfind plugin over the full process listing.
    chosen_magics = automagic.choose_automagic(
        automagic.available(self._context), malfind.Malfind)
    plugin_malfind = plugins.construct_plugin(self.context, chosen_magics,
                                              malfind.Malfind,
                                              self.config_path,
                                              self._progress_callback,
                                              self._file_consumer)
    output_malfind = plugin_malfind._generator(
        pslist.PsList.list_processes(
            context=self.context,
            layer_name=self.config['primary'],
            symbol_table=self.config['nt_symbols']))
    # The first column of each malfind row is the PID; unreadable or
    # unparsable cells are normalised to "N/A" before conversion.
    malfind_pids = []
    for _depth, row_data in output_malfind:
        cleaned = [
            "N/A" if isinstance(cell, renderers.UnreadableValue)
            or isinstance(cell, renderers.UnparsableValue) else cell
            for cell in row_data
        ]
        malfind_pids.append(str(cleaned[0]))
    return renderers.TreeGrid([("Offset", str), ("EventID", str),
                               ("Valid", str), ("Time", datetime.datetime),
                               ("PID", str), ("ThreadID", str),
                               ("EventRecordID", str), ("Channel", str),
                               ("Provider", str), ("Sec-UserID", str),
                               ("Data", str)],
                              self._generator(show_corrupt_results=True,
                                              pids=malfind_pids))
def run(self):
    """Isolate each plugin and run it."""
    # Use all the plugins if there's no filter
    self.usable_plugins = self.usable_plugins or self.get_usable_plugins()
    self.automagics = self.automagics or automagic.available(self._context)
    plugins_to_run = []
    filter_list = self.config['plugin-filter']
    # Identify plugins that we can run which output datetimes
    for plugin_class in self.usable_plugins:
        try:
            automagics = automagic.choose_automagic(self.automagics,
                                                    plugin_class)
            plugin = plugins.construct_plugin(self.context, automagics,
                                              plugin_class,
                                              self.config_path,
                                              self._progress_callback,
                                              self._file_consumer)
            if isinstance(plugin, TimeLinerInterface):
                # Keep every TimeLiner-capable plugin unless a filter list
                # was supplied, in which case the plugin's dotted name must
                # contain one of the filters.
                # (loop variable renamed: 'filter' shadowed the builtin)
                full_name = (plugin.__module__ + '.' +
                             plugin.__class__.__name__)
                if not filter_list or any(
                        pattern in full_name for pattern in filter_list):
                    plugins_to_run.append(plugin)
        except exceptions.UnsatisfiedException as excp:
            # Remove the failed plugin from the list and continue
            vollog.debug("Unable to satisfy {}: {}".format(
                plugin_class.__name__, excp.unsatisfied))
            continue

    if self.config.get('record-config', False):
        # Persist the merged configuration of every runnable plugin to
        # config.json, keyed by "<PluginClass>.<entry>".
        total_config = {}
        for plugin in plugins_to_run:
            old_dict = dict(plugin.build_configuration())
            for entry in old_dict:
                total_config[interfaces.configuration.path_join(
                    plugin.__class__.__name__, entry)] = old_dict[entry]
        filedata = interfaces.plugins.FileInterface("config.json")
        with io.TextIOWrapper(filedata.data, write_through=True) as fp:
            json.dump(total_config, fp, sort_keys=True, indent=2)
        self.produce_file(filedata)

    return renderers.TreeGrid(columns=[
        ("Plugin", str), ("Description", str),
        ("Created Date", datetime.datetime),
        ("Modified Date", datetime.datetime),
        ("Accessed Date", datetime.datetime),
        ("Changed Date", datetime.datetime)
    ],
                              generator=self._generator(plugins_to_run))
def __init__(self, image_path, plugin_name, plugins_path): framework.require_interface_version(2, 0, 0) # import the plugin files failures = framework.import_files(volatility.plugins, True) # load the framework plugins list plugin_list = framework.list_plugins() plugin_info = self.getPlugin(plugin_name) if plugin_info is None: raise Exception("Plugin information for [" + plugin_name + "] not found") plugin_info['obj'] = plugin_list[plugin_info['name']] # image path file_name = os.path.abspath(image_path) if not os.path.exists(file_name): raise Exception("File does not exist: {}".format(file_name)) # set context self.ctx = contexts.Context() # Construct a blank context automagics = automagic.available(self.ctx) automagics = automagic.choose_automagic(automagics, plugin_info['obj']) single_location = "file:" + request.pathname2url(file_name) self.ctx.config[ 'automagic.LayerStacker.single_location'] = single_location # build plugin context base_config_path = os.path.abspath(plugins_path) progress_callback = PrintedProgress() constructed = plugins.construct_plugin( self.ctx, automagics, plugin_info['obj'], base_config_path, progress_callback, self.file_handler_class_factory()) # run the plugin and render the results to json treegrid = constructed.run() #renderers = dict([(x.name.lower(), x) for x in framework.class_subclasses(text_renderer.CLIRenderer)]) #renderers['quick']().render(treegrid) #print(treegrid) results = self.render(treegrid) #print(results) self.removeKeySpace(results) # go for the function that parse the json results to get clean output self.results = plugin_info['function'](results)
def run(self):
    """Build a ``linux.pstree.PsTree`` plugin instance wired to the fake
    QEMU memory file ``/panda.panda`` and return it.

    The plugin is constructed only — the caller is responsible for running
    it.

    Returns:
        The constructed PsTree plugin.
    """
    # we aren't really doing logging, but you can change these numbers to
    # get more details
    # (the original fetched logging.getLogger(__name__) and immediately
    # discarded it for the root logger - only the root logger is needed)
    vollog = logging.getLogger()
    # vollog.setLevel(1000)
    console = logging.StreamHandler()
    #console.setLevel(logging.WARNING)
    formatter = logging.Formatter(
        '%(levelname)-8s %(name)-12s: %(message)s')
    console.setFormatter(formatter)
    vollog.addHandler(console)
    volatility.framework.require_interface_version(1, 0, 0)
    # also change here for log level
    #console.setLevel(1000)
    constants.PARALLELISM = constants.Parallelism.Off
    ctx = contexts.Context()  # Construct a blank context
    # Will not log as console's default level is WARNING
    framework.import_files(volatility.plugins, True)
    automagics = automagic.available(ctx)
    plugin_list = framework.list_plugins()
    plugin_name = "linux.pstree.PsTree"  # we're just "kinda" running a plugin
    plugin = plugin_list[plugin_name]
    base_config_path = "plugins"
    # It should be up to the UI to determine which automagics to run, so
    # this is before BACK TO THE FRAMEWORK
    automagics = automagic.choose_automagic(automagics, plugin)
    # this is our fake file that represents QEMU memory
    single_location = "file:" + pathname2url("/panda.panda")
    ctx.config['automagic.LayerStacker.single_location'] = single_location
    constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                           base_config_path, MuteProgress(),
                                           self)
    return constructed
def list(self, plugin_name = None):
    """Returns an automagic list of all the automagic objects

    Args:
        plugin_name: optional plugin name; when given and resolvable, the
            automagics suggested for that plugin are flagged 'selected'.

    Returns:
        A list of dicts, one per automagic, with name, full_name,
        description, priority and selected keys.
    """
    amagics = list(self.get_automagics())
    suggested = amagics
    if plugin_name:
        plugin = PluginsApi.get_plugins().get(plugin_name)
        if plugin:
            suggested = automagic.choose_automagic(amagics, plugin)
    result = []
    for amagic in amagics:
        amagic_name = amagic.__class__.__name__
        # Use the first line of the docstring as the description.
        # BUG FIX: the previous slice ``doc[:doc.find("\n")]`` silently
        # dropped the final character when the docstring had no newline
        # (find() returns -1) and raised TypeError when __doc__ was None.
        doc = amagic.__doc__ or ""
        amagic_item = {
            'name': amagic_name,
            'full_name': amagic.__class__.__module__ + "." + amagic_name,
            'description': doc.split("\n", 1)[0],
            'priority': amagic.priority,
            'selected': amagic in suggested
        }
        result.append(amagic_item)
    return result
def extract_process_list(self, event):
    """Run windows.pslist.PsList and store (neo4j) or log the processes."""
    ctx = event.context
    available_magics = event.automagics
    plugin_registry = event.plugin_list
    self.logger.info('Extracting the process list')
    try:
        pslist_plugin = plugin_registry['windows.pslist.PsList']
    except KeyError as e:
        raise RuntimeError("Plugin not found") from e
    chosen_magics = automagic.choose_automagic(available_magics,
                                               pslist_plugin)
    constructed = plugins.construct_plugin(ctx, chosen_magics, pslist_plugin,
                                           BASE_CONFIG_PATH, None, None)
    # Render the plugin's TreeGrid to JSON and parse it into our model.
    renderer = JsonRenderer()
    renderer.render(constructed.run())
    processes = self.parse_plugin_output(renderer.get_result())
    if self.neo4j_enabled:
        self.insert_neo4j_db(processes)
    else:
        # No database configured: print them on debug output
        for proc in processes:
            self.logger.debug(proc)
def run(self):
    """Correlate a (optionally PID-filtered) process listing with netscan
    output and return the combined TreeGrid."""
    # Build and run NetScan first; its generator output is handed to
    # self._generator for correlation.
    chosen_magics = automagic.choose_automagic(
        automagic.available(self._context), netscan.NetScan)
    plugin_netscan = plugins.construct_plugin(self.context, chosen_magics,
                                              netscan.NetScan,
                                              self.config_path,
                                              self._progress_callback,
                                              self._file_consumer)
    output_netscan = plugin_netscan._generator()
    # Restrict the process listing to the configured PID (None = all).
    filter_func = pslist.PsList.create_pid_filter(
        [self.config.get('pid', None)])
    process_rows = pslist.PsList.list_processes(
        context=self.context,
        layer_name=self.config['primary'],
        symbol_table=self.config['nt_symbols'],
        filter_func=filter_func)
    return renderers.TreeGrid(
        [("PID", int), ("PPID", int), ("Process", str), ("Name", str),
         ("Path", str), ("LocalAddr", str), ("ForeignAddr", str),
         ("State", str), ("SuspiciousIP", str)],
        self._generator(process_rows, output_netscan))
# Pull the plugin name and the memory image location from the config.
pluginname = str(config["plugin"]["value"])
filename = str(config["location"]["file"])
framework.import_files(volatility.plugins, True)
# Renderer-name -> renderer class for every CLI renderer subclass.
renderers = {
    cls.name.lower(): cls
    for cls in framework.class_subclasses(text_renderer.CLIRenderer)
}
ctx = contexts.Context()
# Point the layer stacker at the memory image via a file: URL.
single_location = "file:" + request.pathname2url(filename)
ctx.config['automagic.LayerStacker.single_location'] = single_location
automagics = automagic.available(ctx)
plugin = framework.list_plugins()[pluginname]
automagics = automagic.choose_automagic(automagics, plugin)
# Choose OS-appropriate stackers unless the caller already configured some.
if ctx.config.get('automagic.LayerStacker.stackers', None) is None:
    ctx.config['automagic.LayerStacker.stackers'] = \
        stacker.choose_os_stackers(plugin)
base_config_path = "plugins"
progress_callback = volatility.cli.MuteProgress()
constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                       base_config_path, progress_callback,
                                       volatility.cli)
# Render the plugin output straight to syslog.
renderers["syslog"]().render(constructed.run())
def run(self, argstring):
    """Executes the command line module, taking the system arguments,
    determining the plugin to run and then running it.

    Args:
        argstring: the full command line as one string; it is split with
            shlex and parsed exactly as sys.argv would be.

    Returns:
        The constructed plugin on success (it is not run here).
    """
    # Make sure we log everything; trim the console down by default.
    # BUG FIX: the identical handler was previously created and added
    # twice, which doubled every console log line.
    vollog = logging.getLogger()
    #vollog.setLevel(1)
    console = logging.StreamHandler()
    #console.setLevel(logging.WARNING)
    formatter = logging.Formatter(
        '%(levelname)-8s %(name)-12s: %(message)s')
    console.setFormatter(formatter)
    vollog.addHandler(console)

    arg_arr = shlex.split(argstring)
    sys.stdout.write("Volatility 3 Framework {}\n".format(
        constants.PACKAGE_VERSION))
    volatility.framework.require_interface_version(1, 0, 0)

    # Renderer-name -> renderer class for every CLI renderer subclass.
    renderers = dict([
        (x.name.lower(), x)
        for x in framework.class_subclasses(text_renderer.CLIRenderer)
    ])

    parser = argparse.ArgumentParser(
        prog='volatility',
        description="An open-source memory forensics framework")
    parser.add_argument("-c",
                        "--config",
                        help="Load the configuration from a json file",
                        default=None,
                        type=str)
    parser.add_argument(
        "--parallelism",
        help=
        "Enables parallelism (defaults to processes if no argument given)",
        nargs='?',
        choices=['processes', 'threads', 'off'],
        const='processes',
        default=None,
        type=str)
    parser.add_argument(
        "-e",
        "--extend",
        help="Extend the configuration with a new (or changed) setting",
        default=None,
        action='append')
    parser.add_argument(
        "-p",
        "--plugin-dirs",
        help="Semi-colon separated list of paths to find plugins",
        default="",
        type=str)
    parser.add_argument(
        "-s",
        "--symbol-dirs",
        help="Semi-colon separated list of paths to find symbols",
        default="",
        type=str)
    parser.add_argument("-v",
                        "--verbosity",
                        help="Increase output verbosity",
                        default=0,
                        action="count")
    parser.add_argument("-l",
                        "--log",
                        help="Log output to a file as well as the console",
                        default=None,
                        type=str)
    parser.add_argument(
        "-o",
        "--output-dir",
        help="Directory in which to output any generated files",
        default=os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', '..')),
        type=str)
    parser.add_argument("-q",
                        "--quiet",
                        help="Remove progress feedback",
                        default=False,
                        action='store_true')
    parser.add_argument(
        "-r",
        "--renderer",
        metavar='RENDERER',
        help="Determines how to render the output ({})".format(", ".join(
            list(renderers))),
        default="quick",
        choices=list(renderers))
    parser.add_argument(
        "-f",
        "--file",
        metavar='FILE',
        default=None,
        type=str,
        help=
        "Shorthand for --single-location=file:// if single-location is not defined"
    )
    parser.add_argument(
        "--write-config",
        help="Write configuration JSON file out to config.json",
        default=False,
        action='store_true')

    # We have to filter out help, otherwise parse_known_args will trigger the
    # help message before having processed the plugin choice or had the
    # plugin subparser added.
    known_args = [
        arg for arg in arg_arr if arg != '--help' and arg != '-h'
    ]
    partial_args, _ = parser.parse_known_args(known_args)
    if partial_args.plugin_dirs:
        volatility.plugins.__path__ = [
            os.path.abspath(p) for p in partial_args.plugin_dirs.split(";")
        ] + constants.PLUGINS_PATH
    if partial_args.symbol_dirs:
        volatility.symbols.__path__ = [
            os.path.abspath(p) for p in partial_args.symbol_dirs.split(";")
        ] + constants.SYMBOL_BASEPATHS
    if partial_args.log:
        file_logger = logging.FileHandler(partial_args.log)
        #file_logger.setLevel(1)
        file_formatter = logging.Formatter(
            datefmt='%y-%m-%d %H:%M:%S',
            fmt='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        file_logger.setFormatter(file_formatter)
        vollog.addHandler(file_logger)
        vollog.info("Logging started")
    # Map -v counts to console log levels (WARNING downwards, then finer).
    if partial_args.verbosity < 3:
        console.setLevel(30 - (partial_args.verbosity * 10))
    else:
        console.setLevel(10 - (partial_args.verbosity - 2))
    #console.setLevel(0)
    vollog.info("Volatility plugins path: {}".format(
        volatility.plugins.__path__))
    vollog.info("Volatility symbols path: {}".format(
        volatility.symbols.__path__))

    # Set the PARALLELISM
    # BUG FIX: the argparse choice is 'threads', but this previously
    # compared against 'threading', so threaded parallelism was unreachable.
    if partial_args.parallelism == 'processes':
        constants.PARALLELISM = constants.Parallelism.Multiprocessing
    elif partial_args.parallelism == 'threads':
        constants.PARALLELISM = constants.Parallelism.Threading
    else:
        constants.PARALLELISM = constants.Parallelism.Off

    # Do the initialization
    ctx = contexts.Context()  # Construct a blank context
    failures = framework.import_files(
        volatility.plugins,
        True)  # Will not log as console's default level is WARNING
    if failures:
        parser.epilog = "The following plugins could not be loaded (use -vv to see why): " + \
            ", ".join(sorted(failures))
        vollog.info(parser.epilog)
    automagics = automagic.available(ctx)
    plugin_list = framework.list_plugins()

    seen_automagics = set()
    configurables_list = {}
    for amagic in automagics:
        if amagic in seen_automagics:
            continue
        seen_automagics.add(amagic)
        if isinstance(amagic,
                      interfaces.configuration.ConfigurableInterface):
            self.populate_requirements_argparse(parser, amagic.__class__)
            configurables_list[amagic.__class__.__name__] = amagic

    subparser = parser.add_subparsers(title="Plugins",
                                      dest="plugin",
                                      action=HelpfulSubparserAction)
    for plugin in sorted(plugin_list):
        plugin_parser = subparser.add_parser(
            plugin, help=plugin_list[plugin].__doc__)
        self.populate_requirements_argparse(plugin_parser,
                                            plugin_list[plugin])
        configurables_list[plugin] = plugin_list[plugin]

    ###
    # PASS TO UI
    ###
    # Hand the plugin requirements over to the CLI (us) and let it construct the config tree

    # Run the argparser
    args = parser.parse_args(arg_arr)
    # (leftover debug print() calls removed here)
    if args.plugin is None:
        parser.error("Please select a plugin to run")

    vollog.log(constants.LOGLEVEL_VVV,
               "Cache directory used: {}".format(constants.CACHE_PATH))

    plugin = plugin_list[args.plugin]
    base_config_path = "plugins"
    plugin_config_path = interfaces.configuration.path_join(
        base_config_path, plugin.__name__)

    # Special case the -f argument because people use is so frequently
    # It has to go here so it can be overridden by single-location if it's defined
    # NOTE: This will *BREAK* if LayerStacker, or the automagic configuration system, changes at all
    ###
    if args.file:
        file_name = os.path.abspath(args.file)
        if not os.path.exists(
                file_name) and "panda.panda" not in file_name:
            print("File does not exist: {}".format(file_name))
        else:
            single_location = "file:" + request.pathname2url(file_name)
            ctx.config[
                'automagic.LayerStacker.single_location'] = single_location

    # UI fills in the config, here we load it from the config file and do it before we process the CL parameters
    if args.config:
        with open(args.config, "r") as f:
            json_val = json.load(f)
            ctx.config.splice(
                plugin_config_path,
                interfaces.configuration.HierarchicalDict(json_val))

    self.populate_config(ctx, configurables_list, args, plugin_config_path)

    if args.extend:
        for extension in args.extend:
            if '=' not in extension:
                raise ValueError(
                    "Invalid extension (extensions must be of the format \"conf.path.value='value'\")"
                )
            address, value = extension[:extension.find('=')], json.loads(
                extension[extension.find('=') + 1:])
            ctx.config[address] = value

    # It should be up to the UI to determine which automagics to run, so this is before BACK TO THE FRAMEWORK
    automagics = automagic.choose_automagic(automagics, plugin)
    self.output_dir = args.output_dir

    ###
    # BACK TO THE FRAMEWORK
    ###
    try:
        progress_callback = PrintedProgress()
        if args.quiet:
            progress_callback = MuteProgress()
        constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                               base_config_path,
                                               progress_callback, self)
        if args.write_config:
            vollog.debug("Writing out configuration data to config.json")
            with open("config.json", "w") as f:
                json.dump(dict(constructed.build_configuration()),
                          f,
                          sort_keys=True,
                          indent=2)
        return constructed
    except exceptions.UnsatisfiedException as excp:
        self.process_exceptions(excp)
        parser.exit(
            1, "Unable to validate the plugin requirements: {}\n".format(
                [x for x in excp.unsatisfied]))
def run(self):
    """Executes the command line module, taking the system arguments,
    determining the plugin to run and then running it."""
    sys.stdout.write("Volshell (Volatility 3 Framework) {}\n".format(
        constants.PACKAGE_VERSION))

    framework.require_interface_version(1, 0, 0)

    parser = argparse.ArgumentParser(
        prog='volshell',
        description=
        "A tool for interactivate forensic analysis of memory images")
    parser.add_argument("-c",
                        "--config",
                        help="Load the configuration from a json file",
                        default=None,
                        type=str)
    parser.add_argument(
        "-e",
        "--extend",
        help="Extend the configuration with a new (or changed) setting",
        default=None,
        action='append')
    parser.add_argument(
        "-p",
        "--plugin-dirs",
        help="Semi-colon separated list of paths to find plugins",
        default="",
        type=str)
    parser.add_argument(
        "-s",
        "--symbol-dirs",
        help="Semi-colon separated list of paths to find symbols",
        default="",
        type=str)
    parser.add_argument("-v",
                        "--verbosity",
                        help="Increase output verbosity",
                        default=0,
                        action="count")
    parser.add_argument(
        "-o",
        "--output-dir",
        help="Directory in which to output any generated files",
        default=os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', '..')),
        type=str)
    parser.add_argument("-q",
                        "--quiet",
                        help="Remove progress feedback",
                        default=False,
                        action='store_true')
    parser.add_argument("--log",
                        help="Log output to a file as well as the console",
                        default=None,
                        type=str)
    parser.add_argument(
        "-f",
        "--file",
        metavar='FILE',
        default=None,
        type=str,
        help=
        "Shorthand for --single-location=file:// if single-location is not defined"
    )
    parser.add_argument(
        "--write-config",
        help="Write configuration JSON file out to config.json",
        default=False,
        action='store_true')
    parser.add_argument("--clear-cache",
                        help="Clears out all short-term cached items",
                        default=False,
                        action='store_true')

    # Volshell specific flags
    os_specific = parser.add_mutually_exclusive_group(required=False)
    os_specific.add_argument("-w",
                             "--windows",
                             default=False,
                             action="store_true",
                             help="Run a Windows volshell")
    os_specific.add_argument("-l",
                             "--linux",
                             default=False,
                             action="store_true",
                             help="Run a Linux volshell")
    os_specific.add_argument("-m",
                             "--mac",
                             default=False,
                             action="store_true",
                             help="Run a Mac volshell")

    # We have to filter out help, otherwise parse_known_args will trigger the
    # help message before having processed the plugin choice or had the
    # plugin subparser added.
    known_args = [
        arg for arg in sys.argv if arg != '--help' and arg != '-h'
    ]
    partial_args, _ = parser.parse_known_args(known_args)
    # Prepend user-supplied plugin/symbol directories to the search paths.
    if partial_args.plugin_dirs:
        volatility.plugins.__path__ = [
            os.path.abspath(p) for p in partial_args.plugin_dirs.split(";")
        ] + constants.PLUGINS_PATH
    if partial_args.symbol_dirs:
        volatility.symbols.__path__ = [
            os.path.abspath(p) for p in partial_args.symbol_dirs.split(";")
        ] + constants.SYMBOL_BASEPATHS

    vollog.info("Volatility plugins path: {}".format(
        volatility.plugins.__path__))
    vollog.info("Volatility symbols path: {}".format(
        volatility.symbols.__path__))

    # Optionally mirror all log output to a file.
    if partial_args.log:
        file_logger = logging.FileHandler(partial_args.log)
        file_logger.setLevel(0)
        file_formatter = logging.Formatter(
            datefmt='%y-%m-%d %H:%M:%S',
            fmt='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        file_logger.setFormatter(file_formatter)
        vollog.addHandler(file_logger)
        vollog.info("Logging started")

    # Map -v counts to console log levels.
    # NOTE(review): 'console' and 'vollog' are presumably module-level
    # logging objects set up outside this method — confirm in the module.
    if partial_args.verbosity < 3:
        console.setLevel(30 - (partial_args.verbosity * 10))
    else:
        console.setLevel(10 - (partial_args.verbosity - 2))

    if partial_args.clear_cache:
        # Remove every short-term cache file.
        for cache_filename in glob.glob(
                os.path.join(constants.CACHE_PATH, '*.cache')):
            os.unlink(cache_filename)

    # Do the initialization
    ctx = contexts.Context()  # Construct a blank context
    failures = framework.import_files(
        volatility.plugins,
        True)  # Will not log as console's default level is WARNING
    if failures:
        parser.epilog = "The following plugins could not be loaded (use -vv to see why): " + \
            ", ".join(sorted(failures))
        vollog.info(parser.epilog)
    automagics = automagic.available(ctx)
    # Initialize the list of plugins in case volshell needs it
    framework.list_plugins()

    # Expose each automagic's configuration options on the argparser.
    seen_automagics = set()
    configurables_list = {}
    for amagic in automagics:
        if amagic in seen_automagics:
            continue
        seen_automagics.add(amagic)
        if isinstance(amagic,
                      interfaces.configuration.ConfigurableInterface):
            self.populate_requirements_argparse(parser, amagic.__class__)
            configurables_list[amagic.__class__.__name__] = amagic

    # We don't list plugin arguments, because they can be provided within python
    volshell_plugin_list = {
        'generic': generic.Volshell,
        'windows': windows.Volshell
    }
    for plugin in volshell_plugin_list:
        subparser = parser.add_argument_group(
            title=plugin.capitalize(),
            description="Configuration options based on {} options".format(
                plugin.capitalize()))
        self.populate_requirements_argparse(subparser,
                                            volshell_plugin_list[plugin])
        configurables_list[plugin] = volshell_plugin_list[plugin]

    ###
    # PASS TO UI
    ###
    # Hand the plugin requirements over to the CLI (us) and let it construct the config tree

    # Run the argparser
    args = parser.parse_args()

    vollog.log(constants.LOGLEVEL_VVV,
               "Cache directory used: {}".format(constants.CACHE_PATH))

    # Pick the OS-specific volshell flavour; generic is the default.
    plugin = generic.Volshell
    if args.windows:
        plugin = windows.Volshell
    if args.linux:
        plugin = linux.Volshell
    if args.mac:
        plugin = mac.Volshell

    base_config_path = "plugins"
    plugin_config_path = interfaces.configuration.path_join(
        base_config_path, plugin.__name__)

    # Special case the -f argument because people use is so frequently
    # It has to go here so it can be overridden by single-location if it's defined
    # NOTE: This will *BREAK* if LayerStacker, or the automagic configuration system, changes at all
    ###
    if args.file:
        file_name = os.path.abspath(args.file)
        if not os.path.exists(file_name):
            vollog.log(logging.INFO,
                       "File does not exist: {}".format(file_name))
        else:
            single_location = "file:" + request.pathname2url(file_name)
            ctx.config[
                'automagic.LayerStacker.single_location'] = single_location

    # UI fills in the config, here we load it from the config file
    # and do it before we process the CL parameters
    if args.config:
        with open(args.config, "r") as f:
            json_val = json.load(f)
            ctx.config.splice(
                plugin_config_path,
                interfaces.configuration.HierarchicalDict(json_val))

    self.populate_config(ctx, configurables_list, args, plugin_config_path)

    if args.extend:
        for extension in args.extend:
            if '=' not in extension:
                raise ValueError(
                    "Invalid extension (extensions must be of the format \"conf.path.value='value'\")"
                )
            address, value = extension[:extension.find('=')], json.loads(
                extension[extension.find('=') + 1:])
            ctx.config[address] = value

    # It should be up to the UI to determine which automagics to run, so this is before BACK TO THE FRAMEWORK
    automagics = automagic.choose_automagic(automagics, plugin)
    self.output_dir = args.output_dir

    ###
    # BACK TO THE FRAMEWORK
    ###
    try:
        progress_callback = cli.PrintedProgress()
        if args.quiet:
            progress_callback = cli.MuteProgress()
        constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                               base_config_path,
                                               progress_callback, self)
        if args.write_config:
            vollog.debug("Writing out configuration data to config.json")
            with open("config.json", "w") as f:
                json.dump(dict(constructed.build_configuration()),
                          f,
                          sort_keys=True,
                          indent=2)
        # Construct and run the plugin
        constructed.run()
    except exceptions.UnsatisfiedException as excp:
        self.process_exceptions(excp)
        parser.exit(
            1, "Unable to validate the plugin requirements: {}\n".format(
                [x for x in excp.unsatisfied]))
def run_plugin(dump_obj, plugin_obj, es_url, params=None):
    """Execute a single plugin on a dump with optional params.
    If success data are sent to elastic.

    Args:
        dump_obj: dump model instance; its ``upload.path`` is the memory
            image and its ``index`` names the elastic index prefix.
        plugin_obj: plugin model instance (name, local_dump, clamav/vt/
            regipy flags).
        es_url: Elasticsearch endpoint URL.
        params: optional dict of extra plugin configuration options.

    Returns:
        0 in every case; success/failure is recorded on the Result row
        and pushed over the websocket.
    """
    try:
        ctx = contexts.Context()
        constants.PARALLELISM = constants.Parallelism.Off
        failures = framework.import_files(volatility.plugins, True)
        automagics = automagic.available(ctx)
        plugin_list = framework.list_plugins()
        json_renderer = ReturnJsonRenderer
        # De-duplicate the automagic list (order preserved).
        seen_automagics = set()
        for amagic in automagics:
            if amagic in seen_automagics:
                continue
            seen_automagics.add(amagic)
        plugin = plugin_list.get(plugin_obj.name)
        base_config_path = "plugins"
        # Point the LayerStacker at the uploaded dump via a file: URL.
        file_name = os.path.abspath(dump_obj.upload.path)
        single_location = "file:" + pathname2url(file_name)
        ctx.config["automagic.LayerStacker.single_location"] = single_location
        automagics = automagic.choose_automagic(automagics, plugin)
        # LOCAL DUMPS REQUIRES FILES
        local_dump = plugin_obj.local_dump
        # ADD PARAMETERS, AND IF LOCAL DUMP ENABLE ADD DUMP TRUE BY DEFAULT
        plugin_config_path = interfaces.configuration.path_join(
            base_config_path, plugin.__name__)
        if params:
            # ADD PARAMETERS TO PLUGIN CONF
            for k, v in params.items():
                extended_path = interfaces.configuration.path_join(
                    plugin_config_path, k)
                ctx.config[extended_path] = v
                if k == "dump" and v == True:
                    # IF DUMP TRUE HAS BEEN PASS IT'LL DUMP LOCALLY
                    local_dump = True
        if not params and local_dump:
            # IF ADMIN SET LOCAL DUMP ADD DUMP TRUE AS PARAMETER
            extended_path = interfaces.configuration.path_join(
                plugin_config_path, "dump")
            ctx.config[extended_path] = True
        if local_dump:
            # IF PARAM/ADMIN DUMP CREATE FILECONSUMER
            consumer = FileConsumer()
            local_path = "{}/{}/{}".format(settings.MEDIA_ROOT,
                                           dump_obj.index, plugin_obj.name)
            if not os.path.exists(local_path):
                os.mkdir(local_path)
        else:
            consumer = None
        try:
            # RUN PLUGIN
            constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                                   base_config_path,
                                                   MuteProgress(), consumer)
        except exceptions.UnsatisfiedException as excp:
            # LOG UNSATISFIED ERROR
            # result code 3 = unsatisfied plugin requirements
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 3
            result.description = "\n".join([
                excp.unsatisfied[config_path].description
                for config_path in excp.unsatisfied
            ])
            result.save()
            send_to_ws(dump_obj, result, plugin_obj.name)
            return 0
        try:
            runned_plugin = constructed.run()
        except Exception as excp:
            # LOG GENERIC ERROR [VOLATILITY]
            # result code 4 = crash inside the volatility plugin
            fulltrace = traceback.TracebackException.from_exception(
                excp).format(chain=True)
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 4
            result.description = "\n".join(fulltrace)
            result.save()
            send_to_ws(dump_obj, result, plugin_obj.name)
            return 0
        # RENDER OUTPUT IN JSON AND PUT IT IN ELASTIC
        json_data, error = json_renderer().render(runned_plugin)
        if len(json_data) > 0:
            # IF DUMP STORE FILE ON DISK
            if consumer and consumer.files:
                for filedata in consumer.files:
                    output_path = "{}/{}".format(
                        local_path, filedata.preferred_filename)
                    with open(output_path, "wb") as f:
                        f.write(filedata.data.getvalue())
                ## RUN CLAMAV ON ALL FOLDER
                if plugin_obj.clamav_check:
                    cd = pyclamd.ClamdUnixSocket()
                    match = cd.multiscan_file(local_path)
                    match = {} if not match else match
                else:
                    match = {}
                result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
                # BULK CREATE EXTRACTED DUMP FOR EACH DUMPED FILE
                ed = ExtractedDump.objects.bulk_create([
                    ExtractedDump(
                        result=result,
                        path="{}/{}".format(local_path,
                                            filedata.preferred_filename),
                        sha256=sha256_checksum("{}/{}".format(
                            local_path, filedata.preferred_filename)),
                        clamav=(match["{}/{}".format(
                            local_path, filedata.preferred_filename)][1]
                                if "{}/{}".format(
                                    local_path, filedata.preferred_filename)
                                in match.keys() else None),
                    ) for filedata in consumer.files
                ])
                ## RUN VT AND REGIPY AS DASK SUBTASKS
                if plugin_obj.vt_check or plugin_obj.regipy_check:
                    dask_client = get_client()
                    secede()
                    tasks = []
                    for filedata in consumer.files:
                        task = dask_client.submit(
                            run_vt if plugin_obj.vt_check else run_regipy,
                            result.pk,
                            "{}/{}".format(local_path,
                                           filedata.preferred_filename),
                        )
                        tasks.append(task)
                    results = dask_client.gather(tasks)
                    rejoin()
            # Bulk-index the rendered rows into "<dump_index>_<plugin>".
            es = Elasticsearch(
                [es_url],
                request_timeout=60,
                timeout=60,
                max_retries=10,
                retry_on_timeout=True,
            )
            helpers.bulk(
                es,
                gendata(
                    "{}_{}".format(dump_obj.index, plugin_obj.name.lower()),
                    plugin_obj.name,
                    json_data,
                ),
            )
            # EVERYTHING OK
            # result code 2 = success with data
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 2
            result.description = error
            result.save()
        else:
            # OK BUT EMPTY
            # result code 1 = success but no rows produced
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 1
            result.description = error
            result.save()
        send_to_ws(dump_obj, result, plugin_obj.name)
        return 0
    except Exception as excp:
        # LOG GENERIC ERROR [ELASTIC]
        fulltrace = traceback.TracebackException.from_exception(excp).format(
            chain=True)
        result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
        result.result = 4
        result.description = "\n".join(fulltrace)
        result.save()
        send_to_ws(dump_obj, result, plugin_obj.name)
        return 0