def __call__(self,
             context: interfaces.context.ContextInterface,
             config_path: str,
             requirement: interfaces.configuration.RequirementInterface,
             progress_callback: constants.ProgressCallback = None) -> Optional[List[str]]:
    """Runs the automagic over the configurable."""
    # Make sure every layer class is loaded before attempting to stack
    framework.import_files(sys.modules['volatility.framework.layers'])

    # Quick exit if we're not needed
    if not requirement.unsatisfied(context, config_path):
        return None

    # Bow out quickly if the UI hasn't provided a single_location
    # (note: checked against our own context/config_path, not the arguments)
    missing = self.unsatisfied(self.context, self.config_path)
    if missing:
        vollog.info("Unable to run LayerStacker, unsatisfied requirement: {}".format(missing))
        return list(missing)
    if not self.config or not self.config.get('single_location', None):
        raise ValueError("Unable to run LayerStacker, single_location parameter not provided")

    # Search for suitable requirements
    self.stack(context, config_path, requirement, progress_callback)

    if progress_callback is not None:
        progress_callback(100, "Stacking attempts finished")
    return None
def __call__(self,
             context: interfaces.context.ContextInterface,
             config_path: str,
             requirement: interfaces.configuration.RequirementInterface,
             progress_callback=None,
             optional=False) -> List[str]:
    """Recursively attempts to construct unsatisfied requirements.

    Returns the config paths of subrequirements that could not be satisfied
    (empty list on success).
    """
    # Make sure we import the layers, so they can be reconstructed
    framework.import_files(sys.modules['volatility.framework.layers'])
    failures = []  # type: List[str]
    if requirement.unsatisfied(context, config_path):
        # A failed top-level check means we must dig deeper; it also ensures
        # TranslationLayerRequirements have the correct subrequirements when
        # their class is populated
        base_path = interfaces.configuration.path_join(config_path, requirement.name)
        for subreq in requirement.requirements.values():
            try:
                self(context, base_path, subreq, optional=optional or subreq.optional)
            except Exception as e:
                # A failure here just means the configuration isn't complete for that item
                vollog.log(constants.LOGLEVEL_VVVV,
                           "Construction Exception occurred: {}".format(e))
            invalid = subreq.unsatisfied(context, base_path)
            # Optional paths are still traversed, but failures under an optional
            # parent (or optional subreq) are not reported
            if invalid and not (optional or subreq.optional):
                vollog.log(constants.LOGLEVEL_V,
                           "Failed on requirement: {}".format(base_path))
                failures.append(interfaces.configuration.path_join(base_path, subreq.name))
        if failures:
            return failures
        elif isinstance(requirement, interfaces.configuration.ConstructableRequirementInterface):
            # All subrequirements are filled, so populate the requirement itself
            requirement.construct(context, config_path)
    if progress_callback is not None:
        progress_callback(100, "Reconstruction finished")
    return []
def choose_os_stackers(plugin):
    """Identifies the stackers that should be run, based on the plugin (and thus os) provided"""
    # The OS lives at the third component of the plugin's module path
    os_component = plugin.__module__.split('.')[2]
    # Ensure all stackers are loaded
    framework.import_files(sys.modules['volatility.framework.layers'])
    ordered_stackers = sorted(framework.class_subclasses(interfaces.automagic.StackerLayerInterface),
                              key=lambda stacker: stacker.stack_order)
    return [stacker.__name__
            for stacker in ordered_stackers
            if os_component not in stacker.exclusion_list]
def init(self, plugin_class, memdump):
    """Initialise all Volatility 3 internals and construct the requested plugin.

    @param plugin_class: plugin class. Ex. windows.pslist.PsList
    @param memdump: path to memdump. Ex. file:///home/vol3/memory.dmp
    @return: Volatility3 plugin interface.
    """
    volatility.framework.require_interface_version(1, 0, 0)
    # Set the PARALLELISM (alternatives: Parallelism.Multiprocessing / Parallelism.Threading)
    constants.PARALLELISM = constants.Parallelism.Off
    # Do the initialization
    self.context = contexts.Context()  # Construct a blank context
    # Will not log as console's default level is WARNING
    failures = framework.import_files(volatility.plugins, True)
    self.automagics = automagic.available(self.context)
    # Initialize the list of plugins in case the plugin needs it
    plugin_list = framework.list_plugins()
    # BUG FIX: previously read self.memdump, silently ignoring the memdump
    # argument (and raising AttributeError when the attribute was unset).
    self.memdump = memdump
    self.context.config['automagic.LayerStacker.single_location'] = memdump
    self.automagics = automagic.choose_automagic(self.automagics, plugin_class)
    # self.base_config_path is assumed to be set by the enclosing class — TODO confirm
    volatility_interface = plugins.construct_plugin(self.context, self.automagics, plugin_class,
                                                    self.base_config_path, None, None)
    return volatility_interface
def available(context: interfaces.context.ContextInterface) -> List[interfaces.automagic.AutomagicInterface]:
    """Returns an ordered list of all subclasses of
    :class:`~volatility.framework.interfaces.automagic.AutomagicInterface`.

    Instances are ordered by their ``priority`` attribute so the automagics
    run in an appropriate order.

    Args:
        context: The context that will contain any automagic configuration values.
    """
    # Make sure every automagic module in this package is loaded
    import_files(sys.modules[__name__])
    base_path = constants.AUTOMAGIC_CONFIG_PATH
    instances = []
    for automagic_class in class_subclasses(interfaces.automagic.AutomagicInterface):
        instance_path = interfaces.configuration.path_join(base_path, automagic_class.__name__)
        instances.append(automagic_class(context, instance_path))
    instances.sort(key=lambda instance: instance.priority)
    return instances
def prepare_forensic_session(self, event):
    """Builds a fresh Volatility 3 context for the dump named in *event* and
    broadcasts it via a 'forensic_session' trigger."""
    dump_path = event.memdump_path
    # init Volatility3; report load failures only when debugging
    load_failures = framework.import_files(volatility.plugins, True)
    if load_failures and self.debug:
        for failed_module in load_failures:
            self.logger.debug('Plugin failed to load: %s', failed_module)
    plugin_list = framework.list_plugins()
    ctx = contexts.Context()  # Construct a blank context
    automagics = automagic.available(ctx)  # Find all the automagics
    # Point the blank context at the memory dump
    ctx.config['automagic.LayerStacker.single_location'] = Path(dump_path).as_uri()
    # Hand the prepared session to any listeners
    self.context.trigger('forensic_session',
                         context=ctx,
                         automagics=automagics,
                         plugin_list=plugin_list)
def __init__(self, image_path, plugin_name, plugins_path):
    """Runs *plugin_name* against the memory image at *image_path* and stores
    the parsed results on self.results."""
    framework.require_interface_version(2, 0, 0)
    # import the plugin files and resolve the requested plugin
    failures = framework.import_files(volatility.plugins, True)
    plugin_list = framework.list_plugins()
    plugin_info = self.getPlugin(plugin_name)
    if plugin_info is None:
        raise Exception("Plugin information for [" + plugin_name + "] not found")
    plugin_info['obj'] = plugin_list[plugin_info['name']]
    # resolve and validate the image path
    file_name = os.path.abspath(image_path)
    if not os.path.exists(file_name):
        raise Exception("File does not exist: {}".format(file_name))
    # blank context plus the automagics appropriate for this plugin
    self.ctx = contexts.Context()
    automagics = automagic.available(self.ctx)
    automagics = automagic.choose_automagic(automagics, plugin_info['obj'])
    single_location = "file:" + request.pathname2url(file_name)
    self.ctx.config['automagic.LayerStacker.single_location'] = single_location
    # build and run the plugin
    base_config_path = os.path.abspath(plugins_path)
    progress_callback = PrintedProgress()
    constructed = plugins.construct_plugin(self.ctx, automagics, plugin_info['obj'],
                                           base_config_path, progress_callback,
                                           self.file_handler_class_factory())
    treegrid = constructed.run()
    # render to json, strip keyspace noise, then hand off to the plugin-specific parser
    results = self.render(treegrid)
    self.removeKeySpace(results)
    self.results = plugin_info['function'](results)
def handle(self, *args, **kwargs):
    """Synchronise the Plugin table with the plugins Volatility can load.

    Disables db plugins that are no longer available, adds newly available
    ones (deriving operating_system from the plugin name prefix), and ensures
    every user has a UserPlugin row for each available plugin.
    """
    plugins = Plugin.objects.all()
    installed_plugins = [x.name for x in plugins]
    if len(plugins) > 0:
        self.stdout.write(
            self.style.SUCCESS("Plugins in db: {}".format(
                ", ".join(installed_plugins))))
    else:
        self.stdout.write(self.style.SUCCESS("No plugins in db"))

    # Load every plugin module so list_plugins() sees them all
    framework.import_files(volatility.plugins, True)
    available_plugins = framework.list_plugins()
    # BUG FIX: previously joined installed_plugins here, so the "Available
    # Plugins" heading re-printed the db contents instead of the framework's.
    self.stdout.write("Available Plugins: {}".format(
        ", ".join(available_plugins)))

    # Disable db plugins that disappeared from the framework
    for plugin in plugins:
        if plugin.name not in available_plugins:
            plugin.disabled = True
            plugin.save()
            self.stdout.write(
                self.style.ERROR(
                    "Plugin {} disabled. It is not available anymore!".
                    format(plugin)))

    for plugin in available_plugins:
        if plugin not in installed_plugins:
            # New plugin: tag its OS from the name prefix and store it
            if plugin.startswith("linux"):
                plugin = Plugin(name=plugin, operating_system="Linux")
            elif plugin.startswith("windows"):
                plugin = Plugin(name=plugin, operating_system="Windows")
            elif plugin.startswith("mac"):
                plugin = Plugin(name=plugin, operating_system="Mac")
            else:
                plugin = Plugin(name=plugin, operating_system="Other")
            plugin.save()
            self.stdout.write(
                self.style.SUCCESS("Plugin {} added!".format(plugin)))
        else:
            plugin = Plugin.objects.get(name=plugin)
        # Make sure every user is linked to this plugin
        for user in get_user_model().objects.all():
            up, created = UserPlugin.objects.get_or_create(user=user,
                                                           plugin=plugin)
            if created:
                self.stdout.write(
                    self.style.SUCCESS("Plugin {} added to {}!".format(
                        plugin, user)))
def run(self):
    """Configure logging, initialise Volatility 3, and construct (without
    running) the linux.pstree.PsTree plugin against a fake QEMU-backed file.

    Returns:
        The constructed plugin instance.
    """
    # Root logger + console handler; adjust the commented levels for more detail.
    # FIX: removed a dead `vollog = logging.getLogger(__name__)` that was
    # immediately overwritten by the root-logger lookup below.
    vollog = logging.getLogger()
    # vollog.setLevel(1000)
    console = logging.StreamHandler()
    # console.setLevel(logging.WARNING)
    formatter = logging.Formatter('%(levelname)-8s %(name)-12s: %(message)s')
    console.setFormatter(formatter)
    vollog.addHandler(console)
    volatility.framework.require_interface_version(1, 0, 0)
    # also change here for log level
    # console.setLevel(1000)
    constants.PARALLELISM = constants.Parallelism.Off
    ctx = contexts.Context()  # Construct a blank context
    failures = framework.import_files(
        volatility.plugins, True)  # Will not log as console's default level is WARNING
    automagics = automagic.available(ctx)
    plugin_list = framework.list_plugins()
    # Record configurable automagics (deduplicated)
    seen_automagics = set()
    configurables_list = {}
    for amagic in automagics:
        if amagic in seen_automagics:
            continue
        seen_automagics.add(amagic)
        if isinstance(amagic, interfaces.configuration.ConfigurableInterface):
            configurables_list[amagic.__class__.__name__] = amagic
    plugin_name = "linux.pstree.PsTree"  # we're just "kinda" running a plugin
    plugin = plugin_list[plugin_name]
    base_config_path = "plugins"
    # It should be up to the UI to determine which automagics to run,
    # so this happens before handing back to the framework
    automagics = automagic.choose_automagic(automagics, plugin)
    # this is our fake file that represents QEMU memory
    single_location = "file:" + pathname2url("/panda.panda")
    ctx.config['automagic.LayerStacker.single_location'] = single_location
    constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                           base_config_path, MuteProgress(), self)
    return constructed
def get_parameters(plugin):
    """Obtains parameters list from volatility plugin"""
    ctx = contexts.Context()
    framework.import_files(volatility.plugins, True)
    plugin_list = framework.list_plugins()
    params = []
    if plugin not in plugin_list:
        return params
    for requirement in plugin_list[plugin].get_requirements():
        # Describe each supported requirement kind; skip anything else
        entry = {"optional": requirement.optional, "name": requirement.name}
        if isinstance(requirement, requirements.URIRequirement):
            entry["mode"] = "single"
            entry["type"] = "file"
        elif isinstance(requirement, interfaces.configuration.SimpleTypeRequirement):
            entry["mode"] = "single"
            entry["type"] = requirement.instance_type
        elif isinstance(requirement,
                        volatility.framework.configuration.requirements.ListRequirement):
            entry["mode"] = "list"
            entry["type"] = requirement.element_type
        elif isinstance(requirement,
                        volatility.framework.configuration.requirements.ChoiceRequirement):
            entry["type"] = str
            entry["mode"] = "single"
            entry["choices"] = requirement.choices
        else:
            continue
        params.append(entry)
    return params
import volatility.cli import renderersyslog import configparser from urllib import parse, request from volatility.cli import text_renderer from volatility import framework from volatility.framework.automagic import stacker from volatility.framework import automagic, constants, contexts, exceptions, interfaces, plugins, configuration config = configparser.ConfigParser() config.read('config_syslog.ini') pluginname = str(config["plugin"]["value"]) filename = str(config["location"]["file"]) framework.import_files(volatility.plugins, True) renderers = dict([ (x.name.lower(), x) for x in framework.class_subclasses(text_renderer.CLIRenderer) ]) ctx = contexts.Context() single_location = "file:" + request.pathname2url(filename) ctx.config['automagic.LayerStacker.single_location'] = single_location automagics = automagic.available(ctx) plugin_list = framework.list_plugins() plugin = plugin_list[pluginname] automagics = automagic.choose_automagic(automagics, plugin) if ctx.config.get('automagic.LayerStacker.stackers', None) is None:
def run(self, argstring):
    """Executes the command line module, taking the system arguments,
    determining the plugin to run and then constructing it.

    Args:
        argstring: the full command line to parse, as a single string.

    Returns:
        The constructed (but not yet run) plugin instance.
    """
    # Make sure we log everything; trim the console down by default.
    # FIX: this handler setup used to appear twice verbatim, attaching two
    # console handlers and printing every log message twice.
    vollog = logging.getLogger()
    # vollog.setLevel(1)
    console = logging.StreamHandler()
    # console.setLevel(logging.FATAL)
    formatter = logging.Formatter('%(levelname)-8s %(name)-12s: %(message)s')
    console.setFormatter(formatter)
    vollog.addHandler(console)

    arg_arr = shlex.split(argstring)
    sys.stdout.write("Volatility 3 Framework {}\n".format(
        constants.PACKAGE_VERSION))
    volatility.framework.require_interface_version(1, 0, 0)

    renderers = dict([
        (x.name.lower(), x)
        for x in framework.class_subclasses(text_renderer.CLIRenderer)
    ])

    parser = argparse.ArgumentParser(
        prog='volatility',
        description="An open-source memory forensics framework")
    parser.add_argument("-c",
                        "--config",
                        help="Load the configuration from a json file",
                        default=None,
                        type=str)
    parser.add_argument(
        "--parallelism",
        help=
        "Enables parallelism (defaults to processes if no argument given)",
        nargs='?',
        choices=['processes', 'threads', 'off'],
        const='processes',
        default=None,
        type=str)
    parser.add_argument(
        "-e",
        "--extend",
        help="Extend the configuration with a new (or changed) setting",
        default=None,
        action='append')
    parser.add_argument(
        "-p",
        "--plugin-dirs",
        help="Semi-colon separated list of paths to find plugins",
        default="",
        type=str)
    parser.add_argument(
        "-s",
        "--symbol-dirs",
        help="Semi-colon separated list of paths to find symbols",
        default="",
        type=str)
    parser.add_argument("-v",
                        "--verbosity",
                        help="Increase output verbosity",
                        default=0,
                        action="count")
    parser.add_argument("-l",
                        "--log",
                        help="Log output to a file as well as the console",
                        default=None,
                        type=str)
    parser.add_argument(
        "-o",
        "--output-dir",
        help="Directory in which to output any generated files",
        default=os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', '..')),
        type=str)
    parser.add_argument("-q",
                        "--quiet",
                        help="Remove progress feedback",
                        default=False,
                        action='store_true')
    parser.add_argument(
        "-r",
        "--renderer",
        metavar='RENDERER',
        help="Determines how to render the output ({})".format(", ".join(
            list(renderers))),
        default="quick",
        choices=list(renderers))
    parser.add_argument(
        "-f",
        "--file",
        metavar='FILE',
        default=None,
        type=str,
        help=
        "Shorthand for --single-location=file:// if single-location is not defined"
    )
    parser.add_argument(
        "--write-config",
        help="Write configuration JSON file out to config.json",
        default=False,
        action='store_true')

    # We have to filter out help, otherwise parse_known_args will trigger the
    # help message before having processed the plugin choice or had the plugin
    # subparser added.
    known_args = [
        arg for arg in arg_arr if arg != '--help' and arg != '-h'
    ]
    partial_args, _ = parser.parse_known_args(known_args)
    if partial_args.plugin_dirs:
        volatility.plugins.__path__ = [
            os.path.abspath(p) for p in partial_args.plugin_dirs.split(";")
        ] + constants.PLUGINS_PATH
    if partial_args.symbol_dirs:
        volatility.symbols.__path__ = [
            os.path.abspath(p) for p in partial_args.symbol_dirs.split(";")
        ] + constants.SYMBOL_BASEPATHS
    if partial_args.log:
        file_logger = logging.FileHandler(partial_args.log)
        # file_logger.setLevel(1)
        file_formatter = logging.Formatter(
            datefmt='%y-%m-%d %H:%M:%S',
            fmt='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        file_logger.setFormatter(file_formatter)
        vollog.addHandler(file_logger)
        vollog.info("Logging started")
    if partial_args.verbosity < 3:
        console.setLevel(30 - (partial_args.verbosity * 10))
    else:
        console.setLevel(10 - (partial_args.verbosity - 2))
    vollog.info("Volatility plugins path: {}".format(
        volatility.plugins.__path__))
    vollog.info("Volatility symbols path: {}".format(
        volatility.symbols.__path__))

    # Set the PARALLELISM.
    # FIX: the argparse choice is spelled 'threads' above; the old comparison
    # against 'threading' could never match, so --parallelism=threads silently
    # disabled parallelism.
    if partial_args.parallelism == 'processes':
        constants.PARALLELISM = constants.Parallelism.Multiprocessing
    elif partial_args.parallelism == 'threads':
        constants.PARALLELISM = constants.Parallelism.Threading
    else:
        constants.PARALLELISM = constants.Parallelism.Off

    # Do the initialization
    ctx = contexts.Context()  # Construct a blank context
    failures = framework.import_files(
        volatility.plugins,
        True)  # Will not log as console's default level is WARNING
    if failures:
        parser.epilog = "The following plugins could not be loaded (use -vv to see why): " + \
            ", ".join(sorted(failures))
        vollog.info(parser.epilog)
    automagics = automagic.available(ctx)
    plugin_list = framework.list_plugins()

    # Register argparse options for each configurable automagic (deduplicated)
    seen_automagics = set()
    configurables_list = {}
    for amagic in automagics:
        if amagic in seen_automagics:
            continue
        seen_automagics.add(amagic)
        if isinstance(amagic, interfaces.configuration.ConfigurableInterface):
            self.populate_requirements_argparse(parser, amagic.__class__)
            configurables_list[amagic.__class__.__name__] = amagic

    # One subparser per plugin so plugin-specific options show in --help
    subparser = parser.add_subparsers(title="Plugins",
                                      dest="plugin",
                                      action=HelpfulSubparserAction)
    for plugin in sorted(plugin_list):
        plugin_parser = subparser.add_parser(
            plugin, help=plugin_list[plugin].__doc__)
        self.populate_requirements_argparse(plugin_parser,
                                            plugin_list[plugin])
        configurables_list[plugin] = plugin_list[plugin]

    ###
    # PASS TO UI
    ###
    # Hand the plugin requirements over to the CLI (us) and let it construct the config tree
    # Run the argparser
    args = parser.parse_args(arg_arr)
    # FIX: removed leftover debug print() calls of verbosity/plugin here.
    if args.plugin is None:
        parser.error("Please select a plugin to run")

    vollog.log(constants.LOGLEVEL_VVV,
               "Cache directory used: {}".format(constants.CACHE_PATH))

    plugin = plugin_list[args.plugin]
    base_config_path = "plugins"
    plugin_config_path = interfaces.configuration.path_join(
        base_config_path, plugin.__name__)

    # Special case the -f argument because people use it so frequently.
    # It has to go here so it can be overridden by single-location if it's defined.
    # NOTE: This will *BREAK* if LayerStacker, or the automagic configuration system, changes at all
    ###
    if args.file:
        file_name = os.path.abspath(args.file)
        if not os.path.exists(
                file_name) and "panda.panda" not in file_name:
            print("File does not exist: {}".format(file_name))
        else:
            single_location = "file:" + request.pathname2url(file_name)
            ctx.config[
                'automagic.LayerStacker.single_location'] = single_location

    # UI fills in the config; here we load it from the config file and do it
    # before we process the CL parameters
    if args.config:
        with open(args.config, "r") as f:
            json_val = json.load(f)
            ctx.config.splice(
                plugin_config_path,
                interfaces.configuration.HierarchicalDict(json_val))

    self.populate_config(ctx, configurables_list, args, plugin_config_path)

    if args.extend:
        for extension in args.extend:
            if '=' not in extension:
                raise ValueError(
                    "Invalid extension (extensions must be of the format \"conf.path.value='value'\")"
                )
            address, value = extension[:extension.find('=')], json.loads(
                extension[extension.find('=') + 1:])
            ctx.config[address] = value

    # It should be up to the UI to determine which automagics to run, so this is before BACK TO THE FRAMEWORK
    automagics = automagic.choose_automagic(automagics, plugin)
    self.output_dir = args.output_dir

    ###
    # BACK TO THE FRAMEWORK
    ###
    try:
        progress_callback = PrintedProgress()
        if args.quiet:
            progress_callback = MuteProgress()
        constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                               base_config_path,
                                               progress_callback, self)
        if args.write_config:
            vollog.debug("Writing out configuration data to config.json")
            with open("config.json", "w") as f:
                json.dump(dict(constructed.build_configuration()),
                          f,
                          sort_keys=True,
                          indent=2)
        return constructed
    except exceptions.UnsatisfiedException as excp:
        self.process_exceptions(excp)
        parser.exit(
            1, "Unable to validate the plugin requirements: {}\n".format(
                [x for x in excp.unsatisfied]))
def run(self):
    """Executes the command line module, taking the system arguments, determining the plugin to run and then running it."""
    # Banner and minimum framework version check
    sys.stdout.write("Volshell (Volatility 3 Framework) {}\n".format(
        constants.PACKAGE_VERSION))
    framework.require_interface_version(1, 0, 0)

    # Build the top-level argument parser
    parser = argparse.ArgumentParser(
        prog='volshell',
        description=
        "A tool for interactivate forensic analysis of memory images")
    parser.add_argument("-c",
                        "--config",
                        help="Load the configuration from a json file",
                        default=None,
                        type=str)
    parser.add_argument(
        "-e",
        "--extend",
        help="Extend the configuration with a new (or changed) setting",
        default=None,
        action='append')
    parser.add_argument(
        "-p",
        "--plugin-dirs",
        help="Semi-colon separated list of paths to find plugins",
        default="",
        type=str)
    parser.add_argument(
        "-s",
        "--symbol-dirs",
        help="Semi-colon separated list of paths to find symbols",
        default="",
        type=str)
    parser.add_argument("-v",
                        "--verbosity",
                        help="Increase output verbosity",
                        default=0,
                        action="count")
    parser.add_argument(
        "-o",
        "--output-dir",
        help="Directory in which to output any generated files",
        default=os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', '..')),
        type=str)
    parser.add_argument("-q",
                        "--quiet",
                        help="Remove progress feedback",
                        default=False,
                        action='store_true')
    parser.add_argument("--log",
                        help="Log output to a file as well as the console",
                        default=None,
                        type=str)
    parser.add_argument(
        "-f",
        "--file",
        metavar='FILE',
        default=None,
        type=str,
        help=
        "Shorthand for --single-location=file:// if single-location is not defined"
    )
    parser.add_argument(
        "--write-config",
        help="Write configuration JSON file out to config.json",
        default=False,
        action='store_true')
    parser.add_argument("--clear-cache",
                        help="Clears out all short-term cached items",
                        default=False,
                        action='store_true')

    # Volshell specific flags: exactly one OS flavour may be chosen
    os_specific = parser.add_mutually_exclusive_group(required=False)
    os_specific.add_argument("-w",
                             "--windows",
                             default=False,
                             action="store_true",
                             help="Run a Windows volshell")
    os_specific.add_argument("-l",
                             "--linux",
                             default=False,
                             action="store_true",
                             help="Run a Linux volshell")
    os_specific.add_argument("-m",
                             "--mac",
                             default=False,
                             action="store_true",
                             help="Run a Mac volshell")

    # We have to filter out help, otherwise parse_known_args will trigger the
    # help message before having processed the plugin choice or had the plugin
    # subparser added.
    known_args = [
        arg for arg in sys.argv if arg != '--help' and arg != '-h'
    ]
    partial_args, _ = parser.parse_known_args(known_args)

    # Extend the plugin/symbol search paths before importing plugin modules
    if partial_args.plugin_dirs:
        volatility.plugins.__path__ = [
            os.path.abspath(p) for p in partial_args.plugin_dirs.split(";")
        ] + constants.PLUGINS_PATH
    if partial_args.symbol_dirs:
        volatility.symbols.__path__ = [
            os.path.abspath(p) for p in partial_args.symbol_dirs.split(";")
        ] + constants.SYMBOL_BASEPATHS
    vollog.info("Volatility plugins path: {}".format(
        volatility.plugins.__path__))
    vollog.info("Volatility symbols path: {}".format(
        volatility.symbols.__path__))

    # Optional file logging in addition to the console
    if partial_args.log:
        file_logger = logging.FileHandler(partial_args.log)
        file_logger.setLevel(0)
        file_formatter = logging.Formatter(
            datefmt='%y-%m-%d %H:%M:%S',
            fmt='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        file_logger.setFormatter(file_formatter)
        vollog.addHandler(file_logger)
        vollog.info("Logging started")

    # Map -v counts onto logging levels (beyond -vvv uses sub-DEBUG levels)
    # NOTE(review): `console` appears to be defined at module scope — confirm
    if partial_args.verbosity < 3:
        console.setLevel(30 - (partial_args.verbosity * 10))
    else:
        console.setLevel(10 - (partial_args.verbosity - 2))

    # Wipe the short-term cache when requested
    if partial_args.clear_cache:
        for cache_filename in glob.glob(
                os.path.join(constants.CACHE_PATH, '*.cache')):
            os.unlink(cache_filename)

    # Do the initialization
    ctx = contexts.Context()  # Construct a blank context
    failures = framework.import_files(
        volatility.plugins,
        True)  # Will not log as console's default level is WARNING
    if failures:
        parser.epilog = "The following plugins could not be loaded (use -vv to see why): " + \
            ", ".join(sorted(failures))
        vollog.info(parser.epilog)
    automagics = automagic.available(ctx)

    # Initialize the list of plugins in case volshell needs it
    framework.list_plugins()

    # Register argparse options for each configurable automagic (deduplicated)
    seen_automagics = set()
    configurables_list = {}
    for amagic in automagics:
        if amagic in seen_automagics:
            continue
        seen_automagics.add(amagic)
        if isinstance(amagic,
                      interfaces.configuration.ConfigurableInterface):
            self.populate_requirements_argparse(parser, amagic.__class__)
            configurables_list[amagic.__class__.__name__] = amagic

    # We don't list plugin arguments, because they can be provided within python
    # NOTE(review): only generic/windows are listed here, yet linux/mac
    # Volshell classes are selectable below — confirm that is intentional
    volshell_plugin_list = {
        'generic': generic.Volshell,
        'windows': windows.Volshell
    }
    for plugin in volshell_plugin_list:
        subparser = parser.add_argument_group(
            title=plugin.capitalize(),
            description="Configuration options based on {} options".format(
                plugin.capitalize()))
        self.populate_requirements_argparse(subparser,
                                            volshell_plugin_list[plugin])
        configurables_list[plugin] = volshell_plugin_list[plugin]

    ###
    # PASS TO UI
    ###
    # Hand the plugin requirements over to the CLI (us) and let it construct the config tree
    # Run the argparser
    args = parser.parse_args()
    vollog.log(constants.LOGLEVEL_VVV,
               "Cache directory used: {}".format(constants.CACHE_PATH))

    # Pick the Volshell flavour from the mutually-exclusive OS flags
    plugin = generic.Volshell
    if args.windows:
        plugin = windows.Volshell
    if args.linux:
        plugin = linux.Volshell
    if args.mac:
        plugin = mac.Volshell

    base_config_path = "plugins"
    plugin_config_path = interfaces.configuration.path_join(
        base_config_path, plugin.__name__)

    # Special case the -f argument because people use it so frequently.
    # It has to go here so it can be overridden by single-location if it's defined.
    # NOTE: This will *BREAK* if LayerStacker, or the automagic configuration system, changes at all
    ###
    if args.file:
        file_name = os.path.abspath(args.file)
        if not os.path.exists(file_name):
            vollog.log(logging.INFO,
                       "File does not exist: {}".format(file_name))
        else:
            single_location = "file:" + request.pathname2url(file_name)
            ctx.config[
                'automagic.LayerStacker.single_location'] = single_location

    # UI fills in the config; here we load it from the config file and do it
    # before we process the CL parameters
    if args.config:
        with open(args.config, "r") as f:
            json_val = json.load(f)
            ctx.config.splice(
                plugin_config_path,
                interfaces.configuration.HierarchicalDict(json_val))

    self.populate_config(ctx, configurables_list, args, plugin_config_path)

    if args.extend:
        for extension in args.extend:
            if '=' not in extension:
                raise ValueError(
                    "Invalid extension (extensions must be of the format \"conf.path.value='value'\")"
                )
            address, value = extension[:extension.find('=')], json.loads(
                extension[extension.find('=') + 1:])
            ctx.config[address] = value

    # It should be up to the UI to determine which automagics to run, so this is before BACK TO THE FRAMEWORK
    automagics = automagic.choose_automagic(automagics, plugin)
    self.output_dir = args.output_dir

    ###
    # BACK TO THE FRAMEWORK
    ###
    try:
        progress_callback = cli.PrintedProgress()
        if args.quiet:
            progress_callback = cli.MuteProgress()
        constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                               base_config_path,
                                               progress_callback, self)
        if args.write_config:
            vollog.debug("Writing out configuration data to config.json")
            with open("config.json", "w") as f:
                json.dump(dict(constructed.build_configuration()),
                          f,
                          sort_keys=True,
                          indent=2)
        # Construct and run the plugin
        constructed.run()
    except exceptions.UnsatisfiedException as excp:
        self.process_exceptions(excp)
        parser.exit(
            1, "Unable to validate the plugin requirements: {}\n".format(
                [x for x in excp.unsatisfied]))
def run_plugin(dump_obj, plugin_obj, es_url, params=None):
    """Execute a single plugin on a dump with optional params. If success data
    are sent to elastic.

    Result status codes written to the Result row: 1 = ran but empty output,
    2 = success, 3 = unsatisfied requirements, 4 = error (see below).

    Args:
        dump_obj: model instance with .upload.path and .index for the memory dump.
        plugin_obj: model instance describing the plugin (.name, .local_dump,
            .clamav_check, .vt_check, .regipy_check).
        es_url: Elasticsearch endpoint that receives the rendered output.
        params: optional dict of extra plugin configuration values.

    Returns:
        0 in every case; outcome is communicated via the Result row and websocket.
    """
    try:
        # Fresh context, no parallelism, all plugin modules imported
        ctx = contexts.Context()
        constants.PARALLELISM = constants.Parallelism.Off
        failures = framework.import_files(volatility.plugins, True)
        automagics = automagic.available(ctx)
        plugin_list = framework.list_plugins()
        json_renderer = ReturnJsonRenderer
        # Deduplicate the automagics (no other use is made of the set here)
        seen_automagics = set()
        for amagic in automagics:
            if amagic in seen_automagics:
                continue
            seen_automagics.add(amagic)
        plugin = plugin_list.get(plugin_obj.name)
        base_config_path = "plugins"
        # Point the context at the uploaded dump file
        file_name = os.path.abspath(dump_obj.upload.path)
        single_location = "file:" + pathname2url(file_name)
        ctx.config["automagic.LayerStacker.single_location"] = single_location
        automagics = automagic.choose_automagic(automagics, plugin)

        # LOCAL DUMPS REQUIRES FILES
        local_dump = plugin_obj.local_dump

        # ADD PARAMETERS, AND IF LOCAL DUMP ENABLE ADD DUMP TRUE BY DEFAULT
        plugin_config_path = interfaces.configuration.path_join(
            base_config_path, plugin.__name__)
        if params:
            # ADD PARAMETERS TO PLUGIN CONF
            for k, v in params.items():
                extended_path = interfaces.configuration.path_join(
                    plugin_config_path, k)
                ctx.config[extended_path] = v
                if k == "dump" and v == True:
                    # IF DUMP TRUE HAS BEEN PASS IT'LL DUMP LOCALLY
                    local_dump = True
        if not params and local_dump:
            # IF ADMIN SET LOCAL DUMP ADD DUMP TRUE AS PARAMETER
            extended_path = interfaces.configuration.path_join(
                plugin_config_path, "dump")
            ctx.config[extended_path] = True

        if local_dump:
            # IF PARAM/ADMIN DUMP CREATE FILECONSUMER to collect dumped files
            consumer = FileConsumer()
            local_path = "{}/{}/{}".format(settings.MEDIA_ROOT,
                                           dump_obj.index, plugin_obj.name)
            if not os.path.exists(local_path):
                os.mkdir(local_path)
        else:
            consumer = None

        try:
            # RUN PLUGIN
            constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                                   base_config_path,
                                                   MuteProgress(), consumer)
        except exceptions.UnsatisfiedException as excp:
            # LOG UNSATISFIED ERROR (result code 3)
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 3
            result.description = "\n".join([
                excp.unsatisfied[config_path].description
                for config_path in excp.unsatisfied
            ])
            result.save()
            send_to_ws(dump_obj, result, plugin_obj.name)
            return 0
        try:
            runned_plugin = constructed.run()
        except Exception as excp:
            # LOG GENERIC ERROR [VOLATILITY] (result code 4)
            fulltrace = traceback.TracebackException.from_exception(
                excp).format(chain=True)
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 4
            result.description = "\n".join(fulltrace)
            result.save()
            send_to_ws(dump_obj, result, plugin_obj.name)
            return 0

        # RENDER OUTPUT IN JSON AND PUT IT IN ELASTIC
        json_data, error = json_renderer().render(runned_plugin)
        if len(json_data) > 0:
            # IF DUMP STORE FILE ON DISK
            if consumer and consumer.files:
                for filedata in consumer.files:
                    output_path = "{}/{}".format(local_path,
                                                 filedata.preferred_filename)
                    with open(output_path, "wb") as f:
                        f.write(filedata.data.getvalue())

                ## RUN CLAMAV ON ALL FOLDER
                if plugin_obj.clamav_check:
                    cd = pyclamd.ClamdUnixSocket()
                    match = cd.multiscan_file(local_path)
                    match = {} if not match else match
                else:
                    match = {}

                result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
                # BULK CREATE EXTRACTED DUMP FOR EACH DUMPED FILE
                # (clamav column holds the signature name when the scan matched)
                ed = ExtractedDump.objects.bulk_create([
                    ExtractedDump(
                        result=result,
                        path="{}/{}".format(local_path,
                                            filedata.preferred_filename),
                        sha256=sha256_checksum("{}/{}".format(
                            local_path, filedata.preferred_filename)),
                        clamav=(match["{}/{}".format(
                            local_path, filedata.preferred_filename)][1]
                                if "{}/{}".format(
                                    local_path, filedata.preferred_filename)
                                in match.keys() else None),
                    ) for filedata in consumer.files
                ])

                ## RUN VT AND REGIPY AS DASK SUBTASKS
                # NOTE(review): when both vt_check and regipy_check are set,
                # only run_vt is submitted — confirm that is intended
                if plugin_obj.vt_check or plugin_obj.regipy_check:
                    dask_client = get_client()
                    secede()
                    tasks = []
                    for filedata in consumer.files:
                        task = dask_client.submit(
                            run_vt if plugin_obj.vt_check else run_regipy,
                            result.pk,
                            "{}/{}".format(local_path,
                                           filedata.preferred_filename),
                        )
                        tasks.append(task)
                    results = dask_client.gather(tasks)
                    rejoin()

            # Ship the rendered rows to elastic, one index per dump/plugin pair
            es = Elasticsearch(
                [es_url],
                request_timeout=60,
                timeout=60,
                max_retries=10,
                retry_on_timeout=True,
            )
            helpers.bulk(
                es,
                gendata(
                    "{}_{}".format(dump_obj.index, plugin_obj.name.lower()),
                    plugin_obj.name,
                    json_data,
                ),
            )

            # EVERYTHING OK (result code 2)
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 2
            result.description = error
            result.save()
        else:
            # OK BUT EMPTY (result code 1)
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 1
            result.description = error
            result.save()
        send_to_ws(dump_obj, result, plugin_obj.name)
        return 0
    except Exception as excp:
        # LOG GENERIC ERROR [ELASTIC] (result code 4)
        fulltrace = traceback.TracebackException.from_exception(excp).format(
            chain=True)
        result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
        result.result = 4
        result.description = "\n".join(fulltrace)
        result.save()
        send_to_ws(dump_obj, result, plugin_obj.name)
        return 0