Example #1
def run_plugin(dump_obj, plugin_obj, params=None, user_pk=None):
    """
    Execute a single plugin on a dump with optional params.
    On success, the rendered output is sent to Elasticsearch.
    """
    logging.info("[dump {} - plugin {}] start".format(dump_obj.pk,
                                                      plugin_obj.pk))
    try:
        ctx = contexts.Context()
        constants.PARALLELISM = constants.Parallelism.Off
        _ = framework.import_files(volatility3.plugins, True)
        automagics = automagic.available(ctx)
        plugin_list = framework.list_plugins()
        json_renderer = ReturnJsonRenderer
        seen_automagics = set()
        for amagic in automagics:
            if amagic in seen_automagics:
                continue
            seen_automagics.add(amagic)
        plugin = plugin_list.get(plugin_obj.name)
        base_config_path = "plugins"
        file_name = os.path.abspath(dump_obj.upload.path)
        single_location = "file:" + pathname2url(file_name)
        ctx.config["automagic.LayerStacker.single_location"] = single_location
        automagics = automagic.choose_automagic(automagics, plugin)
        if ctx.config.get("automagic.LayerStacker.stackers", None) is None:
            ctx.config[
                "automagic.LayerStacker.stackers"] = stacker.choose_os_stackers(
                    plugin)
        # LOCAL DUMPS REQUIRE FILES
        local_dump = plugin_obj.local_dump

        # ADD PARAMETERS; IF LOCAL DUMP IS ENABLED, ADD DUMP=TRUE BY DEFAULT
        plugin_config_path = interfaces.configuration.path_join(
            base_config_path, plugin.__name__)
        if params:
            # ADD PARAMETERS TO PLUGIN CONF
            for k, v in params.items():
                if v != "":
                    extended_path = interfaces.configuration.path_join(
                        plugin_config_path, k)
                    ctx.config[extended_path] = v

                if k == "dump" and v:
                    # IF DUMP=TRUE WAS PASSED, DUMP FILES LOCALLY
                    local_dump = True

        if not params and local_dump:
            # IF THE ADMIN ENABLED LOCAL DUMP, ADD DUMP=TRUE AS A PARAMETER
            extended_path = interfaces.configuration.path_join(
                plugin_config_path, "dump")
            ctx.config[extended_path] = True

        logging.debug("[dump {} - plugin {}] params: {}".format(
            dump_obj.pk, plugin_obj.pk, ctx.config))

        file_list = []
        if local_dump:
            # IF DUMP WAS REQUESTED VIA PARAMS/ADMIN, CREATE A FILE HANDLER
            local_path = "{}/{}/{}".format(settings.MEDIA_ROOT, dump_obj.index,
                                           plugin_obj.name)
            if not os.path.exists(local_path):
                os.mkdir(local_path)
            file_handler = file_handler_class_factory(output_dir=local_path,
                                                      file_list=file_list)
        else:
            local_path = None
            file_handler = file_handler_class_factory(output_dir=None,
                                                      file_list=file_list)

        # #####################
        # ## YARA
        # if no yara file/rule was selected and a default rule exists, use it
        if plugin_obj.name in [
                "yarascan.YaraScan", "windows.vadyarascan.VadYaraScan"
        ]:
            # True only if any yara source parameter was actually provided
            has_file = False
            if params:
                has_file = any(
                    params.get(k) not in (None, "")
                    for k in ("yara_file", "yara_compiled_file", "yara_rules")
                )

            if not has_file:
                # .filter().first() returns None instead of raising when no
                # default rule exists for this user
                rule = CustomRule.objects.filter(user__pk=user_pk,
                                                 default=True).first()
                if rule:
                    extended_path = interfaces.configuration.path_join(
                        plugin_config_path, "yara_compiled_file")
                    ctx.config[extended_path] = "file:{}".format(rule.path)

            logging.debug("[dump {} - plugin {}] params: {}".format(
                dump_obj.pk, plugin_obj.pk, ctx.config))

        try:
            # RUN PLUGIN
            constructed = plugins.construct_plugin(
                ctx,
                automagics,
                plugin,
                base_config_path,
                MuteProgress(),
                file_handler,
            )
        except exceptions.UnsatisfiedException as excp:
            # LOG UNSATISFIED ERROR
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 3
            result.description = "\n".join([
                excp.unsatisfied[config_path].description
                for config_path in excp.unsatisfied
            ])
            result.save()
            send_to_ws(dump_obj, result, plugin_obj.name)

            logging.error("[dump {} - plugin {}] unsatisfied".format(
                dump_obj.pk, plugin_obj.pk))

            return 0
        try:
            runned_plugin = constructed.run()
        except Exception as excp:
            # LOG GENERIC ERROR [VOLATILITY]
            fulltrace = traceback.TracebackException.from_exception(
                excp).format(chain=True)
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 4
            result.description = "\n".join(fulltrace)
            result.save()
            send_to_ws(dump_obj, result, plugin_obj.name)
            logging.error("[dump {} - plugin {}] generic error".format(
                dump_obj.pk, plugin_obj.pk))
            return 0

        # RENDER OUTPUT IN JSON AND PUT IT IN ELASTIC
        json_data, error = json_renderer().render(runned_plugin)

        logging.debug("DATA: {}".format(json_data))
        logging.debug("ERROR: {}".format(error))
        logging.debug("CONFIG: {}".format(ctx.config))

        if len(json_data) > 0:

            # IF DUMP STORE FILE ON DISK
            if local_dump and file_list:
                for file_id in file_list:
                    output_path = "{}/{}".format(local_path,
                                                 file_id.preferred_filename)
                    with open(output_path, "wb") as f:
                        f.write(file_id.getvalue())

                # RUN CLAMAV ON ALL FOLDER
                if plugin_obj.clamav_check:
                    cd = pyclamd.ClamdUnixSocket()
                    match = cd.multiscan_file(local_path)
                    match = {} if not match else match
                else:
                    match = {}

                result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)

                # BULK CREATE AN EXTRACTED DUMP ENTRY FOR EACH DUMPED FILE
                extracted_dumps = []
                for file_id in file_list:
                    dump_path = "{}/{}".format(local_path,
                                               file_id.preferred_filename)
                    checksums = hash_checksum(dump_path)
                    extracted_dumps.append(
                        ExtractedDump(
                            result=result,
                            path=dump_path,
                            sha256=checksums[0],
                            md5=checksums[1],
                            clamav=(match[dump_path][1]
                                    if dump_path in match else None),
                        ))
                ExtractedDump.objects.bulk_create(extracted_dumps)

                # RUN VT AND REGIPY AS DASK SUBTASKS
                if plugin_obj.vt_check or plugin_obj.regipy_check:
                    dask_client = get_client()
                    secede()
                    tasks = []
                    for file_id in file_list:
                        if plugin_obj.vt_check:
                            task = dask_client.submit(
                                run_vt,
                                result.pk,
                                "{}/{}".format(local_path,
                                               file_id.preferred_filename),
                            )
                            tasks.append(task)
                        if plugin_obj.regipy_check:
                            task = dask_client.submit(
                                run_regipy,
                                result.pk,
                                "{}/{}".format(local_path,
                                               file_id.preferred_filename),
                            )
                            tasks.append(task)
                    _ = dask_client.gather(tasks)
                    rejoin()

            es = Elasticsearch(
                [settings.ELASTICSEARCH_URL],
                request_timeout=60,
                timeout=60,
                max_retries=10,
                retry_on_timeout=True,
            )
            helpers.bulk(
                es,
                gendata(
                    "{}_{}".format(dump_obj.index, plugin_obj.name.lower()),
                    json_data,
                    {
                        "orochi_dump": dump_obj.name,
                        "orochi_plugin": plugin_obj.name.lower(),
                        "orochi_os": dump_obj.get_operating_system_display(),
                        "orochi_createdAt": datetime.datetime.now().replace(
                            microsecond=0).isoformat(),
                    },
                ),
            )

            # set max_result_window on the newly created index
            es.indices.put_settings(
                index="{}_{}".format(dump_obj.index, plugin_obj.name.lower()),
                body={
                    "index": {
                        "max_result_window": settings.MAX_ELASTIC_WINDOWS_SIZE
                    }
                },
            )

            # EVERYTHING OK
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 2
            result.description = error
            result.save()

            logging.debug("[dump {} - plugin {}] sent to elastic".format(
                dump_obj.pk, plugin_obj.pk))
        else:
            # OK BUT EMPTY
            result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
            result.result = 1
            result.description = error
            result.save()

            logging.debug("[dump {} - plugin {}] empty".format(
                dump_obj.pk, plugin_obj.pk))
        send_to_ws(dump_obj, result, plugin_obj.name)
        return 0

    except Exception as excp:
        # LOG GENERIC ERROR [ELASTIC]
        fulltrace = traceback.TracebackException.from_exception(excp).format(
            chain=True)
        result = Result.objects.get(plugin=plugin_obj, dump=dump_obj)
        result.result = 4
        result.description = "\n".join(fulltrace)
        result.save()
        send_to_ws(dump_obj, result, plugin_obj.name)
        logging.error("[dump {} - plugin {}] generic error".format(
            dump_obj.pk, plugin_obj.pk))
        return 0
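
For context, run_plugin above is meant to be dispatched as a background task rather than called inline. The sketch below shows one plausible way to fan it out over a Dask cluster; the scheduler address and the dump_obj.plugins relation are illustrative assumptions, not part of the project code shown here.

# A minimal usage sketch, assuming Django model instances (dump_obj with a
# hypothetical "plugins" related manager) and a reachable Dask scheduler.
from dask.distributed import Client

def dispatch_plugins(dump_obj, params=None, user_pk=None):
    """Submit run_plugin for every plugin of a dump as separate Dask tasks."""
    dask_client = Client("tcp://127.0.0.1:8786")  # assumed scheduler address
    futures = [
        dask_client.submit(run_plugin, dump_obj, plugin_obj, params, user_pk)
        for plugin_obj in dump_obj.plugins.all()  # hypothetical relation
    ]
    # run_plugin always returns 0; gather simply waits for completion
    return dask_client.gather(futures)
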
Example #2
    def run(self):
        """Executes the command line module, taking the system arguments,
        determining the plugin to run and then running it."""

        volatility3.framework.require_interface_version(1, 0, 0)

        renderers = dict([(x.name.lower(), x) for x in framework.class_subclasses(text_renderer.CLIRenderer)])

        parser = volargparse.HelpfulArgParser(add_help = False,
                                              prog = self.CLI_NAME,
                                              description = "An open-source memory forensics framework")
        parser.add_argument(
            "-h",
            "--help",
            action = "help",
            default = argparse.SUPPRESS,
            help = "Show this help message and exit, for specific plugin options use '{} <pluginname> --help'".format(
                parser.prog))
        parser.add_argument("-c",
                            "--config",
                            help = "Load the configuration from a json file",
                            default = None,
                            type = str)
        parser.add_argument("--parallelism",
                            help = "Enables parallelism (defaults to off if no argument given)",
                            nargs = '?',
                            choices = ['processes', 'threads', 'off'],
                            const = 'processes',
                            default = None,
                            type = str)
        parser.add_argument("-e",
                            "--extend",
                            help = "Extend the configuration with a new (or changed) setting",
                            default = None,
                            action = 'append')
        parser.add_argument("-p",
                            "--plugin-dirs",
                            help = "Semi-colon separated list of paths to find plugins",
                            default = "",
                            type = str)
        parser.add_argument("-s",
                            "--symbol-dirs",
                            help = "Semi-colon separated list of paths to find symbols",
                            default = "",
                            type = str)
        parser.add_argument("-v", "--verbosity", help = "Increase output verbosity", default = 0, action = "count")
        parser.add_argument("-l",
                            "--log",
                            help = "Log output to a file as well as the console",
                            default = None,
                            type = str)
        parser.add_argument("-o",
                            "--output-dir",
                            help = "Directory in which to output any generated files",
                            default = os.getcwd(),
                            type = str)
        parser.add_argument("-q", "--quiet", help = "Remove progress feedback", default = False, action = 'store_true')
        parser.add_argument("-r",
                            "--renderer",
                            metavar = 'RENDERER',
                            help = "Determines how to render the output ({})".format(", ".join(list(renderers))),
                            default = "quick",
                            choices = list(renderers))
        parser.add_argument("-f",
                            "--file",
                            metavar = 'FILE',
                            default = None,
                            type = str,
                            help = "Shorthand for --single-location=file:// if single-location is not defined")
        parser.add_argument("--write-config",
                            help = "Write configuration JSON file out to config.json",
                            default = False,
                            action = 'store_true')
        parser.add_argument("--clear-cache",
                            help = "Clears out all short-term cached items",
                            default = False,
                            action = 'store_true')
        parser.add_argument("--cache-path",
                            help = "Change the default path ({}) used to store the cache".format(constants.CACHE_PATH),
                            default = constants.CACHE_PATH,
                            type = str)

        # We have to filter out help, otherwise parse_known_args will trigger the help message before having
        # processed the plugin choice or had the plugin subparser added.
        known_args = [arg for arg in sys.argv if arg != '--help' and arg != '-h']
        partial_args, _ = parser.parse_known_args(known_args)

        banner_output = sys.stdout
        if renderers[partial_args.renderer].structured_output:
            banner_output = sys.stderr
        banner_output.write("Volatility 3 Framework {}\n".format(constants.PACKAGE_VERSION))

        if partial_args.plugin_dirs:
            volatility3.plugins.__path__ = [os.path.abspath(p)
                                            for p in partial_args.plugin_dirs.split(";")] + constants.PLUGINS_PATH

        if partial_args.symbol_dirs:
            volatility3.symbols.__path__ = [os.path.abspath(p)
                                            for p in partial_args.symbol_dirs.split(";")] + constants.SYMBOL_BASEPATHS

        if partial_args.cache_path:
            constants.CACHE_PATH = partial_args.cache_path

        if partial_args.log:
            file_logger = logging.FileHandler(partial_args.log)
            file_logger.setLevel(1)
            file_formatter = logging.Formatter(datefmt = '%y-%m-%d %H:%M:%S',
                                               fmt = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
            file_logger.setFormatter(file_formatter)
            vollog.addHandler(file_logger)
            vollog.info("Logging started")
        if partial_args.verbosity < 3:
            if partial_args.verbosity < 1:
                sys.tracebacklimit = None
            console.setLevel(30 - (partial_args.verbosity * 10))
        else:
            console.setLevel(10 - (partial_args.verbosity - 2))

        vollog.info("Volatility plugins path: {}".format(volatility3.plugins.__path__))
        vollog.info("Volatility symbols path: {}".format(volatility3.symbols.__path__))

        # Set the PARALLELISM
        if partial_args.parallelism == 'processes':
            constants.PARALLELISM = constants.Parallelism.Multiprocessing
        elif partial_args.parallelism == 'threads':
            constants.PARALLELISM = constants.Parallelism.Threading
        else:
            constants.PARALLELISM = constants.Parallelism.Off

        if partial_args.clear_cache:
            framework.clear_cache()

        # Do the initialization
        ctx = contexts.Context()  # Construct a blank context
        failures = framework.import_files(volatility3.plugins,
                                          True)  # Will not log as console's default level is WARNING
        if failures:
            parser.epilog = "The following plugins could not be loaded (use -vv to see why): " + \
                            ", ".join(sorted(failures))
            vollog.info(parser.epilog)
        automagics = automagic.available(ctx)

        plugin_list = framework.list_plugins()

        seen_automagics = set()
        chosen_configurables_list = {}
        for amagic in automagics:
            if amagic in seen_automagics:
                continue
            seen_automagics.add(amagic)
            if isinstance(amagic, interfaces.configuration.ConfigurableInterface):
                self.populate_requirements_argparse(parser, amagic.__class__)

        subparser = parser.add_subparsers(title = "Plugins",
                                          dest = "plugin",
                                          description = "For plugin specific options, run '{} <plugin> --help'".format(
                                              self.CLI_NAME),
                                          action = volargparse.HelpfulSubparserAction)
        for plugin in sorted(plugin_list):
            plugin_parser = subparser.add_parser(plugin, help = plugin_list[plugin].__doc__)
            self.populate_requirements_argparse(plugin_parser, plugin_list[plugin])

        ###
        # PASS TO UI
        ###
        # Hand the plugin requirements over to the CLI (us) and let it construct the config tree

        # Run the argparser
        args = parser.parse_args()
        if args.plugin is None:
            parser.error("Please select a plugin to run")

        vollog.log(constants.LOGLEVEL_VVV, "Cache directory used: {}".format(constants.CACHE_PATH))

        plugin = plugin_list[args.plugin]
        chosen_configurables_list[args.plugin] = plugin
        base_config_path = "plugins"
        plugin_config_path = interfaces.configuration.path_join(base_config_path, plugin.__name__)

        # Special case the -f argument because people use it so frequently
        # It has to go here so it can be overridden by single-location if it's defined
        # NOTE: This will *BREAK* if LayerStacker, or the automagic configuration system, changes at all
        ###
        if args.file:
            file_name = os.path.abspath(args.file)
            if not os.path.exists(file_name):
                vollog.log(logging.INFO, "File does not exist: {}".format(file_name))
            else:
                single_location = "file:" + request.pathname2url(file_name)
                ctx.config['automagic.LayerStacker.single_location'] = single_location

        # UI fills in the config, here we load it from the config file and do it before we process the CL parameters
        if args.config:
            with open(args.config, "r") as f:
                json_val = json.load(f)
                ctx.config.splice(plugin_config_path, interfaces.configuration.HierarchicalDict(json_val))

        # It should be up to the UI to determine which automagics to run, so this is before BACK TO THE FRAMEWORK
        automagics = automagic.choose_automagic(automagics, plugin)
        for amagic in automagics:
            chosen_configurables_list[amagic.__class__.__name__] = amagic

        if ctx.config.get('automagic.LayerStacker.stackers', None) is None:
            ctx.config['automagic.LayerStacker.stackers'] = stacker.choose_os_stackers(plugin)
        self.output_dir = args.output_dir
        if not os.path.exists(self.output_dir):
            parser.error("The output directory specified does not exist: {}".format(self.output_dir))

        self.populate_config(ctx, chosen_configurables_list, args, plugin_config_path)

        if args.extend:
            for extension in args.extend:
                if '=' not in extension:
                    raise ValueError("Invalid extension (extensions must be of the format \"conf.path.value='value'\")")
                address, value = extension[:extension.find('=')], json.loads(extension[extension.find('=') + 1:])
                ctx.config[address] = value

        ###
        # BACK TO THE FRAMEWORK
        ###
        constructed = None
        try:
            progress_callback = PrintedProgress()
            if args.quiet:
                progress_callback = MuteProgress()

            constructed = plugins.construct_plugin(ctx, automagics, plugin, base_config_path, progress_callback,
                                                   self.file_handler_class_factory())

            if args.write_config:
                vollog.debug("Writing out configuration data to config.json")
                with open("config.json", "w") as f:
                    json.dump(dict(constructed.build_configuration()), f, sort_keys = True, indent = 2)
        except exceptions.UnsatisfiedException as excp:
            self.process_unsatisfied_exceptions(excp)
            parser.exit(1, "Unable to validate the plugin requirements: {}\n".format([x for x in excp.unsatisfied]))

        try:
            # Construct and run the plugin
            if constructed:
                renderers[args.renderer]().render(constructed.run())
        except (exceptions.VolatilityException) as excp:
            self.process_exceptions(excp)
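
The run() method above is normally reached through the console-script entry point rather than called directly. A minimal sketch of that wiring, assuming the enclosing class is volatility3.cli.CommandLine as in upstream Volatility 3:

# Minimal sketch of launching the CLI programmatically; CommandLine.run()
# parses sys.argv itself, so arguments are supplied on the command line,
# e.g. "vol -f memory.dmp windows.pslist.PsList".
import sys
from volatility3.cli import CommandLine

def main():
    CommandLine().run()

if __name__ == "__main__":
    sys.exit(main())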