Example #1
    def run_all(
            cls, context: interfaces.context.ContextInterface, layer_name: str, vmlinux_symbols: str
    ) -> Iterator[Tuple[int, str, str, int, format_hints.Hex, bool, str]]:
        """It calls each subclass symtab_checks() to test the required symbols,
        type, subtypes, etc so that the respective Netfitler implementation is
        processed accordingly.

        Args:
            context: The context to retrieve required elements (layers, symbol
            tables) from layer_name: The name of the layer on which to operate
            vmlinux_symbols: The name of the table containing the kernel
            symbols

        Yields:
            Tuples describing each registered Netfilter hook
        """

        nfimp_inst = None
        for subclass in class_subclasses(cls):
            if not subclass.symtab_checks(context=context, symbol_table=vmlinux_symbols):
                vollog.log(constants.LOGLEVEL_VVVV,
                           "NetFilter implementation %s doesn't match this memory dump", subclass.__name__)
                continue

            vollog.log(constants.LOGLEVEL_VVVV, "NetFilter implementation %s matches!", subclass.__name__)
            nfimp_inst = subclass(context, layer_name, vmlinux_symbols)
            # More than one class could be executed for a specific kernel
            # version, e.g. Netfilter Ingress hooks
            yield from nfimp_inst.run()

        if nfimp_inst is None:
            vollog.error("Unsupported Netfilter kernel implementation")
Example #2
    def __init__(
        self,
        context: interfaces.context.ContextInterface,
        config_path: str,
        name: str,
        isf_url: str,
        native_types: Optional[interfaces.symbols.NativeTableInterface] = None,
        table_mapping: Optional[Dict[str, str]] = None,
        validate: bool = True,
        class_types: Optional[Dict[
            str, Type[interfaces.objects.ObjectInterface]]] = None
    ) -> None:
        """Instantiates a SymbolTable based on an IntermediateSymbolFormat JSON file.  This is validated against the
        appropriate schema.  The validation can be disabled by passing validate = False, but this should almost never be
        done.

        Args:
            context: The volatility context for the symbol table
            config_path: The configuration path for the symbol table
            name: The name for the symbol table (this is used in symbols e.g. table!symbol )
            isf_url: The URL pointing to the ISF file location
            native_types: The NativeSymbolTable that contains the native types for this symbol table
            table_mapping: A dictionary linking names referenced in the file with symbol tables in the context
            validate: Determines whether the ISF file will be validated against the appropriate schema
            class_types: A dictionary of type names and classes that override StructType when they are instantiated
        """
        # Check there are no obvious errors
        # Open the file and test the version
        self._versions = dict([(x.version, x)
                               for x in class_subclasses(ISFormatTable)])
        fp = volatility.framework.layers.resources.ResourceAccessor().open(
            isf_url)
        reader = codecs.getreader("utf-8")
        json_object = json.load(reader(fp))  # type: ignore
        fp.close()

        # Validation is expensive, but we cache to store the hashes of successfully validated json objects
        if validate and not schemas.validate(json_object):
            raise exceptions.SymbolSpaceError(
                "File does not pass version validation: {}".format(isf_url))

        metadata = json_object.get('metadata', None)

        # Determine the delegate or throw an exception
        self._delegate = self._closest_version(metadata.get(
            'format', "0.0.0"), self._versions)(context, config_path, name,
                                                json_object, native_types,
                                                table_mapping)

        # Inherit
        super().__init__(context,
                         config_path,
                         name,
                         native_types or self._delegate.natives,
                         table_mapping=table_mapping,
                         class_types=class_types)
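
The constructor keys every ISFormatTable subclass (found via class_subclasses) by its version tuple and lets _closest_version() pick the delegate for the file's declared format. The exact compatibility rule isn't shown above; assuming a semantic-versioning style rule (same major version, minor version at least as new as requested), the selection could look roughly like this (helper name and rule are assumptions):

from typing import Dict, Tuple, Type


def closest_version(requested: str, versions: Dict[Tuple[int, int, int], Type]) -> Type:
    """Pick the handler with the same major version and the smallest minor version
    that still covers the requested format (an assumed compatibility rule)."""
    major, minor, _patch = (int(part) for part in requested.split("."))
    candidates = [v for v in versions if v[0] == major and v[1] >= minor]
    if not candidates:
        raise ValueError("No handler compatible with format {}".format(requested))
    return versions[min(candidates)]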
Example #3
    def create_stackers_list(self):
        """Creates the list of stackers to use based on the config option"""
        stack_set = sorted(framework.class_subclasses(
            interfaces.automagic.StackerLayerInterface),
                           key=lambda x: x.stack_order)
        stacker_list = self.config.get('stackers', [])
        if len(stacker_list):
            result = []
            for stacker in stack_set:
                if stacker.__name__ in stacker_list:
                    result.append(stacker)
            stack_set = result
        return stack_set
Example #4
    def _generator(self):
        categories = {
            'Automagic': interfaces.automagic.AutomagicInterface,
            'Requirement': interfaces.configuration.RequirementInterface,
            'Layer': interfaces.layers.DataLayerInterface,
            'Object': interfaces.objects.ObjectInterface,
            'Plugin': interfaces.plugins.PluginInterface,
            'Renderer': interfaces.renderers.Renderer
        }

        for category, module_interface in categories.items():
            yield (0, (category, ))
            for clazz in framework.class_subclasses(module_interface):
                yield (1, (clazz.__name__, ))
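
For comparison, a minimal standalone sketch of the same enumeration idea: import the plugin modules (the import call mirrors Examples #9 and #10) and walk one of the interfaces listed in the categories dictionary.

import volatility.plugins
from volatility import framework
from volatility.framework import interfaces

framework.import_files(volatility.plugins, True)  # make sure plugin modules are loaded
for clazz in framework.class_subclasses(interfaces.plugins.PluginInterface):
    print(clazz.__name__)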
Example #5
def choose_os_stackers(plugin):
    """Identifies the stackers that should be run, based on the plugin (and thus os) provided"""
    plugin_first_level = plugin.__module__.split('.')[2]

    # Ensure all stackers are loaded
    framework.import_files(sys.modules['volatility.framework.layers'])

    result = []
    for stacker in sorted(framework.class_subclasses(
            interfaces.automagic.StackerLayerInterface),
                          key=lambda x: x.stack_order):
        if plugin_first_level in stacker.exclusion_list:
            continue
        result.append(stacker.__name__)
    return result
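
A hedged usage sketch: the returned class names are meant to be written into the LayerStacker configuration before the automagics run, as Example #9 below also shows (the plugin name picked here is only an illustration):

import volatility.plugins
from volatility import framework
from volatility.framework import contexts
from volatility.framework.automagic import stacker

framework.import_files(volatility.plugins, True)
plugin = framework.list_plugins()['linux.pslist.PsList']  # assumed plugin name
ctx = contexts.Context()
if ctx.config.get('automagic.LayerStacker.stackers', None) is None:
    ctx.config['automagic.LayerStacker.stackers'] = stacker.choose_os_stackers(plugin)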
Example #6
    def __init__(self,
                 progress_callback: Optional[
                     constants.ProgressCallback] = None,
                 context: Optional[ssl.SSLContext] = None) -> None:
        """Creates a resource accessor.

        Note: context is an SSL context, not a volatility context
        """
        self._progress_callback = progress_callback
        self._context = context
        self._handlers = list(
            framework.class_subclasses(urllib.request.BaseHandler))
        if self.list_handlers:
            vollog.log(
                constants.LOGLEVEL_VVV, "Available URL handlers: {}".format(
                    ", ".join([x.__name__ for x in self._handlers])))
            self.__class__.list_handlers = False
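
A short usage sketch for the accessor (the file: URL construction mirrors Examples #2, #9 and #11; the path itself is a placeholder):

from urllib import request
from volatility.framework.layers import resources

url = "file:" + request.pathname2url("/tmp/linux-5.4.0.json.xz")  # placeholder path
fp = resources.ResourceAccessor().open(url)
data = fp.read()
fp.close()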
Example #7
def available(context: interfaces.context.ContextInterface) -> List[interfaces.automagic.AutomagicInterface]:
    """Returns an ordered list of all subclasses of
    :class:`~volatility.framework.interfaces.automagic.AutomagicInterface`.

    The order is based on the priority attributes of the subclasses, in order to ensure the automagics are listed in
    an appropriate order.

    Args:
        context: The context that will contain any automagic configuration values.
    """
    import_files(sys.modules[__name__])
    config_path = constants.AUTOMAGIC_CONFIG_PATH
    return sorted([
        clazz(context, interfaces.configuration.path_join(config_path, clazz.__name__))
        for clazz in class_subclasses(interfaces.automagic.AutomagicInterface)
    ],
                  key = lambda x: x.priority)
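
Typical usage, as also seen in Examples #9 and #10 (the concrete plugin name is only an example):

import volatility.plugins
from volatility import framework
from volatility.framework import automagic, contexts

framework.import_files(volatility.plugins, True)
ctx = contexts.Context()
automagics = automagic.available(ctx)                        # priority-ordered automagics
plugin = framework.list_plugins()['windows.pslist.PsList']   # assumed plugin name
automagics = automagic.choose_automagic(automagics, plugin)  # narrow to the relevant ones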
Example #8
    def get_usable_plugins(cls, selected_list: List[str] = None) -> List[Type]:
        # Initialize for the run
        plugin_list = list(framework.class_subclasses(TimeLinerInterface))

        # Get the filter from the configuration
        def passthrough(name: str, selected: List[str]) -> bool:
            return True

        filter_func = passthrough
        if selected_list:

            def filter_plugins(name: str, selected: List[str]) -> bool:
                return any([s in name for s in selected])

            filter_func = filter_plugins
        else:
            selected_list = []

        return [plugin_class for plugin_class in plugin_list if filter_func(plugin_class.__name__, selected_list)]
Example #9
import configparser

import volatility.plugins
from urllib import parse, request
from volatility.cli import text_renderer
from volatility import framework
from volatility.framework.automagic import stacker
from volatility.framework import automagic, constants, contexts, exceptions, interfaces, plugins, configuration

config = configparser.ConfigParser()
config.read('config_syslog.ini')
pluginname = str(config["plugin"]["value"])
filename = str(config["location"]["file"])

framework.import_files(volatility.plugins, True)
renderers = dict([
    (x.name.lower(), x)
    for x in framework.class_subclasses(text_renderer.CLIRenderer)
])
ctx = contexts.Context()

single_location = "file:" + request.pathname2url(filename)

ctx.config['automagic.LayerStacker.single_location'] = single_location
automagics = automagic.available(ctx)
plugin_list = framework.list_plugins()

plugin = plugin_list[pluginname]
automagics = automagic.choose_automagic(automagics, plugin)
if ctx.config.get('automagic.LayerStacker.stackers', None) is None:
    ctx.config['automagic.LayerStacker.stackers'] = stacker.choose_os_stackers(
        plugin)
base_config_path = "plugins"
Example #10
    def run(self, argstring):
        """Executes the command line module, taking the system arguments,
        determining the plugin to run and then running it."""
        # Make sure we log everything
        vollog = logging.getLogger()
        #vollog.setLevel(1)
        # Trim the console down by default
        console = logging.StreamHandler()
        #console.setLevel(logging.WARNING)
        formatter = logging.Formatter(
            '%(levelname)-8s %(name)-12s: %(message)s')
        console.setFormatter(formatter)
        vollog.addHandler(console)

        arg_arr = shlex.split(argstring)
        sys.stdout.write("Volatility 3 Framework {}\n".format(
            constants.PACKAGE_VERSION))

        volatility.framework.require_interface_version(1, 0, 0)

        renderers = dict([
            (x.name.lower(), x)
            for x in framework.class_subclasses(text_renderer.CLIRenderer)
        ])

        parser = argparse.ArgumentParser(
            prog='volatility',
            description="An open-source memory forensics framework")
        parser.add_argument("-c",
                            "--config",
                            help="Load the configuration from a json file",
                            default=None,
                            type=str)
        parser.add_argument(
            "--parallelism",
            help=
            "Enables parallelism (defaults to processes if no argument given)",
            nargs='?',
            choices=['processes', 'threads', 'off'],
            const='processes',
            default=None,
            type=str)
        parser.add_argument(
            "-e",
            "--extend",
            help="Extend the configuration with a new (or changed) setting",
            default=None,
            action='append')
        parser.add_argument(
            "-p",
            "--plugin-dirs",
            help="Semi-colon separated list of paths to find plugins",
            default="",
            type=str)
        parser.add_argument(
            "-s",
            "--symbol-dirs",
            help="Semi-colon separated list of paths to find symbols",
            default="",
            type=str)
        parser.add_argument("-v",
                            "--verbosity",
                            help="Increase output verbosity",
                            default=0,
                            action="count")
        parser.add_argument("-l",
                            "--log",
                            help="Log output to a file as well as the console",
                            default=None,
                            type=str)
        parser.add_argument(
            "-o",
            "--output-dir",
            help="Directory in which to output any generated files",
            default=os.path.abspath(
                os.path.join(os.path.dirname(__file__), '..', '..')),
            type=str)
        parser.add_argument("-q",
                            "--quiet",
                            help="Remove progress feedback",
                            default=False,
                            action='store_true')
        parser.add_argument(
            "-r",
            "--renderer",
            metavar='RENDERER',
            help="Determines how to render the output ({})".format(", ".join(
                list(renderers))),
            default="quick",
            choices=list(renderers))
        parser.add_argument(
            "-f",
            "--file",
            metavar='FILE',
            default=None,
            type=str,
            help=
            "Shorthand for --single-location=file:// if single-location is not defined"
        )
        parser.add_argument(
            "--write-config",
            help="Write configuration JSON file out to config.json",
            default=False,
            action='store_true')

        # We have to filter out help, otherwise parse_known_args will trigger the help message before having
        # processed the plugin choice or had the plugin subparser added.
        known_args = [
            arg for arg in arg_arr if arg != '--help' and arg != '-h'
        ]
        partial_args, _ = parser.parse_known_args(known_args)
        if partial_args.plugin_dirs:
            volatility.plugins.__path__ = [
                os.path.abspath(p) for p in partial_args.plugin_dirs.split(";")
            ] + constants.PLUGINS_PATH

        if partial_args.symbol_dirs:
            volatility.symbols.__path__ = [
                os.path.abspath(p) for p in partial_args.symbol_dirs.split(";")
            ] + constants.SYMBOL_BASEPATHS

        if partial_args.log:
            file_logger = logging.FileHandler(partial_args.log)
            #file_logger.setLevel(1)
            file_formatter = logging.Formatter(
                datefmt='%y-%m-%d %H:%M:%S',
                fmt='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
            file_logger.setFormatter(file_formatter)
            vollog.addHandler(file_logger)
            vollog.info("Logging started")
        if partial_args.verbosity < 3:
            console.setLevel(30 - (partial_args.verbosity * 10))
        else:
            console.setLevel(10 - (partial_args.verbosity - 2))
        #console.setLevel(0)

        vollog.info("Volatility plugins path: {}".format(
            volatility.plugins.__path__))
        vollog.info("Volatility symbols path: {}".format(
            volatility.symbols.__path__))

        # Set the PARALLELISM
        if partial_args.parallelism == 'processes':
            constants.PARALLELISM = constants.Parallelism.Multiprocessing
        elif partial_args.parallelism == 'threads':
            constants.PARALLELISM = constants.Parallelism.Threading
        else:
            constants.PARALLELISM = constants.Parallelism.Off

        # Do the initialization
        ctx = contexts.Context()  # Construct a blank context
        failures = framework.import_files(
            volatility.plugins,
            True)  # Will not log as console's default level is WARNING
        if failures:
            parser.epilog = "The following plugins could not be loaded (use -vv to see why): " + \
                ", ".join(sorted(failures))
            vollog.info(parser.epilog)
        automagics = automagic.available(ctx)

        plugin_list = framework.list_plugins()

        seen_automagics = set()
        configurables_list = {}
        for amagic in automagics:
            if amagic in seen_automagics:
                continue
            seen_automagics.add(amagic)
            if isinstance(amagic,
                          interfaces.configuration.ConfigurableInterface):
                self.populate_requirements_argparse(parser, amagic.__class__)
                configurables_list[amagic.__class__.__name__] = amagic

        subparser = parser.add_subparsers(title="Plugins",
                                          dest="plugin",
                                          action=HelpfulSubparserAction)
        for plugin in sorted(plugin_list):
            plugin_parser = subparser.add_parser(
                plugin, help=plugin_list[plugin].__doc__)
            self.populate_requirements_argparse(plugin_parser,
                                                plugin_list[plugin])
            configurables_list[plugin] = plugin_list[plugin]

        ###
        # PASS TO UI
        ###
        # Hand the plugin requirements over to the CLI (us) and let it construct the config tree

        # Run the argparser
        args = parser.parse_args(arg_arr)
        print(partial_args.verbosity)
        print(args.plugin, type(args.plugin))
        if args.plugin is None:
            parser.error("Please select a plugin to run")

        vollog.log(constants.LOGLEVEL_VVV,
                   "Cache directory used: {}".format(constants.CACHE_PATH))

        plugin = plugin_list[args.plugin]
        base_config_path = "plugins"
        plugin_config_path = interfaces.configuration.path_join(
            base_config_path, plugin.__name__)

        # Special case the -f argument because people use it so frequently
        # It has to go here so it can be overridden by single-location if it's defined
        # NOTE: This will *BREAK* if LayerStacker, or the automagic configuration system, changes at all
        ###
        if args.file:
            file_name = os.path.abspath(args.file)
            if not os.path.exists(
                    file_name) and "panda.panda" not in file_name:
                print("File does not exist: {}".format(file_name))
            else:
                single_location = "file:" + request.pathname2url(file_name)
                ctx.config[
                    'automagic.LayerStacker.single_location'] = single_location

        # UI fills in the config, here we load it from the config file and do it before we process the CL parameters
        if args.config:
            with open(args.config, "r") as f:
                json_val = json.load(f)
                ctx.config.splice(
                    plugin_config_path,
                    interfaces.configuration.HierarchicalDict(json_val))

        self.populate_config(ctx, configurables_list, args, plugin_config_path)

        if args.extend:
            for extension in args.extend:
                if '=' not in extension:
                    raise ValueError(
                        "Invalid extension (extensions must be of the format \"conf.path.value='value'\")"
                    )
                address, value = extension[:extension.find('=')], json.loads(
                    extension[extension.find('=') + 1:])
                ctx.config[address] = value

        # It should be up to the UI to determine which automagics to run, so this is before BACK TO THE FRAMEWORK
        automagics = automagic.choose_automagic(automagics, plugin)
        self.output_dir = args.output_dir

        ###
        # BACK TO THE FRAMEWORK
        ###
        try:
            progress_callback = PrintedProgress()
            if args.quiet:
                progress_callback = MuteProgress()

            constructed = plugins.construct_plugin(ctx, automagics, plugin,
                                                   base_config_path,
                                                   progress_callback, self)
            #	return (ctx, automagics, plugin, base_config_path, progress_callback, self)

            if args.write_config:
                vollog.debug("Writing out configuration data to config.json")
                with open("config.json", "w") as f:
                    json.dump(dict(constructed.build_configuration()),
                              f,
                              sort_keys=True,
                              indent=2)
            # return StringTextRenderer().render(constructed.run())
            return constructed
        except exceptions.UnsatisfiedException as excp:
            self.process_exceptions(excp)
            parser.exit(
                1, "Unable to validate the plugin requirements: {}\n".format(
                    [x for x in excp.unsatisfied]))
Example #11
    def __init__(self,
                 context: interfaces.context.ContextInterface,
                 config_path: str,
                 name: str,
                 isf_url: str,
                 native_types: Optional[interfaces.symbols.NativeTableInterface] = None,
                 table_mapping: Optional[Dict[str, str]] = None,
                 validate: bool = True,
                 class_types: Optional[Mapping[
                     str, Type[interfaces.objects.ObjectInterface]]] = None,
                 symbol_shift: int = 0,
                 symbol_mask: int = 0) -> None:
        """Instantiates a SymbolTable based on an IntermediateSymbolFormat JSON file.  This is validated against the
        appropriate schema.  The validation can be disabled by passing validate = False, but this should almost never be
        done.

        Args:
            context: The volatility context for the symbol table
            config_path: The configuration path for the symbol table
            name: The name for the symbol table (this is used in symbols e.g. table!symbol )
            isf_url: The URL pointing to the ISF file location
            native_types: The NativeSymbolTable that contains the native types for this symbol table
            table_mapping: A dictionary linking names referenced in the file with symbol tables in the context
            validate: Determines whether the ISF file will be validated against the appropriate schema
            class_types: A dictionary of type names and classes that override StructType when they are instantiated
            symbol_shift: An offset by which to alter all returned symbols for this table
            symbol_mask: An address mask used for all returned symbol offsets from this table (a mask of 0 disables masking)
        """
        # Check there are no obvious errors
        # Open the file and test the version
        self._versions = dict([(x.version, x)
                               for x in class_subclasses(ISFormatTable)])
        fp = volatility.framework.layers.resources.ResourceAccessor().open(
            isf_url)
        reader = codecs.getreader("utf-8")
        json_object = json.load(reader(fp))  # type: ignore
        fp.close()

        # Validation is expensive, but we cache to store the hashes of successfully validated json objects
        if validate and not schemas.validate(json_object):
            raise exceptions.SymbolSpaceError(
                "File does not pass version validation: {}".format(isf_url))

        metadata = json_object.get('metadata', None)

        # Determine the delegate or throw an exception
        self._delegate = self._closest_version(metadata.get(
            'format', "0.0.0"), self._versions)(context, config_path, name,
                                                json_object, native_types,
                                                table_mapping)
        if self._delegate.version < constants.ISF_MINIMUM_SUPPORTED:
            raise RuntimeError(
                "ISF version {} is no longer supported: {}".format(
                    metadata.get('format', "0.0.0"), isf_url))
        elif self._delegate.version < constants.ISF_MINIMUM_DEPRECATED:
            vollog.warning("ISF version {} has been deprecated: {}".format(
                metadata.get('format', "0.0.0"), isf_url))

        # Inherit
        super().__init__(context,
                         config_path,
                         name,
                         native_types or self._delegate.natives,
                         table_mapping=table_mapping,
                         class_types=class_types)

        # Since we've been created with parameters, ensure our config is populated likewise
        self.config['isf_url'] = isf_url
        self.config['symbol_shift'] = symbol_shift
        self.config['symbol_mask'] = symbol_mask
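
As a hedged illustration of what the last two options mean for returned symbol offsets, this is the arithmetic implied by the docstring (the helper below is not part of the class):

def adjusted_symbol_address(raw_address: int, symbol_shift: int = 0, symbol_mask: int = 0) -> int:
    """Shift the raw symbol address, then apply the mask; a mask of 0 disables masking."""
    address = raw_address + symbol_shift
    if symbol_mask:
        address &= symbol_mask
    return address


# e.g. applying a KASLR-style shift while masking to 64 bits
print(hex(adjusted_symbol_address(0xffffffff81000000, symbol_shift=0x2400000,
                                  symbol_mask=0xffffffffffffffff)))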
Example #12
    def stack_layer(cls,
                    context: interfaces.context.ContextInterface,
                    initial_layer: str,
                    stack_set: List[Type[
                        interfaces.automagic.StackerLayerInterface]] = None,
                    progress_callback: constants.ProgressCallback = None):
        """Stacks as many possible layers on top of the initial layer as can be done.

        WARNING: This modifies the context provided and may pollute it with unnecessary layers
        Recommended use is to:
        1. Pass in context.clone() instead of context
        2. When provided the layer list, choose the desired layer
        3. Build the configuration using layer.build_configuration()
        4. Merge the configuration into the original context with context.config.merge()
        5. Call Construction magic to reconstruct the layers from just the configuration

        Args:
            context: The context on which to operate
            initial_layer: The name of the initial layer within the context
            stack_set: A list of StackerLayerInterface objects in the order they should be stacked
            progress_callback: A function to report progress during the process

        Returns:
            A list of layer names that exist in the provided context, stacked in order (highest to lowest)
        """
        # Repeatedly apply "determine what this is" code and build as much up as possible
        stacked = True
        stacked_layers = [initial_layer]
        if stack_set is None:
            stack_set = list(
                framework.class_subclasses(
                    interfaces.automagic.StackerLayerInterface))

        for stacker_item in stack_set:
            if not issubclass(stacker_item,
                              interfaces.automagic.StackerLayerInterface):
                raise TypeError(
                    "Stacker {} is not a descendent of StackerLayerInterface".
                    format(stacker_item.__name__))

        while stacked:
            stacked = False
            new_layer = None
            stacker_cls = None
            for stacker_cls in stack_set:
                stacker = stacker_cls()
                try:
                    vollog.log(
                        constants.LOGLEVEL_VV,
                        "Attempting to stack using {}".format(
                            stacker_cls.__name__))
                    new_layer = stacker.stack(context, initial_layer,
                                              progress_callback)
                    if new_layer:
                        context.layers.add_layer(new_layer)
                        vollog.log(
                            constants.LOGLEVEL_VV,
                            "Stacked {} using {}".format(
                                new_layer.name, stacker_cls.__name__))
                        break
                except Exception as excp:
                    # Stacking exceptions are likely only of interest to developers, so the lowest level of logging
                    fulltrace = traceback.TracebackException.from_exception(
                        excp).format(chain=True)
                    vollog.log(
                        constants.LOGLEVEL_VVV,
                        "Exception during stacking: {}".format(str(excp)))
                    vollog.log(constants.LOGLEVEL_VVVV, "\n".join(fulltrace))
            else:
                stacked = False
            if new_layer and stacker_cls:
                stacked_layers = [new_layer.name] + stacked_layers
                initial_layer = new_layer.name
                stacked = True
                stack_set.remove(stacker_cls)
        return stacked_layers
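
A hedged sketch of the recommended use spelled out in the docstring above: the merge and ConstructionMagic calls mirror Example #13, while the layer choice, the config paths and the assumption that stack_layer lives on the LayerStacker class are simplifications for illustration:

from volatility.framework import interfaces
from volatility.framework.automagic import construct_layers
from volatility.framework.automagic.stacker import LayerStacker


def restack(context, initial_layer: str, config_path: str, requirement) -> str:
    """Stack on a cloned context, then splice the chosen layer's config back in."""
    new_context = context.clone()
    stacked_layers = LayerStacker.stack_layer(new_context, initial_layer)
    chosen = stacked_layers[0]  # highest layer; pick whichever layer is actually wanted
    context.config.merge(config_path,
                         new_context.layers[chosen].build_configuration())
    # Rebuild the layers in the original context from the merged configuration
    constructor = construct_layers.ConstructionMagic(
        context, interfaces.configuration.path_join(config_path, "ConstructionMagic"))
    constructor(context, config_path, requirement)
    return chosen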
Example #13
    def stack(self, context: interfaces.context.ContextInterface, config_path: str,
              requirement: interfaces.configuration.RequirementInterface,
              progress_callback: constants.ProgressCallback) -> None:
        """Stacks the various layers and attaches these to a specific
        requirement.

        Args:
            context: Context on which to operate
            config_path: Configuration path under which to store stacking data
            requirement: Requirement that should have layers stacked on it
            progress_callback: Function to provide callback progress
        """
        # If we're cached, we now need to find where to apply the stack configuration
        if self._cached:
            top_layer_name, subconfig = self._cached
            result = self.find_suitable_requirements(context, config_path, requirement, [top_layer_name])
            if result:
                appropriate_config_path, layer_name = result
                context.config.merge(appropriate_config_path, subconfig)
                context.config[appropriate_config_path] = top_layer_name
                return
            self._cached = None

        new_context = context.clone()
        location = self.config.get('single_location', None)

        # Setup the local copy of the resource
        current_layer_name = context.layers.free_layer_name("FileLayer")
        current_config_path = interfaces.configuration.path_join(config_path, "stack", current_layer_name)

        # This must be specific to get us started, setup the config and run
        new_context.config[interfaces.configuration.path_join(current_config_path, "location")] = location
        physical_layer = physical.FileLayer(new_context, current_config_path, current_layer_name)
        new_context.add_layer(physical_layer)

        # Repeatedly apply "determine what this is" code and build as much up as possible
        stacked = True
        stacked_layers = [current_layer_name]
        stack_set = sorted(framework.class_subclasses(interfaces.automagic.StackerLayerInterface),
                           key = lambda x: x.stack_order)
        while stacked:
            stacked = False
            new_layer = None
            stacker_cls = None
            for stacker_cls in stack_set:
                stacker = stacker_cls()
                try:
                    vollog.log(constants.LOGLEVEL_VV, "Attempting to stack using {}".format(stacker_cls.__name__))
                    new_layer = stacker.stack(new_context, current_layer_name, progress_callback)
                    if new_layer:
                        new_context.layers.add_layer(new_layer)
                        vollog.log(constants.LOGLEVEL_VV,
                                   "Stacked {} using {}".format(new_layer.name, stacker_cls.__name__))
                        break
                except Exception as excp:
                    # Stacking exceptions are likely only of interest to developers, so the lowest level of logging
                    fulltrace = traceback.TracebackException.from_exception(excp).format(chain = True)
                    vollog.log(constants.LOGLEVEL_VVV, "Exception during stacking: {}".format(str(excp)))
                    vollog.log(constants.LOGLEVEL_VVVV, "\n".join(fulltrace))
            else:
                stacked = False
            if new_layer and stacker_cls:
                stacked_layers = [new_layer.name] + stacked_layers
                current_layer_name = new_layer.name
                stacked = True
                stack_set.remove(stacker_cls)

        if stacked_layers is not None:
            # Applies the stacked_layers to each requirement in the requirements list
            result = self.find_suitable_requirements(new_context, config_path, requirement, stacked_layers)
            if result:
                path, layer = result
                # splice in the new configuration into the original context
                context.config.merge(path, new_context.layers[layer].build_configuration())

                # Call the construction magic now we may have new things to construct
                constructor = construct_layers.ConstructionMagic(
                    context, interfaces.configuration.path_join(self.config_path, "ConstructionMagic"))
                constructor(context, config_path, requirement)

                # Stash the changed config items
                self._cached = context.config.get(path, None), context.config.branch(path)

        vollog.debug("Stacked layers: {}".format(stacked_layers))