Esempio n. 1
0
    def _generator(self, layer_name, symbol_table, hive_offsets):
        """Dumps each registry hive found in the layer to its own output file.

        Args:
            layer_name: name of the memory layer to scan for hives
            symbol_table: name of the kernel symbol table
            hive_offsets: optional list of hive offsets to restrict the listing to

        Yields:
            Tuples of (indent level, (hive name, hive offset, status message)).
        """
        chunk_size = 0x500000  # read in 5 MB chunks to bound memory use
        for hive in hivelist.HiveList.list_hives(self.context,
                                                 self.config_path,
                                                 layer_name=layer_name,
                                                 symbol_table=symbol_table,
                                                 hive_offsets=hive_offsets):

            maxaddr = hive.hive.Storage[0].Length
            hive_name = self._sanitize_hive_name(hive.get_name())

            filedata = plugins.FileInterface('registry.{}.{}.hive'.format(
                hive_name, hex(hive.hive_offset)))
            if hive._base_block:
                hive_data = self.context.layers[hive.dependencies[0]].read(
                    hive.hive.BaseBlock, 1 << 12)
            else:
                # FIX: the file buffer takes bytes, not str — a str here would
                # raise TypeError on write (layer.read above returns bytes)
                hive_data = b'\x00' * (1 << 12)
            filedata.data.write(hive_data)

            # Copy the hive body chunk by chunk, padding unreadable pages
            for i in range(0, maxaddr, chunk_size):
                current_chunk_size = min(chunk_size, maxaddr - i)
                data = hive.read(i, current_chunk_size, pad=True)
                filedata.data.write(data)
            self.produce_file(filedata)
            yield (0, (hive.name, format_hints.Hex(hive.hive_offset),
                       'Written to {}'.format(filedata.preferred_filename)))
Esempio n. 2
0
    def _generator(self):
        """Writes the requested memory layer to an output file.

        Without a configured layer name, lists the available layers instead.
        Refuses to overwrite an existing output file.

        Yields:
            Tuples of (indent level, (status message,)).
        """
        if self.config.get('layer_name', None) is None:
            # No layer chosen: enumerate what is available
            for layer_name in self.context.layers:
                yield 0, ("Layer '{}' available as '{}'".format(
                    layer_name,
                    self.context.layers[layer_name].__class__.__name__), )
        elif self.config['layer_name'] not in self.context.layers:
            yield 0, ('Layer Name does not exist', )
        elif os.path.exists(self.config.get('output',
                                            self.default_output_name)):
            yield 0, ('Refusing to overwrite existing output file', )
        else:
            chunk_size = self.config.get('block_size', self.default_block_size)
            layer = self.context.layers[self.config['layer_name']]

            try:
                filedata = plugins.FileInterface(
                    self.config.get('output', self.default_output_name))
                # Copy the layer chunk by chunk, padding unreadable pages
                for i in range(0, layer.maximum_address, chunk_size):
                    current_chunk_size = min(chunk_size,
                                             layer.maximum_address - i)
                    data = layer.read(i, current_chunk_size, pad=True)
                    filedata.data.write(data)
                self.produce_file(filedata)
            except Exception as excp:
                # FIX: include the cause, and do not claim success below
                vollog.warning(
                    "Unable to write out output file: {}".format(excp))
                yield 0, ('Layer {} could not be written'.format(
                    self.config['layer_name']), )
            else:
                # FIX: only report success when the write actually completed;
                # previously this message was yielded even after a failure
                yield 0, ('Layer {} has been written to {}'.format(
                    self.config['layer_name'],
                    self.config.get('output', self.default_output_name)), )
Esempio n. 3
0
    def write_layer(
        cls,
        context: interfaces.context.ContextInterface,
        layer_name: str,
        preferred_name: str,
        chunk_size: Optional[int] = None,
        progress_callback: Optional[constants.ProgressCallback] = None
    ) -> Optional[plugins.FileInterface]:
        """Produces a filedata from the named layer in the provided context

        Args:
            context: the context from which to read the memory layer
            layer_name: the name of the layer to write out
            preferred_name: a string with the preferred filename for the file
            chunk_size: an optional size for the chunks that should be written (defaults to 0x500000)
            progress_callback: an optional function that takes a percentage and a string that displays output
        """

        if layer_name not in context.layers:
            raise exceptions.LayerException("Layer not found")
        layer = context.layers[layer_name]

        # Fall back to the class default when no chunk size was requested
        block = cls.default_block_size if chunk_size is None else chunk_size

        filedata = plugins.FileInterface(preferred_name)
        offset = 0
        while offset < layer.maximum_address:
            remaining = layer.maximum_address - offset
            # Pad unreadable regions so the output preserves layer offsets
            filedata.data.write(layer.read(offset, min(block, remaining),
                                           pad=True))
            if progress_callback:
                progress_callback((offset / layer.maximum_address) * 100,
                                  'Writing layer {}'.format(layer_name))
            offset += block
        return filedata
Esempio n. 4
0
    def _generator(self):
        """Writes the plugin configuration to a JSON file and yields each
        key/value pair as a result row.

        With the 'extra' option set, the full context configuration is dumped
        to "config.extra" instead of the buildable "config.json".

        Yields:
            Tuples of (indent level, (key, JSON-encoded value)).
        """
        filename = "config.json"
        config = dict(self.build_configuration())
        if self.config.get('extra', False):
            vollog.debug(
                "Outputting additional information, this will NOT work with the -c option"
            )
            # The raw context config is richer but not re-loadable via -c
            config = dict(self.context.config)
            filename = "config.extra"
        try:
            filedata = plugins.FileInterface(filename)
            filedata.data.write(
                bytes(json.dumps(config, sort_keys=True, indent=2), 'latin-1'))
            self.produce_file(filedata)
        except Exception as excp:
            # FIX: surface the underlying cause instead of swallowing it —
            # the failure may be in encoding OR in producing the file
            vollog.warning(
                "Unable to JSON encode configuration: {}".format(excp))

        for k, v in config.items():
            yield (0, (k, json.dumps(v)))
Esempio n. 5
0
    def _generator(self) -> Iterator[Tuple[int, Tuple[int, str]]]:
        """Lists registry hive objects, optionally dumping each hive to a file.

        Yields:
            Tuples of (indent level, (hive offset, hive name, dumped flag)).
        """
        chunk_size = 0x500000  # read in 5 MB chunks to bound memory use

        for hive_object in self.list_hive_objects(
                context=self.context,
                layer_name=self.config["primary"],
                symbol_table=self.config["nt_symbols"],
                filter_string=self.config.get('filter', None)):

            dumped = False
            if self.config['dump']:
                # Construct the hive layer for just this hive offset
                hive = next(
                    self.list_hives(self.context,
                                    self.config_path,
                                    layer_name=self.config["primary"],
                                    symbol_table=self.config["nt_symbols"],
                                    hive_offsets=[hive_object.vol.offset]))
                maxaddr = hive.hive.Storage[0].Length
                hive_name = self._sanitize_hive_name(hive.get_name())

                filedata = plugins.FileInterface('registry.{}.{}.hive'.format(
                    hive_name, hex(hive.hive_offset)))
                if hive._base_block:
                    hive_data = self.context.layers[hive.dependencies[0]].read(
                        hive.hive.BaseBlock, 1 << 12)
                else:
                    # FIX: the file buffer takes bytes, not str — a str here
                    # would raise TypeError on write (read() returns bytes)
                    hive_data = b'\x00' * (1 << 12)
                filedata.data.write(hive_data)

                # Copy the hive body chunk by chunk, padding unreadable pages
                for i in range(0, maxaddr, chunk_size):
                    current_chunk_size = min(chunk_size, maxaddr - i)
                    data = hive.read(i, current_chunk_size, pad=True)
                    filedata.data.write(data)
                self.produce_file(filedata)
                dumped = True

            yield (0, (format_hints.Hex(hive_object.vol.offset),
                       hive_object.get_name() or "", dumped))