Example no. 1
0
def output(base_key_full_path):
    """Performs the clean up of AltFS applicable values from the Registry"""
    hive, base_key_path = split_key_path_to_hive_and_path(base_key_full_path)
    with _winreg.OpenKey(HIVES[hive], base_key_path,
                         _winreg.KEY_SET_VALUE) as base_key:
        buckets_names = get_sub_keys(base_key)

    for bucket_name in buckets_names:
        print "[key] %s" % bucket_name
        altfs_applicable_values = []
        with get_bucket_key(HIVES[hive],
                            "%s\\%s" % (base_key_path, bucket_name)) as key:
            for value_name in get_sub_values(key):
                if is_value_name_applicable(buckets_names, value_name):
                    altfs_applicable_values.append(value_name)

        with get_bucket_key(HIVES[hive],
                            "%s\\%s" % (base_key_path, bucket_name),
                            desired_access=_winreg.KEY_QUERY_VALUE) as key:
            for value_name in altfs_applicable_values:
                print "\t[val] %s:" % value_name
                data, _type = _winreg.QueryValueEx(key, value_name)
                block = Block.generate_block_from_packed_str(data)
                # print pprint.pformat(block.__dict__, indent=4)
                print "\t      " + \
                    json.dumps(
                        block.__dict__,
                        indent=4,
                        sort_keys=True
                    ).replace("\n", "\n\t      ")
Example no. 2
0
File: AltFS.py Project: mmg1/AltFS
    def _get_block(self, bucket_id, value_id):
        """
        Loads the block data from the desired value.

        Returns it as a Block instance.
        Raises InternalStorageOperationException if the provider has failed
        to read.
        """
        try:
            block = Block.generate_block_from_packed_str(
                self._storage_provider.get_block(bucket_id, value_id))
        except Exception as e:
            # Lazy logger args: the message is only formatted if the
            # record is actually emitted.
            logger.error("reading of block at (%s:%s) has failed: %s",
                         bucket_id, value_id, str(e))
            # Wrap any provider failure in the storage-operation exception
            # callers are expected to catch.
            raise InternalStorageOperationException(
                InternalStorageOperationException.OPERATION_READ, str(e))
        logger.debug("a block was read at (%s:%s):%s",
                     bucket_id, value_id, block.__dict__)
        return block
Example no. 3
0
File: AltFS.py Project: mmg1/AltFS
    def _load_descriptor(self):
        """
        Loads the descriptor instance from the superblock.

        Creates an empty descriptor if such block does not exist,
        and writes it to storage.
        """
        # Start from a fresh, empty descriptor; its attribute dict is
        # replaced below if an existing superblock is found.
        self._descriptor = Descriptor()
        try:  # try load the existing descriptor from superblock
            # The superblock lives at value id 0 of the first bucket.
            first_block_data = self._storage_provider.get_block(
                self._first_bucket_id, 0)
            block = Block.generate_block_from_packed_str(first_block_data)
            # Adopt the stored block data wholesale as the descriptor state.
            self._descriptor.__dict__ = block.data
        except BucketValueMissingException:  # superblock does not exist
            # NOTE(review): logged at error level although a missing
            # superblock looks like an expected first-run condition —
            # confirm whether warning/info would be more appropriate.
            logger.error("superblock does not exist. Creating a new empty one")
            # create an empty descriptor and write it as a superblock (id=0)
            self._write_block(self._first_bucket_id, 0,
                              self._generate_descriptor_block())