Code example #1
File: fuse_mount.py  Project: qsdj/grr
class GRRFuseDatastoreOnly(object):
    """We implement the FUSE methods in this class."""

    # Directories to hide. Readdir will not return them.
    ignored_dirs = [
        # We don't want to show AFF4Index objects.
        "/index/client"
    ]

    def __init__(self, root="/", token=None):
        self.root = rdfvalue.RDFURN(root)
        self.token = token
        self.default_file_mode = _DEFAULT_MODE_FILE
        self.default_dir_mode = _DEFAULT_MODE_DIRECTORY

        try:
            logging.info("Making sure supplied aff4path actually exists....")
            self.getattr(root)
            logging.info("OK")
        except fuse.FuseOSError:
            logging.info("Supplied aff4path didn't exist!")
            raise IOError("Supplied aff4 path '%s' does not exist." %
                          self.root)

    def MakePartialStat(self, fd):
        """Try and give a 'stat' for something not in the data store.

        Args:
          fd: The object with no stat.

        Returns:
          A dictionary corresponding to what we'll say the 'stat' is
          for objects which are not actually files, so have no OS level stat.
        """

        is_dir = "Container" in fd.behaviours

        return {
            "pathspec": fd.Get(fd.Schema.PATHSPEC, ""),
            "st_atime": fd.Get(fd.Schema.LAST, 0),
            "st_blksize": 0,
            "st_blocks": 0,
            "st_ctime": 0,
            "st_dev": 0,
            "st_gid": 0,
            "st_ino": 0,
            "st_mode":
            self.default_dir_mode if is_dir else self.default_file_mode,
            "st_mtime": 0,
            "st_nlink": 0,
            "st_rdev": 0,
            "st_size": fd.Get(fd.Schema.SIZE, 0),
            "st_uid": 0
        }

    def _IsDir(self, path):
        """True if and only if the path has the directory bit set in its mode."""
        return stat.S_ISDIR(int(self.getattr(path)["st_mode"]))

    def Readdir(self, path, fh=None):
        """Reads a directory given by path.

        Args:
          path: The path to list children of.
          fh: A file handler. Not used.

        Yields:
          A generator of filenames.

        Raises:
          FuseOSError: If we try and list a file.
        """
        del fh

        # We can't read a path if it's a file.
        if not self._IsDir(path):
            raise fuse.FuseOSError(errno.ENOTDIR)

        fd = aff4.FACTORY.Open(self.root.Add(path), token=self.token)

        children = fd.ListChildren()

        # Make these special directories unicode to be consistent with the rest of
        # aff4.
        for directory in [u".", u".."]:
            yield directory

        # ListChildren returns a generator, so we do the same.
        for child in children:
            # Filter out any directories we've chosen to ignore.
            if child.Path() not in self.ignored_dirs:
                yield child.Basename()

    def Getattr(self, path, fh=None):
        """Performs a stat on a file or directory.

        Args:
          path: The path to stat.
          fh: A file handler. Not used.

        Returns:
          A dictionary mapping st_ names to their values.

        Raises:
          FuseOSError: When a path is supplied that grr doesn't know about
            (i.e. an invalid file path), or when an empty path is passed.
            (The empty string, when passed to self.root.Add, would return a
            path for aff4:/, the root directory, which is not the behaviour
            we want.)
        """
        del fh

        if not path:
            raise fuse.FuseOSError(errno.ENOENT)

        if path != self.root:
            full_path = self.root.Add(path)
        else:
            full_path = path

        fd = aff4.FACTORY.Open(full_path, token=self.token)

        # The root aff4 path technically doesn't exist in the data store, so
        # it is a special case.
        if full_path == "/":
            return self.MakePartialStat(fd)

        # Grab the stat according to aff4.
        aff4_stat = fd.Get(fd.Schema.STAT)

        # If the Schema for the object has a STAT attribute, go ahead and return
        # it as a dictionary.
        if aff4_stat:
            return aff4_stat.AsDict()

        # If the object didn't have a stored stat, we figure out if it is a special
        # grr object, or just doesn't exist.

        # We now check if the aff4 object actually has a row in the data store.
        # This prevents us from being able to cd to directories that don't exist,
        # since such directories have a newly-created empty AFF4Object,
        # but no row in the data store. Anything that is a
        # row in the data store will have a LAST attribute, so we check that.
        elif fd.Get(fd.Schema.LAST) is None:
            # We raise the "no such file or directory" error.
            raise fuse.FuseOSError(errno.ENOENT)
        else:
            # This is an object that exists in the datastore, but has no STAT, so we
            # don't know how to handle it.
            pass

        # If the object was in the data store, but didn't have a stat, we just
        # try and guess some sensible values.
        return self.MakePartialStat(fd)

    def Read(self, path, length=None, offset=0, fh=None):
        """Reads data from a file.

        Args:
          path: The path to the file to read.
          length: How many bytes to read.
          offset: Offset in bytes from which reading should start.
          fh: A file handler. Not used.

        Returns:
          A string containing the file contents requested.

        Raises:
          FuseOSError: If we try and read a directory or if we try and read
            an object that doesn't support reading.
        """
        del fh

        if self._IsDir(path):
            raise fuse.FuseOSError(errno.EISDIR)

        fd = aff4.FACTORY.Open(self.root.Add(path), token=self.token)

        # If the object has Read() and Seek() methods, let's use them.
        if (hasattr(fd, "Read") and callable(fd.Read) and
                hasattr(fd, "Seek") and callable(fd.Seek)):
            # By default, read the whole file.
            if length is None:
                length = fd.Get(fd.Schema.SIZE)

            fd.Seek(offset)
            return fd.Read(length)
        else:
            # If we don't have Read/Seek methods, we probably can't read this object.
            raise fuse.FuseOSError(errno.EIO)

    def RaiseReadOnlyError(self):
        """Raise an error complaining that the file system is read-only."""
        raise fuse.FuseOSError(errno.EROFS)

    # pylint: disable=invalid-name
    def mkdir(self, *unused_args, **unused_kwargs):
        """Unimplemented on purpose. File system is read-only."""
        self.RaiseReadOnlyError()

    def symlink(self, *unused_args, **unused_kwargs):
        """Unimplemented on purpose. File system is read-only."""
        self.RaiseReadOnlyError()

    def rename(self, *unused_args, **unused_kwargs):
        """Unimplemented on purpose. File system is read-only."""
        self.RaiseReadOnlyError()

    def link(self, *unused_args, **unused_kwargs):
        """Unimplemented on purpose. File system is read-only."""
        self.RaiseReadOnlyError()

    def write(self, *unused_args, **unused_kwargs):
        """Unimplemented on purpose. File system is read-only."""
        self.RaiseReadOnlyError()

    def truncate(self, *unused_args, **unused_kwargs):
        """Unimplemented on purpose. File system is read-only."""
        self.RaiseReadOnlyError()

    def create(self, *unused_args, **unused_kwargs):
        """Unimplemented on purpose. File system is read-only."""
        self.RaiseReadOnlyError()

    # pylint: enable=invalid-name

    # FUSE expects the names of the functions to be in standard
    # filesystem style (all lower case), so we alias them here.

    read = utils.Proxy("Read")
    readdir = utils.Proxy("Readdir")
    getattr = utils.Proxy("Getattr")
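For orientation, here is a minimal, hypothetical usage sketch showing how a handler like this is typically handed to fusepy's FUSE class. The mountpoint, AFF4 root, and token values are placeholders, and the access_control import path varies between GRR releases.

# Hypothetical usage sketch. Assumes a GRR server environment whose data
# store has already been initialised, plus the fusepy package; all values
# below are placeholders, not taken from the example above.
from fuse import FUSE  # fusepy

from grr.lib import access_control  # import path differs across GRR versions

# A token identifying who is reading the data store (placeholder values).
token = access_control.ACLToken(username="analyst", reason="fuse browsing")

# Expose a client's AFF4 subtree read-only at a local mountpoint.
fs = GRRFuseDatastoreOnly(root="aff4:/C.0000000000000001", token=token)
FUSE(fs, "/mnt/grr_client", foreground=True)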
Code example #2
File: protodict.py  Project: rainser/grr
class Dict(rdf_structs.RDFProtoStruct):
    """A high level interface for protobuf Dict objects.

    This effectively converts from a dict to a proto and back.
    The dict may contain strings (python unicode objects), int64,
    or binary blobs (python string objects) as keys and values.
    """
    protobuf = jobs_pb2.Dict
    rdf_deps = [
        KeyValue,
    ]

    _values = None

    def __init__(self, initializer=None, age=None, **kwarg):
        super(Dict, self).__init__(initializer=None, age=age)

        # Support initializing from a mapping
        if isinstance(initializer, dict):
            self.FromDict(initializer)

        # Can be initialized from kwargs (like a dict).
        elif initializer is None:
            self.FromDict(kwarg)

        # Initialize from another Dict.
        elif isinstance(initializer, Dict):
            self.FromDict(initializer.ToDict())
            self.age = initializer.age

        else:
            raise rdfvalue.InitializeError(
                "Invalid initializer for ProtoDict.")

    def ToDict(self):
        result = {}
        for x in self._values.values():
            key = x.k.GetValue()
            result[key] = x.v.GetValue()
            try:
                # Try to unpack nested AttributedDicts
                result[key] = result[key].ToDict()
            except AttributeError:
                pass

        return result

    def FromDict(self, dictionary, raise_on_error=True):
        # First clear and then set the dictionary.
        self._values = {}
        for key, value in dictionary.iteritems():
            self._values[key] = KeyValue(
                k=DataBlob().SetValue(key, raise_on_error=raise_on_error),
                v=DataBlob().SetValue(value, raise_on_error=raise_on_error))
        self.dat = self._values.values()
        return self

    def __getitem__(self, key):
        return self._values[key].v.GetValue()

    def __contains__(self, key):
        return key in self._values

    def GetItem(self, key, default=None):
        if key in self._values:
            return self._values[key].v.GetValue()
        return default

    def Items(self):
        for x in self._values.itervalues():
            yield x.k.GetValue(), x.v.GetValue()

    def Values(self):
        for x in self._values.itervalues():
            yield x.v.GetValue()

    def Keys(self):
        for x in self._values.itervalues():
            yield x.k.GetValue()

    get = utils.Proxy("GetItem")
    items = utils.Proxy("Items")
    keys = utils.Proxy("Keys")
    values = utils.Proxy("Values")

    def __delitem__(self, key):
        self.dat.dirty = True
        del self._values[key]

    def __len__(self):
        return len(self._values)

    def SetItem(self, key, value, raise_on_error=True):
        """Alternative to __setitem__ that can ignore errors.

        Sometimes we want to serialize a structure that contains some simple
        objects, and some that can't be serialized.  This method gives the
        caller a way to specify that they don't care about values that can't
        be serialized.

        Args:
          key: dict key
          value: dict value
          raise_on_error: if True, raise if we can't serialize.  If False,
            set the key to an error string.
        """
        self.dat.dirty = True
        self._values[key] = KeyValue(
            k=DataBlob().SetValue(key, raise_on_error=raise_on_error),
            v=DataBlob().SetValue(value, raise_on_error=raise_on_error))

    def __setitem__(self, key, value):
        self.dat.dirty = True
        self._values[key] = KeyValue(k=DataBlob().SetValue(key),
                                     v=DataBlob().SetValue(value))

    def __iter__(self):
        for x in self._values.itervalues():
            yield x.k.GetValue()

    def __eq__(self, other):
        if isinstance(other, dict):
            return self.ToDict() == other
        elif isinstance(other, Dict):
            return self.ToDict() == other.ToDict()
        else:
            return False

    def GetRawData(self):
        self.dat = self._values.values()
        return super(Dict, self).GetRawData()

    def _CopyRawData(self):
        self.dat = self._values.values()
        return super(Dict, self)._CopyRawData()

    def SetRawData(self, raw_data):
        super(Dict, self).SetRawData(raw_data)
        self._values = {}
        for d in self.dat:
            self._values[d.k.GetValue()] = d

    def SerializeToString(self):
        self.dat = self._values.values()
        return super(Dict, self).SerializeToString()

    def ParseFromString(self, value):
        super(Dict, self).ParseFromString(value)
        self._values = {}
        for d in self.dat:
            self._values[d.k.GetValue()] = d

    def __str__(self):
        return str(self.ToDict())
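To make the mapping protocol above concrete, a short usage sketch follows. The keys and values are illustrative only, and it assumes the surrounding GRR modules (jobs_pb2, DataBlob, KeyValue) are importable as in the file above.

# Illustrative round trip between a plain Python dict and the
# protobuf-backed Dict (Python 2 era code, matching the class above).
d = Dict({"hostname": "workstation-1", "port": 443})

d["os"] = "Linux"                 # stored through DataBlob.SetValue
print(d.GetItem("port"))          # -> 443
print(d.GetItem("missing", 0))    # -> 0, the supplied default

# Serialization goes through the underlying jobs_pb2.Dict protobuf.
serialized = d.SerializeToString()
restored = Dict()
restored.ParseFromString(serialized)
assert restored.ToDict() == d.ToDict()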
Code example #3
File: vfs.py  Project: qsdj/grr
class VFSHandler(object):
    """Base class for handling objects in the VFS."""
    supported_pathtype = -1

    # Should this handler be auto-registered?
    auto_register = False

    size = 0
    offset = 0

    # This is the VFS path to this specific handler.
    path = "/"

    # This will be set by the VFSOpen factory to the pathspec of the final
    # destination of this handler. This pathspec will be case corrected and
    # updated to reflect any potential recursion.
    pathspec = None
    base_fd = None

    __metaclass__ = registry.MetaclassRegistry

    def __init__(self,
                 base_fd,
                 pathspec=None,
                 progress_callback=None,
                 full_pathspec=None):
        """Constructor.

        Args:
          base_fd: A handler to the predecessor handler.
          pathspec: The pathspec to open.
          progress_callback: A callback to indicate that the open call is
            still working but needs more time.
          full_pathspec: The full pathspec we are trying to open.

        Raises:
          IOError: if this handler can not be instantiated over the
            requested path.
        """
        _ = pathspec
        _ = full_pathspec
        self.base_fd = base_fd
        self.progress_callback = progress_callback
        if base_fd is None:
            self.pathspec = rdf_paths.PathSpec()
        else:
            # Make a copy of the base pathspec.
            self.pathspec = base_fd.pathspec.Copy()
        self.metadata = {}

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.Close()
        return False

    def Seek(self, offset, whence=os.SEEK_SET):
        """Seek to an offset in the file."""
        if whence == os.SEEK_SET:
            self.offset = offset
        elif whence == os.SEEK_CUR:
            self.offset += offset
        elif whence == os.SEEK_END:
            self.offset = self.size + offset
        else:
            raise ValueError("Illegal whence value %s" % whence)

    def Read(self, length):
        """Reads some data from the file."""
        raise NotImplementedError

    def Stat(self, path=None, ext_attrs=None):
        """Returns a StatEntry about this file."""
        del path, ext_attrs  # Unused.
        raise NotImplementedError

    def IsDirectory(self):
        """Returns true if this object can contain other objects."""
        raise NotImplementedError

    def Tell(self):
        return self.offset

    def Close(self):
        """Close internal file descriptors."""

    def OpenAsContainer(self):
        """Guesses a container from the current object."""
        if self.IsDirectory():
            return self

        # TODO(user): Add support for more containers here (e.g. registries, zip
        # files etc).
        else:  # For now just guess TSK.
            return VFS_HANDLERS[rdf_paths.PathSpec.PathType.TSK](
                self,
                rdf_paths.PathSpec(path="/",
                                   pathtype=rdf_paths.PathSpec.PathType.TSK),
                progress_callback=self.progress_callback)

    def MatchBestComponentName(self, component):
        """Returns the name of the component which matches best our base listing.

    In order to do the best case insensitive matching we list the files in the
    base handler and return the base match for this component.

    Args:
      component: A component name which should be present in this directory.

    Returns:
      the best component name.
    """
        fd = self.OpenAsContainer()

        # Adjust the component casing
        file_listing = set(fd.ListNames())

        # First try an exact match
        if component not in file_listing:
            # Now try to match lower case
            lower_component = component.lower()
            for x in file_listing:
                if lower_component == x.lower():
                    component = x
                    break

        if fd.supported_pathtype != self.pathspec.pathtype:
            new_pathspec = rdf_paths.PathSpec(path=component,
                                              pathtype=fd.supported_pathtype)
        else:
            new_pathspec = self.pathspec.last.Copy()
            new_pathspec.path = component

        return new_pathspec

    def ListFiles(self, ext_attrs=False):
        """An iterator over all VFS files contained in this directory.

        Generates a StatEntry for each file or directory.

        Args:
          ext_attrs: Whether stat entries should contain extended attributes.

        Raises:
          IOError: if this fails.
        """
        del ext_attrs  # Unused.

    def ListNames(self):
        """A generator for all names in this directory."""
        return []

    # These are file object conformant namings for library functions that
    # grr uses, and that expect to interact with 'real' file objects.
    read = utils.Proxy("Read")
    seek = utils.Proxy("Seek")
    stat = utils.Proxy("Stat")
    tell = utils.Proxy("Tell")
    close = utils.Proxy("Close")

    @classmethod
    def Open(cls,
             fd,
             component,
             pathspec=None,
             progress_callback=None,
             full_pathspec=None):
        """Try to correct the casing of component.

        This method is called when we failed to open the component directly.
        We try to transform the component into something which is likely to
        work.

        In this implementation, we correct the case of the component until
        we can not open the path any more.

        Args:
          fd: The base fd we will use.
          component: The component we should open.
          pathspec: The rest of the pathspec object.
          progress_callback: A callback to indicate that the open call is
            still working but needs more time.
          full_pathspec: The full pathspec we are trying to open.

        Returns:
          A file object.

        Raises:
          IOError: If the path still could not be opened.
        """
        # The handler for this component
        try:
            handler = VFS_HANDLERS[component.pathtype]
        except KeyError:
            raise IOError("VFS handler %d not supported." % component.pathtype)

        # We will not do any case folding unless requested.
        if component.path_options == rdf_paths.PathSpec.Options.CASE_LITERAL:
            return handler(base_fd=fd, pathspec=component)

        path_components = client_utils.LocalPathToCanonicalPath(component.path)
        path_components = ["/"] + filter(None, path_components.split("/"))
        for i, path_component in enumerate(path_components):
            try:
                if fd:
                    new_pathspec = fd.MatchBestComponentName(path_component)
                else:
                    new_pathspec = component
                    new_pathspec.path = path_component

                # The handler for this component
                try:
                    handler = VFS_HANDLERS[new_pathspec.pathtype]
                except KeyError:
                    raise IOError("VFS handler %d not supported." %
                                  new_pathspec.pathtype)

                fd = handler(base_fd=fd,
                             pathspec=new_pathspec,
                             full_pathspec=full_pathspec,
                             progress_callback=progress_callback)
            except IOError:
                # Can not open the first component, we must raise here.
                if i <= 1:
                    raise IOError("File not found")

                # Insert the remaining path at the front of the pathspec.
                pathspec.Insert(0,
                                path=utils.JoinPath(*path_components[i:]),
                                pathtype=rdf_paths.PathSpec.PathType.TSK)
                break

        return fd

    def GetMetadata(self):
        return self.metadata
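As a rough illustration of the contract above, the sketch below subclasses VFSHandler with a handler that serves a single in-memory byte string. The class name and data are hypothetical, and auto_register is left off so the sketch never enters VFS_HANDLERS.

# Hypothetical subclass sketch: serves one in-memory byte string through the
# VFSHandler read/seek/tell interface. Not registered as a real handler.
class InMemoryHandler(VFSHandler):
    """Serves a fixed byte string through the VFSHandler interface."""

    auto_register = False  # Keep this sketch out of VFS_HANDLERS.

    def __init__(self, base_fd, pathspec=None, progress_callback=None,
                 full_pathspec=None):
        super(InMemoryHandler, self).__init__(
            base_fd,
            pathspec=pathspec,
            progress_callback=progress_callback,
            full_pathspec=full_pathspec)
        self.data = b"hello from the VFS"
        self.size = len(self.data)

    def Read(self, length):
        # Honour the offset maintained by Seek()/Tell() in the base class.
        result = self.data[self.offset:self.offset + length]
        self.offset += len(result)
        return result

    def IsDirectory(self):
        return False


# The lower-case proxies defined on VFSHandler make it file-object friendly:
fd = InMemoryHandler(None)
fd.seek(6)
assert fd.read(4) == b"from"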