Example #1
    def utimens(self, raw_path, times=None):
        """Set the file times."""

        if times is not None:
            (atime, mtime) = times
        else:
            now = time()
            (atime, mtime) = (now, now)

        (entry, path, filename) = get_entry_or_raise(raw_path)

        mtime_phrase = get_flat_normal_fs_time_from_epoch(mtime)
        atime_phrase = get_flat_normal_fs_time_from_epoch(atime)

        gd = get_gdrive()

        try:
            entry = gd.update_entry(
                        entry, 
                        modified_datetime=mtime_phrase,
                        accessed_datetime=atime_phrase)
        except:
            _logger.exception("Could not update entry [%s] for times.",
                              entry)

            raise FuseOSError(EIO)

        return 0
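
The helper get_flat_normal_fs_time_from_epoch() is not shown here. As a point of reference, the Drive API expects RFC 3339 timestamps, so a minimal stand-in conversion (an illustrative sketch under that assumption, not the library's implementation) might look like this:

    import datetime

    def epoch_to_rfc3339(epoch_seconds):
        # Render an epoch timestamp in the RFC 3339 form used by the Drive API,
        # e.g. "2014-02-11T09:30:00.000Z" (millisecond precision).
        dt = datetime.datetime.utcfromtimestamp(epoch_seconds)
        return dt.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'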
Example #2
    def truncate(self, filepath, length, fh=None):
        if fh is not None:
            om = gdrivefs.opened_file.get_om()

            try:
                opened_file = om.get_by_fh(fh)
            except:
                _logger.exception("Could not retrieve OpenedFile for handle "
                                  "with ID (%d) (truncate).", fh)

                raise FuseOSError(EIO)

            _logger.debug("Truncating and clearing FH: %s", opened_file)

            opened_file.reset_state()

            entry_id = opened_file.entry_id
            cache = EntryCache.get_instance().cache
            entry = cache.get(entry_id)

            opened_file.truncate(length)
        else:
            (entry, path, filename) = get_entry_or_raise(filepath)

        gd = get_gdrive()

        try:
            entry = gd.truncate(entry)
        except:
            _logger.exception("Could not truncate entry [%s].", entry)
            raise FuseOSError(EIO)
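
get_om() and get_by_fh() are assumed here to expose a registry that maps integer file-handles to OpenedFile objects. A minimal sketch of that kind of registry (purely illustrative; not the gdrivefs implementation):

    class OpenedFileRegistry(object):
        """Map integer file-handles to opened-file objects (illustrative only)."""

        def __init__(self):
            self.__by_fh = {}
            self.__next_fh = 1

        def register(self, opened_file):
            fh = self.__next_fh
            self.__next_fh += 1
            self.__by_fh[fh] = opened_file
            return fh

        def get_by_fh(self, fh):
            # Raises KeyError for unknown handles; the caller above translates
            # lookup failures into FuseOSError(EIO).
            return self.__by_fh[fh]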
Example #3
    def find_path_components_goandget(self, path):
        """Do the same thing that find_path_components() does, except that 
        when we don't have record of a path-component, try to go and find it 
        among the children of the previous path component, and then try again.
        """

        gd = get_gdrive()

        with PathRelations.rlock:
            previous_results = []
            i = 0
            while True:
                # self.__log.debug("Attempting to find path-components (go and "
                #                  "get) for path [%s].  CYCLE= (%d)", path, i)

                # See how many components can be found in our current cache.

                result = self.__find_path_components(path)

                # If we could resolve the entire path, return success.

                if result[2] is True:
                    return result

                # If we could not resolve the entire path, and we're no more
                # successful than a prior attempt, we'll just have to return a
                # partial.

                num_results = len(result[0])
                if num_results in previous_results:
                    return result

                previous_results.append(num_results)

                # Else, we've encountered a component/depth of the path that we
                # don't currently know about.
                # TODO: This is going to be the general area that we'd have to
                #       adjust to support multiple, identical entries. This
                #       currently only considers the first result. We should
                #       rewrite this to be recursive in order to make it easier
                #       to keep track of a list of results.
                # The parent is the last one found, or the root if none.
                parent_id = result[0][num_results - 1] \
                                if num_results \
                                else AccountInfo.get_instance().root_id

                # The child will be the first part that was not found.
                child_name = result[1][num_results]

                children = gd.list_files(parent_id=parent_id,
                                         query_is_string=child_name)

                for child in children:
                    self.register_entry(child)

                filenames_phrase = ', '.join(
                    [candidate.id for candidate in children])
                # self.__log.debug("(%d) candidate children were found: %s",
                #                  len(children), filenames_phrase)

                i += 1
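
The loop above terminates either on a full resolution or as soon as a pass resolves no more components than an earlier pass did. A stripped-down sketch of that stop-when-no-progress pattern (a generic illustration, not gdrivefs code):

    def resolve_until_stalled(attempt_resolution):
        # attempt_resolution() returns (found_ids, path_parts, fully_resolved),
        # mirroring the assumed shape of the __find_path_components() result.
        previous_counts = []
        while True:
            (found_ids, path_parts, fully_resolved) = attempt_resolution()

            if fully_resolved:
                return (found_ids, path_parts, True)

            if len(found_ids) in previous_counts:
                # No more progress than a prior pass; return the partial result.
                return (found_ids, path_parts, False)

            previous_counts.append(len(found_ids))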
Example #4
    def __init__(self, *args, **kwargs):
        super(EntryCache, self).__init__(*args, **kwargs)

        # TODO(dustin): This isn't used, and we don't think that it necessarily needs
        #               to be instantiated, now.
        #        about = AccountInfo.get_instance()
        self.__gd = get_gdrive()
Example #5
    def rename(self, filepath_old, filepath_new):
        # Make sure the old filepath exists.
        (entry, path, filename_old) = get_entry_or_raise(filepath_old)

        # At this point, decorations, the is-hidden prefix, etc. haven't been
        # stripped.
        (path, filename_new_raw) = split(filepath_new)

        # Make sure the new filepath doesn't exist.

        try:
            get_entry_or_raise(filepath_new, True)
        except GdNotFoundError:
            pass

        gd = get_gdrive()

        try:
            entry = gd.rename(entry, filename_new_raw)
        except:
            _logger.exception("Could not update entry [%s] for rename.", entry)
            raise FuseOSError(EIO)

        # Update our knowledge of the entry.

        path_relations = PathRelations.get_instance()

        try:
            path_relations.register_entry(entry)
        except:
            _logger.exception("Could not register renamed entry: %s", entry)
            raise FuseOSError(EIO)
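
The split() used above is assumed to be os.path.split(), which separates the parent path from the final (still-decorated) component:

    import os.path

    # For example:
    assert os.path.split('/notes/report.txt') == ('/notes', 'report.txt')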
Example #6
    def rmdir(self, filepath):
        """Remove a directory."""

        path_relations = PathRelations.get_instance()

        try:
            entry_clause = path_relations.get_clause_from_path(filepath)
        except GdNotFoundError:
            _logger.exception("Could not process [%s] (rmdir).", filepath)
            raise FuseOSError(ENOENT)
        except:
            _logger.exception("Could not get clause from file-path [%s] "
                              "(rmdir).", filepath)
            raise FuseOSError(EIO)

        if not entry_clause:
            _logger.error("Path [%s] does not exist for rmdir().", filepath)
            raise FuseOSError(ENOENT)

        entry_id = entry_clause[CLAUSE_ID]
        normalized_entry = entry_clause[CLAUSE_ENTRY]

        # Check if not a directory.

        if not normalized_entry.is_directory:
            _logger.error("Can not rmdir() non-directory [%s] with ID [%s].", 
                          filepath, entry_id)

            raise FuseOSError(ENOTDIR)

        # Ensure the folder is empty.

        gd = get_gdrive()

        try:
            found = gd.get_children_under_parent_id(
                        entry_id,
                        max_results=1)
        except:
            _logger.exception("Could not determine if directory [%s] to be "
                              "removed has children.", entry_id)

            raise FuseOSError(EIO)

        if found:
            raise FuseOSError(ENOTEMPTY)

        try:
            gd.remove_entry(normalized_entry)
        except NameError:
            raise FuseOSError(ENOENT)
        except:
            _logger.exception("Could not remove directory [%s] with ID [%s].",
                              filepath, entry_id)

            raise FuseOSError(EIO)
Example #7
    def __create(self, filepath, mode=None):
        """Create a new file.
                
        We don't implement "mode" (permissions) because the model doesn't agree 
        with GD.
        """

        # TODO: Fail if it already exists.

        try:
            result = split_path(filepath, path_resolver)
            (parent_clause, path, filename, mime_type, is_hidden) = result
        except GdNotFoundError:
            _logger.exception("Could not process [%s] (i-create).", filepath)
            raise FuseOSError(ENOENT)
        except:
            _logger.exception("Could not split path [%s] (i-create).",
                              filepath)
            raise FuseOSError(EIO)

        if mime_type is None:
            _, ext = os.path.splitext(filename)
            if ext != '':
                ext = ext[1:]

            mime_type = utility.get_first_mime_type_by_extension(ext)

        distilled_filepath = build_filepath(path, filename)

        gd = get_gdrive()

        try:
            entry = gd.create_file(
                        filename, 
                        [parent_clause[3]], 
                        mime_type,
                        is_hidden=is_hidden)
        except:
            _logger.exception("Could not create empty file [%s] under "
                              "parent with ID [%s].",
                              filename, parent_clause[3])

            raise FuseOSError(EIO)

        path_relations = PathRelations.get_instance()

        try:
            path_relations.register_entry(entry)
        except:
            _logger.exception("Could not register created file in cache.")
            raise FuseOSError(EIO)

        _logger.info("Inner-create of [%s] completed.", distilled_filepath)

        return (entry, path, filename, mime_type)
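
utility.get_first_mime_type_by_extension() belongs to gdrivefs and is not shown here. A rough stand-in built on the standard-library mimetypes module (an approximation with a different lookup table, not the same helper) might look like:

    import mimetypes

    def guess_mime_type_by_extension(ext, default='application/octet-stream'):
        # mimetypes keys off a full filename, so reattach the dot.
        (mime_type, _) = mimetypes.guess_type('stub.' + ext)
        return mime_type if mime_type is not None else default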
Example #8
    def __load_all_children(self, parent_id):
        gd = get_gdrive()

        with PathRelations.rlock:
            children = gd.list_files(parent_id=parent_id)

            child_ids = []
            if children:
                for child in children:
                    self.register_entry(child)

                parent_clause = self.__get_entry_clause_by_id(parent_id)

                parent_clause[4] = True

        return children
Example #9
    def deposit_file(self, mime_type):
        """Write the file to a temporary path, and present a stub (JSON) to the 
        user. This is the only way of getting files that don't have a 
        well-defined filesize without providing a type, ahead of time.
        """

        gd = get_gdrive()

        result = gd.download_to_local(self.__filepath, self.__normalized_entry,
                                      mime_type)

        (length, cache_fault) = result

        _logger.debug(
            "Displaced entry [%s] deposited to [%s] with length "
            "(%d).", self.__normalized_entry, self.__filepath, length)

        return self.get_stub(mime_type, length, self.__filepath)
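
get_stub() is not shown here. A hypothetical illustration of the kind of JSON stub it might produce (field names are invented for illustration and are not gdrivefs' actual stub format):

    import json

    def make_stub(mime_type, length, filepath):
        return json.dumps({
            'mime_type': mime_type,
            'length': length,
            'cached_filepath': filepath,
        })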
Example #10
    def flush(self):
        """The OS wants to effect any changes made to the file."""

        _LOGGER.debug("Flushing opened-file.")

        entry = self.__cache.get(self.__entry_id)

        if self.__is_dirty is False:
            _LOGGER.debug(
                "Flush will be skipped for [%s] because there "
                "are no changes: [%s] IS_LOADED=[%s] "
                "IS_DIRTY=[%d]", entry.id, self.file_path, self.__is_loaded,
                self.__is_dirty)
            return
        else:
            st = os.stat(self.__temp_filepath)

            _LOGGER.debug(
                "Pushing (%d) bytes for entry with ID [%s] to GD "
                "from file-path [%s].", st.st_size, entry.id,
                self.__temp_filepath)

            # TODO: Make sure we sync the mtime to remote.
            gd = get_gdrive()
            entry = gd.update_entry(entry,
                                    filename=entry.title,
                                    data_filepath=self.__temp_filepath,
                                    mime_type=self.mime_type,
                                    parents=entry.parents,
                                    is_hidden=self.__is_hidden)

            self.__is_dirty = False

            # TODO(dustin): For now, we don't cleanup the temporary file. We need to
            #               schedule this using LRU-semantics.

            # Immediately update our current cached entry.

            _LOGGER.debug("Update successful. Updating local cache.")

            path_relations = PathRelations.get_instance()
            path_relations.register_entry(entry)

            _LOGGER.info("Update complete on entry with ID [%s].", entry.id)
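
The TODO above mentions scheduling temporary-file cleanup with LRU semantics. One possible shape for that (an assumption about the intended design, not existing gdrivefs code):

    import collections
    import os

    class TempFileLru(object):
        """Track recently used temp files and unlink the least-recent overflow."""

        def __init__(self, max_entries=32):
            self.__max_entries = max_entries
            self.__paths = collections.OrderedDict()

        def touch(self, filepath):
            # Re-inserting moves the path to the most-recently-used position.
            self.__paths.pop(filepath, None)
            self.__paths[filepath] = True

            while len(self.__paths) > self.__max_entries:
                (victim, _) = self.__paths.popitem(last=False)

                try:
                    os.unlink(victim)
                except OSError:
                    pass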
Example #11
    def flush(self):
        """The OS wants to effect any changes made to the file."""

        _LOGGER.debug("Flushing opened-file.")

        entry = self.__cache.get(self.__entry_id)

        if self.__is_dirty is False:
            _LOGGER.debug("Flush will be skipped for [%s] because there "
                          "are no changes: [%s] IS_LOADED=[%s] "
                          "IS_DIRTY=[%d]", 
                          entry.id, self.file_path, self.__is_loaded, 
                          self.__is_dirty)
            return
        else:
            st = os.stat(self.__temp_filepath)

            _LOGGER.debug("Pushing (%d) bytes for entry with ID from [%s] to "
                          "GD for file-path [%s].",
                          st.st_size, entry.id, self.__temp_filepath)

            # TODO: Make sure we sync the mtime to remote.
            gd = get_gdrive()
            entry = gd.update_entry(
                        entry, 
                        filename=entry.title, 
                        data_filepath=self.__temp_filepath, 
                        mime_type=self.mime_type, 
                        parents=entry.parents, 
                        is_hidden=self.__is_hidden)

            self.__is_dirty = False

            # TODO(dustin): For now, we don't cleanup the temporary file. We need to
            #               schedule this using LRU-semantics.

            # Immediately update our current cached entry.

            _LOGGER.debug("Update successful. Updating local cache.")

            path_relations = PathRelations.get_instance()
            path_relations.register_entry(entry)

            _LOGGER.info("Update complete on entry with ID [%s].", entry.id)
Example #12
    def deposit_file(self, mime_type):
        """Write the file to a temporary path, and present a stub (JSON) to the 
        user. This is the only way of getting files that don't have a 
        well-defined filesize without providing a type, ahead of time.
        """

        gd = get_gdrive()

        result = gd.download_to_local(
                    self.__filepath, 
                    self.__normalized_entry,
                    mime_type)

        (length, cache_fault) = result

        _logger.debug("Displaced entry [%s] deposited to [%s] with length "
                      "(%d).", self.__normalized_entry, self.__filepath, length)

        return self.get_stub(mime_type, length, self.__filepath)
Example #13
    def process_updates(self):
        """Process any changes to our files. Return True if everything is up to
        date or False if we need to be run again.
        """
        # TODO(dustin): Reimplement using the "watch" interface. We'll have to find
        #               more documentation:
        #
        #               https://developers.google.com/drive/v2/reference/changes/watch
        #
        start_at_id = (self.at_change_id + 1)

        gd = get_gdrive()
        result = gd.list_changes(start_change_id=start_at_id)

        (largest_change_id, next_page_token, changes) = result

        _logger.debug(
            "The latest reported change-ID is (%d) and we're "
            "currently at change-ID (%d).", largest_change_id,
            self.at_change_id)

        _logger.debug("(%d) changes will now be applied.", len(changes))

        for change_id, change_tuple in changes:
            # Apply the changes. We expect to be running them from oldest to
            # newest.

            _logger.debug(
                "========== Change with ID (%d) will now be applied. ==========",
                change_id)

            try:
                self.__apply_change(change_id, change_tuple)
            except:
                _logger.exception("There was a problem while processing change"
                                  " with ID (%d). No more changes will be "
                                  "applied." % (change_id))
                return False

            self.at_change_id = change_id

        return (next_page_token is None)
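
Because process_updates() returns False while more change pages remain, a caller is expected to repeat it until it reports being caught up. A hedged usage sketch (the surrounding change-manager object is assumed):

    def drain_changes(change_manager):
        # Keep applying change pages until process_updates() reports that we
        # are at the newest change-ID.
        while change_manager.process_updates() is False:
            pass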
Example #14
    def mkdir(self, filepath, mode):
        """Create the given directory."""

        # TODO: Implement the "mode".

        try:
            result = split_path(filepath, path_resolver)
            (parent_clause, path, filename, mime_type, is_hidden) = result
        except GdNotFoundError:
            _logger.exception("Could not process [%s] (mkdir).", filepath)
            raise FuseOSError(ENOENT)
        except:
            _logger.exception("Could not split path [%s] (mkdir).", filepath)
            raise FuseOSError(EIO)

        parent_id = parent_clause[CLAUSE_ID]
        gd = get_gdrive()

        try:
            entry = gd.create_directory(
                        filename, 
                        [parent_id], 
                        is_hidden=is_hidden)
        except:
            _logger.exception("Could not create directory with name [%s] "
                              "and parent with ID [%s].",
                              filename, parent_clause[0].id)
            raise FuseOSError(EIO)

        _logger.info("Directory [%s] created as ID [%s] under parent with "
                     "ID [%s].", filepath, entry.id, parent_id)

        #parent_clause[4] = False

        path_relations = PathRelations.get_instance()

        try:
            path_relations.register_entry(entry)
        except:
            _logger.exception("Could not register new directory in cache.")
            raise FuseOSError(EIO)
Example #15
    def process_updates(self):
        """Process any changes to our files. Return True if everything is up to
        date or False if we need to be run again.
        """
        # TODO(dustin): Reimplement using the "watch" interface. We'll have to find
        #               more documentation:
        #
        #               https://developers.google.com/drive/v2/reference/changes/watch
        #
        start_at_id = (self.at_change_id + 1)

        gd = get_gdrive()
        result = gd.list_changes(start_change_id=start_at_id)

        (largest_change_id, next_page_token, changes) = result

        _logger.debug("The latest reported change-ID is (%d) and we're "
                      "currently at change-ID (%d).",
                      largest_change_id, self.at_change_id)

        _logger.debug("(%d) changes will now be applied.", len(changes))

        for change_id, change_tuple in changes:
            # Apply the changes. We expect to be running them from oldest to 
            # newest.

            _logger.debug("========== Change with ID (%d) will now be applied. ==========",
                          change_id)

            try:
                self.__apply_change(change_id, change_tuple)
            except:
                _logger.exception("There was a problem while processing change"
                                  " with ID (%d). No more changes will be "
                                  "applied." % (change_id))
                return False

            self.at_change_id = change_id

        return (next_page_token is None)
Example #16
    def __load_base_from_remote(self):
        """Download the data for the entry that we represent. This is probably
        a file, but could also be a stub for -any- entry.
        """

        # If it's loaded and not-changed, don't do anything.
        if self.__is_loaded is True and self.__is_dirty is False:
            _LOGGER.debug("Not syncing-down non-dirty file.")
            return

        if self.__fh is not None:
            self.__fh.close()
            self.__fh = None

        entry = self.__cache.get(self.__entry_id)

        _LOGGER.debug("Ensuring local availability of [%s]: [%s]", entry,
                      self.__temp_filepath)

        # Get the current version of the write-cache file, or note that we
        # don't have it.

        _LOGGER.info(
            "Attempting local cache update of file [%s] for entry "
            "[%s] and mime-type [%s].", self.__temp_filepath, entry,
            self.mime_type)

        if entry.requires_mimetype:
            length = DisplacedFile.file_size

            d = DisplacedFile(entry)
            stub_data = d.deposit_file(self.mime_type).encode('utf-8')

            self.__fh = open(self.__temp_filepath, 'w+b')
            self.__fh.write(stub_data)
        else:
            _LOGGER.debug("Executing the download: [%s] => [%s]", entry.id,
                          self.__temp_filepath)

            try:
                # TODO(dustin): We need to inherit a file that we might've already cached by
                #               opening.
                # TODO(dustin): Any call to download_to_local should use a local,
                #               temporary file if one is already established. We
                #               can't use it in the reverse order though: it's one
                #               thing to already have a cache from having opened it,
                #               and it's another thing to maintain a cache of every
                #               file that is copied.
                gd = get_gdrive()
                result = gd.download_to_local(self.__temp_filepath, entry,
                                              self.mime_type)

                (length, cache_fault) = result
            except ExportFormatError:
                _LOGGER.exception("There was an export-format error.")
                raise fuse.FuseOSError(ENOENT)

            self.__fh = open(self.__temp_filepath, 'r+b')

            self.__is_dirty = False
            self.__is_loaded = True

        _LOGGER.debug("Established base file-data for [%s]: [%s]", entry,
                      self.__temp_filepath)
Example #17
    def get_data(self):
        gd = get_gdrive()
        return gd.get_about_info()
Example #18
    def get_data(self):
        gd = get_gdrive()
        return gd.get_about_info()
Example #19
    def unlink(self, file_path):
        """Remove a file."""
        # TODO: Change to simply move to "trash". Have a FUSE option to elect
        #       this behavior.
        path_relations = PathRelations.get_instance()

        try:
            entry_clause = path_relations.get_clause_from_path(file_path)
        except GdNotFoundError:
            _logger.exception("Could not process [%s] (unlink).", file_path)
            raise FuseOSError(ENOENT)
        except:
            _logger.exception("Could not get clause from file-path [%s] "
                              "(unlink).", file_path)

            raise FuseOSError(EIO)

        if not entry_clause:
            _logger.error("Path [%s] does not exist for unlink().",
                          file_path)

            raise FuseOSError(ENOENT)

        entry_id = entry_clause[CLAUSE_ID]
        normalized_entry = entry_clause[CLAUSE_ENTRY]

        # Check if a directory.

        if normalized_entry.is_directory:
            _logger.error("Can not unlink() directory [%s] with ID [%s]. "
                          "Must be file.", file_path, entry_id)

            raise FuseOSError(errno.EISDIR)

        # Remove online. Complements local removal (if not found locally, a 
        # follow-up request checks online).

        gd = get_gdrive()

        try:
            gd.remove_entry(normalized_entry)
        except NameError:
            raise FuseOSError(ENOENT)
        except:
            _logger.exception("Could not remove file [%s] with ID [%s].",
                              file_path, entry_id)

            raise FuseOSError(EIO)

        # Remove from cache. Will no longer be able to be found, locally.
        PathRelations.get_instance().remove_entry_all(entry_id)

        # Remove from among opened-files.

        om = gdrivefs.opened_file.get_om()

        try:
            opened_file = om.remove_by_filepath(file_path)
        except:
            _logger.exception("There was an error while removing all "
                                 "opened-file instances for file [%s] "
                                 "(remove).", file_path)
            raise FuseOSError(EIO)
Example #20
    def __load_base_from_remote(self):
        """Download the data for the entry that we represent. This is probably 
        a file, but could also be a stub for -any- entry.
        """

        # If it's loaded and not-changed, don't do anything.
        if self.__is_loaded is True and self.__is_dirty is False:
            _LOGGER.debug("Not syncing-down non-dirty file.")
            return

        if self.__fh is not None:
            self.__fh.close()
            self.__fh = None

        entry = self.__cache.get(self.__entry_id)

        _LOGGER.debug("Ensuring local availability of [%s]: [%s]", 
                      entry, self.__temp_filepath)

        # Get the current version of the write-cache file, or note that we 
        # don't have it.

        _LOGGER.info("Attempting local cache update of file [%s] for entry "
                     "[%s] and mime-type [%s].",
                     self.__temp_filepath, entry, self.mime_type)

        if entry.requires_mimetype:
            length = DisplacedFile.file_size

            d = DisplacedFile(entry)
            stub_data = d.deposit_file(self.mime_type).encode('utf-8')

            self.__fh = open(self.__temp_filepath, 'w+b')
            self.__fh.write(stub_data)
        else:
            _LOGGER.debug("Executing the download: [%s] => [%s]", 
                          entry.id, self.__temp_filepath)
            
            try:
                # TODO(dustin): We need to inherit a file that we might've already
                #               cached by opening.
                # TODO(dustin): Any call to download_to_local should use a local,
                #               temporary file if one is already established. We
                #               can't use it in the reverse order though: it's one
                #               thing to already have a cache from having opened it,
                #               and it's another thing to maintain a cache of every
                #               file that is copied.
                gd = get_gdrive()
                result = gd.download_to_local(
                            self.__temp_filepath,
                            entry,
                            self.mime_type)

                (length, cache_fault) = result
            except ExportFormatError:
                _LOGGER.exception("There was an export-format error.")
                raise fuse.FuseOSError(ENOENT)

            self.__fh = open(self.__temp_filepath, 'r+b')

            self.__is_dirty = False
            self.__is_loaded = True

        _LOGGER.debug("Established base file-data for [%s]: [%s]", 
                      entry, self.__temp_filepath)