def upload(self, childpath):
    precondition_abspath(childpath)

    #self.verboseprint("uploading %s.." % quote_local_unicode_path(childpath))
    metadata = get_local_metadata(childpath)

    # we can use the backupdb here
    must_upload, bdb_results = self.check_backupdb_file(childpath)

    if must_upload:
        self.verboseprint("uploading %s.." % quote_local_unicode_path(childpath))
        infileobj = open(childpath, "rb")
        url = self.options['node-url'] + "uri"
        resp = do_http("PUT", url, infileobj)
        if resp.status not in (200, 201):
            raise HTTPError("Error during file PUT", resp)

        filecap = resp.read().strip()
        self.verboseprint(" %s -> %s" % (quote_local_unicode_path(childpath, quotemarks=False),
                                         quote_output(filecap, quotemarks=False)))
        #self.verboseprint(" metadata: %s" % (quote_output(metadata, quotemarks=False),))

        if bdb_results:
            bdb_results.did_upload(filecap)

        return True, filecap, metadata

    else:
        self.verboseprint("skipping %s.." % quote_local_unicode_path(childpath))
        return False, bdb_results.was_uploaded(), metadata
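# A minimal sketch (not part of the snippet above) of the web-API call that
# upload() wraps: the file body is PUT to <node-url>/uri and the response body
# is the resulting filecap. The gateway URL and the use of the `requests`
# library are illustrative assumptions, not the project's do_http helper.
import requests

def put_file_sketch(node_url, localpath):
    # e.g. node_url = "http://127.0.0.1:3456/" (hypothetical local gateway)
    with open(localpath, "rb") as f:
        resp = requests.put(node_url + "uri", data=f)
    if resp.status_code not in (200, 201):
        raise RuntimeError("file PUT failed: %d" % resp.status_code)
    return resp.text.strip()  # the filecap, analogous to `filecap` above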
def __init__(self, progressfunc, pathname, basename):
    precondition_abspath(pathname)

    self.progressfunc = progressfunc
    self.pathname = pathname
    self.children = None
    self._basename = basename
def __init__(self, client, upload_dircap, collective_dircap, local_path_u, dbfile, umask,
             name, uploader_delay=1.0, clock=None, downloader_delay=60):
    precondition_abspath(local_path_u)
    if not os.path.exists(local_path_u):
        raise ValueError("'{}' does not exist".format(local_path_u))
    if not os.path.isdir(local_path_u):
        raise ValueError("'{}' is not a directory".format(local_path_u))

    # this is used by 'service' things and must be unique in this Service hierarchy
    self.name = 'magic-folder-{}'.format(name)

    service.MultiService.__init__(self)

    clock = clock or reactor
    db = magicfolderdb.get_magicfolderdb(dbfile, create_version=(magicfolderdb.SCHEMA_v1, 1))
    if db is None:
        return Failure(Exception('ERROR: Unable to load magic folder db.'))

    # for tests
    self._client = client
    self._db = db

    upload_dirnode = self._client.create_node_from_uri(upload_dircap)
    collective_dirnode = self._client.create_node_from_uri(collective_dircap)

    self.uploader = Uploader(client, local_path_u, db, upload_dirnode, uploader_delay, clock)
    self.downloader = Downloader(client, local_path_u, db, collective_dirnode,
                                 upload_dirnode.get_readonly_uri(), clock,
                                 self.uploader.is_pending, umask, self.set_public_status,
                                 poll_interval=downloader_delay)
    self._public_status = (False, ['Magic folder has not yet started'])
def process(self, localpath):
    precondition_abspath(localpath)
    # returns newdircap

    quoted_path = quote_local_unicode_path(localpath)
    self.verboseprint("processing %s" % (quoted_path,))
    create_contents = {}   # childname -> (type, rocap, metadata)
    compare_contents = {}  # childname -> rocap

    try:
        children = listdir_unicode(localpath)
    except EnvironmentError:
        self.directories_skipped += 1
        self.warn("WARNING: permission denied on directory %s" % (quoted_path,))
        children = []
    except FilenameEncodingError:
        self.directories_skipped += 1
        self.warn("WARNING: could not list directory %s due to a filename encoding error" % (quoted_path,))
        children = []

    for child in self.options.filter_listdir(children):
        assert isinstance(child, unicode), child
        childpath = os.path.join(localpath, child)
        # note: symlinks to directories are both islink() and isdir()
        if os.path.isdir(childpath) and not os.path.islink(childpath):
            metadata = get_local_metadata(childpath)
            # recurse on the child directory
            childcap = self.process(childpath)
            assert isinstance(childcap, str)
            create_contents[child] = ("dirnode", childcap, metadata)
            compare_contents[child] = childcap
        elif os.path.isfile(childpath) and not os.path.islink(childpath):
            try:
                childcap, metadata = self.upload(childpath)
                assert isinstance(childcap, str)
                create_contents[child] = ("filenode", childcap, metadata)
                compare_contents[child] = childcap
            except EnvironmentError:
                self.files_skipped += 1
                self.warn("WARNING: permission denied on file %s" % quote_local_unicode_path(childpath))
        else:
            self.files_skipped += 1
            if os.path.islink(childpath):
                self.warn("WARNING: cannot backup symlink %s" % quote_local_unicode_path(childpath))
            else:
                self.warn("WARNING: cannot backup special file %s" % quote_local_unicode_path(childpath))

    must_create, r = self.check_backupdb_directory(compare_contents)
    if must_create:
        self.verboseprint(" creating directory for %s" % quote_local_unicode_path(localpath))
        newdircap = mkdir(create_contents, self.options)
        assert isinstance(newdircap, str)
        if r:
            r.did_create(newdircap)
        self.directories_created += 1
        return newdircap
    else:
        self.verboseprint(" re-using old directory for %s" % quote_local_unicode_path(localpath))
        self.directories_reused += 1
        return r.was_created()
def __init__(self, client, upload_dircap, collective_dircap, local_path_u, dbfile, umask,
             pending_delay=1.0, clock=None):
    precondition_abspath(local_path_u)

    service.MultiService.__init__(self)

    clock = clock or reactor
    db = magicfolderdb.get_magicfolderdb(dbfile, create_version=(magicfolderdb.SCHEMA_v1, 1))
    if db is None:
        return Failure(Exception('ERROR: Unable to load magic folder db.'))

    # for tests
    self._client = client
    self._db = db

    upload_dirnode = self._client.create_node_from_uri(upload_dircap)
    collective_dirnode = self._client.create_node_from_uri(collective_dircap)

    self.uploader = Uploader(client, local_path_u, db, upload_dirnode, pending_delay, clock)
    self.downloader = Downloader(client, local_path_u, db, collective_dirnode,
                                 upload_dirnode.get_readonly_uri(), clock,
                                 self.uploader.is_pending, umask, self.set_public_status)
    self._public_status = (False, ['Magic folder has not yet started'])
def _write_downloaded_file(self, local_path_u, abspath_u, file_contents, is_conflict=False, now=None):
    self._log("_write_downloaded_file(%r, <%d bytes>, is_conflict=%r, now=%r)"
              % (abspath_u, len(file_contents), is_conflict, now))

    # 1. Write a temporary file, say .foo.tmp.
    # 2. is_conflict determines whether this is an overwrite or a conflict.
    # 3. Set the mtime of the replacement file to be T seconds before the
    #    current local time.
    # 4. Perform a file replacement with backup filename foo.backup,
    #    replaced file foo, and replacement file .foo.tmp. If any step of
    #    this operation fails, reclassify as a conflict and stop.
    #
    # Returns the path of the destination file.

    precondition_abspath(abspath_u)
    replacement_path_u = abspath_u + u".tmp"  # FIXME more unique
    backup_path_u = abspath_u + u".backup"
    if now is None:
        now = time.time()

    initial_path_u = os.path.dirname(abspath_u)
    fileutil.make_dirs_with_absolute_mode(local_path_u, initial_path_u, (~self._umask) & 0777)
    fileutil.write(replacement_path_u, file_contents)
    os.chmod(replacement_path_u, (~self._umask) & 0777)

    # FUDGE_SECONDS is used to determine if another process has written to the
    # same file concurrently. This is described in the Earth Dragon section of
    # our design document: docs/proposed/magic-folder/remote-to-local-sync.rst
    os.utime(replacement_path_u, (now, now - self.FUDGE_SECONDS))

    if is_conflict:
        return self._rename_conflicted_file(abspath_u, replacement_path_u)
    else:
        try:
            fileutil.replace_file(abspath_u, replacement_path_u, backup_path_u)
            return abspath_u
        except fileutil.ConflictError:
            return self._rename_conflicted_file(abspath_u, replacement_path_u)
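# A stdlib-only sketch of the replacement pattern described in steps 1-4 above:
# write <name>.tmp, backdate its mtime, swap it in while keeping the old
# contents as <name>.backup, and fall back to a conflict copy if the swap
# fails. The FUDGE_SECONDS value, the ".conflict" suffix, and the reliance on
# POSIX rename semantics are illustrative assumptions, not the magic-folder
# implementation (which uses fileutil.replace_file and ConflictError).
import os, time, shutil

FUDGE_SECONDS = 3  # assumed value, for illustration only

def write_downloaded_sketch(abspath, contents, now=None):
    tmp = abspath + ".tmp"
    backup = abspath + ".backup"
    if now is None:
        now = time.time()
    with open(tmp, "wb") as f:
        f.write(contents)
    # backdate the mtime so a concurrent local write is detectable later
    os.utime(tmp, (now, now - FUDGE_SECONDS))
    try:
        if os.path.exists(abspath):
            shutil.copy2(abspath, backup)  # keep the replaced version around
        os.rename(tmp, abspath)            # swap the new contents into place
        return abspath
    except OSError:
        conflict = abspath + ".conflict"   # reclassify as a conflict and stop
        os.rename(tmp, conflict)
        return conflict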
def __init__(self, authorized_keys_file):
    precondition_abspath(authorized_keys_file)
    self.authorized_keys_file = authorized_keys_file
def __init__(self, pathname, basename):
    precondition_abspath(pathname)
    self.pathname = pathname
    self._basename = basename
def __init__(self, pathname):
    precondition_abspath(pathname)
    self.pathname = pathname
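# Every snippet above begins by asserting its path argument with
# precondition_abspath(). A minimal sketch of the kind of check that name
# suggests (not the project's actual helper): reject anything that is not an
# absolute path before it is used for filesystem operations.
import os

def precondition_abspath_sketch(path):
    if not os.path.isabs(path):
        raise AssertionError("not an absolute path: %r" % (path,))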