def check_backupdb_directory(self, compare_contents):
    if not self.backupdb:
        return True, None
    r = self.backupdb.check_directory(compare_contents)

    if not r.was_created():
        return True, r

    if not r.should_check():
        # the directory was created or checked recently, so we can just
        # use it
        return False, r

    # we must check the directory before re-using it
    dircap = r.was_created()
    self.verboseprint("checking %s" % quote_output(dircap))
    nodeurl = self.options['node-url']
    checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(dircap)
    self.directories_checked += 1
    resp = do_http("POST", checkurl)
    if resp.status != 200:
        # can't check, so we must assume it's bad
        return True, r

    cr = simplejson.loads(resp.read())
    healthy = cr["results"]["healthy"]
    if not healthy:
        # must create
        return True, r

    # directory is healthy, no need to upload
    r.did_check_healthy(cr)
    return False, r
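# A minimal sketch of the health-check decision above, run against a sample
# `t=check&output=JSON` response body. The JSON shape ("results" ->
# "healthy") is what check_backupdb_directory() reads; the sample values
# here are invented for illustration, and json stands in for simplejson.
import json

sample_body = '{"results": {"healthy": false, "count-shares-good": 7}}'
cr = json.loads(sample_body)
must_recreate = not cr["results"]["healthy"]
print(must_recreate)  # True: an unhealthy directory gets re-created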
def _get_json_for_fragment(options, fragment, method='GET', post_args=None):
    nodeurl = options['node-url']
    if nodeurl.endswith('/'):
        nodeurl = nodeurl[:-1]

    url = u'%s/%s' % (nodeurl, fragment)
    if method == 'POST':
        if post_args is None:
            raise ValueError("Must pass post_args= for POST method")
        body = urllib.urlencode(post_args)
    else:
        body = ''
        if post_args is not None:
            raise ValueError("post_args= only valid for POST method")
    resp = do_http(method, url, body=body)
    if isinstance(resp, BadResponse):
        # specifically NOT using format_http_error() here because the
        # URL is pretty sensitive (we're doing /uri/<key>).
        raise RuntimeError(
            "Failed to get json from '%s': %s" % (nodeurl, resp.error)
        )

    data = resp.read()
    parsed = simplejson.loads(data)
    if parsed is None:
        raise RuntimeError("No data from '%s'" % (nodeurl,))
    return parsed
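# Hypothetical illustration of the strings _get_json_for_fragment() builds;
# do_http() belongs to the surrounding codebase, so only the URL and POST
# body construction are reproduced here (Python 3 spellings; the node URL
# and fragment are placeholders).
from urllib.parse import urlencode

nodeurl = "http://127.0.0.1:3456/"
fragment = "uri/URI:DIR2:example?t=json"
url = u'%s/%s' % (nodeurl[:-1] if nodeurl.endswith('/') else nodeurl, fragment)
body = urlencode({"t": "rename", "from_name": "old", "to_name": "new"})
print(url)
print(body)  # t=rename&from_name=old&to_name=new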
def upload(self, childpath):
    precondition(isinstance(childpath, unicode), childpath)

    #self.verboseprint("uploading %s.." % quote_output(childpath))
    metadata = get_local_metadata(childpath)

    # we can use the backupdb here
    must_upload, bdb_results = self.check_backupdb_file(childpath)

    if must_upload:
        self.verboseprint("uploading %s.." % quote_output(childpath))
        infileobj = open(childpath, "rb")
        url = self.options['node-url'] + "uri"
        resp = do_http("PUT", url, infileobj)
        if resp.status not in (200, 201):
            raise HTTPError("Error during file PUT", resp)

        filecap = resp.read().strip()
        self.verboseprint(" %s -> %s" % (quote_output(childpath, quotemarks=False),
                                         quote_output(filecap, quotemarks=False)))
        #self.verboseprint(" metadata: %s" % (quote_output(metadata, quotemarks=False),))

        if bdb_results:
            bdb_results.did_upload(filecap)

        self.files_uploaded += 1
        return filecap, metadata

    else:
        self.verboseprint("skipping %s.." % quote_output(childpath))
        self.files_reused += 1
        return bdb_results.was_uploaded(), metadata
def create_alias(options):
    # mkdir+add_alias
    nodedir = options['node-directory']
    alias = options.alias
    stdout = options.stdout
    stderr = options.stderr
    assert ":" not in alias
    assert " " not in alias
    old_aliases = get_aliases(nodedir)
    if alias in old_aliases:
        print >>stderr, "Alias %s already exists!" % quote_output(alias)
        return 1
    aliasfile = os.path.join(nodedir, "private", "aliases")

    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    url = nodeurl + "uri?t=mkdir"
    resp = do_http("POST", url)
    rc = check_http_error(resp, stderr)
    if rc:
        return rc
    new_uri = resp.read().strip()

    # probably check for others..

    add_line_to_aliasfile(aliasfile, alias, new_uri)

    print >>stdout, "Alias %s created" % (quote_output(alias),)
    return 0
def unlink(options, command="unlink"):
    """
    @return: a Deferred which eventually fires with the exit code
    """
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    if not path:
        print("""
'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,),
              file=stderr)
        return 1

    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    url += "/" + escape_path(path)

    resp = do_http("DELETE", url)

    if resp.status in (200,):
        print(format_http_success(resp), file=stdout)
        return 0

    print(format_http_error("ERROR", resp), file=stderr)
    return 1
def run(self, options):
    stderr = options.stderr
    self.options = options
    self.ophandle = ophandle = base32.b2a(os.urandom(16))
    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    self.nodeurl = nodeurl
    where = options.where
    try:
        rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    if path == '/':
        path = ''
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        url += "/" + escape_path(path)
    # todo: should it end with a slash?
    url = self.make_url(url, ophandle)
    resp = do_http("POST", url)
    if resp.status not in (200, 302):
        print(format_http_error("ERROR", resp), file=stderr)
        return 1

    # now we poll for results. We nominally poll at t=1, 5, 10, 30, 60,
    # 90, k*120 seconds, but if the poll takes non-zero time, that will
    # be slightly longer. I'm not worried about trying to make up for
    # that time.

    return self.wait_for_results()
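# The comment above names a nominal poll schedule of t=1, 5, 10, 30, 60, 90,
# then k*120 seconds. A small self-contained sketch of that schedule as a
# generator (poll_times() is an invented name, not part of the real code):
import itertools

def poll_times():
    for t in (1, 5, 10, 30, 60, 90):
        yield t
    k = 1
    while True:
        yield k * 120
        k += 1

print(list(itertools.islice(poll_times(), 9)))  # [1, 5, 10, 30, 60, 90, 120, 240, 360]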
def check_backupdb_file(self, childpath):
    if not self.backupdb:
        return True, None
    use_timestamps = not self.options["ignore-timestamps"]
    r = self.backupdb.check_file(childpath, use_timestamps)

    if not r.was_uploaded():
        return True, r

    if not r.should_check():
        # the file was uploaded or checked recently, so we can just use
        # it
        return False, r

    # we must check the file before using the results
    filecap = r.was_uploaded()
    self.verboseprint("checking %s" % quote_output(filecap))
    nodeurl = self.options['node-url']
    checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(filecap)
    self.files_checked += 1
    resp = do_http("POST", checkurl)
    if resp.status != 200:
        # can't check, so we must assume it's bad
        return True, r

    cr = simplejson.loads(resp.read())
    healthy = cr["results"]["healthy"]
    if not healthy:
        # must upload
        return True, r

    # file is healthy, no need to upload
    r.did_check_healthy(cr)
    return False, r
def get_source_info(self, source_spec):
    """
    This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
    """
    precondition(isinstance(source_spec, unicode), source_spec)
    rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
    path = path_utf8.decode("utf-8")
    # any trailing slash is removed in abspath_expanduser_unicode(), so
    # make a note of it here, to throw an error later
    had_trailing_slash = path.endswith("/")
    if rootcap == DefaultAliasMarker:
        # no alias, so this is a local file
        pathname = abspath_expanduser_unicode(path)
        name = os.path.basename(pathname)
        if not os.path.exists(pathname):
            raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
        if os.path.isdir(pathname):
            t = LocalDirectorySource(self.progress, pathname, name)
        else:
            if had_trailing_slash:
                raise FilenameWithTrailingSlashError(source_spec,
                                                     quotefn=quote_local_unicode_path)
            if not os.path.isfile(pathname):
                raise WeirdSourceError(pathname)
            t = LocalFileSource(pathname, name) # non-empty
    else:
        # this is a tahoe object
        url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
        name = None
        if path:
            if path.endswith("/"):
                path = path[:-1]
            url += "/" + escape_path(path)
            last_slash = path.rfind(u"/")
            name = path
            if last_slash != -1:
                name = path[last_slash+1:]

        resp = do_http("GET", url + "?t=json")
        if resp.status == 404:
            raise MissingSourceError(source_spec)
        elif resp.status != 200:
            raise HTTPError("Error examining source %s" % quote_output(source_spec),
                            resp)
        parsed = json.loads(resp.read())
        nodetype, d = parsed
        if nodetype == "dirnode":
            t = TahoeDirectorySource(self.nodeurl, self.cache,
                                     self.progress, name)
            t.init_from_parsed(parsed)
        else:
            if had_trailing_slash:
                raise FilenameWithTrailingSlashError(source_spec)
            writecap = to_str(d.get("rw_uri"))
            readcap = to_str(d.get("ro_uri"))
            mutable = d.get("mutable", False) # older nodes don't provide it
            t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
    return t
def make_tahoe_subdirectory(nodeurl, parent_writecap, name):
    url = nodeurl + "/".join(["uri",
                              urllib.quote(parent_writecap),
                              urllib.quote(unicode_to_url(name)),
                              ]) + "?t=mkdir"
    resp = do_http("POST", url)
    if resp.status in (200, 201):
        return resp.read().strip()
    raise HTTPError("Error during mkdir", resp)
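# Sketch of the mkdir URL built above, using Python 3's urllib.parse.quote in
# place of the Python 2 urllib.quote; the cap and name are made-up
# placeholders. Note that quote() escapes the ":" characters in the cap.
from urllib.parse import quote

nodeurl = "http://127.0.0.1:3456/"
parent_writecap = "URI:DIR2:example"
name = u"photos"
url = nodeurl + "/".join(["uri",
                          quote(parent_writecap),
                          quote(name.encode("utf-8")),
                          ]) + "?t=mkdir"
print(url)  # http://127.0.0.1:3456/uri/URI%3ADIR2%3Aexample/photos?t=mkdir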
def mkdir(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if where:
        try:
            rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1

    if not where or not path:
        # create a new unlinked directory
        url = nodeurl + "uri?t=mkdir"
        if options["format"]:
            url += "&format=%s" % urllib.quote(options['format'])
        resp = do_http("POST", url)
        rc = check_http_error(resp, stderr)
        if rc:
            return rc
        new_uri = resp.read().strip()
        # emit its write-cap
        print(quote_output(new_uri, quotemarks=False), file=stdout)
        return 0

    # create a new directory at the given location
    if path.endswith("/"):
        path = path[:-1]
    # path must be "/".join([s.encode("utf-8") for s in segments])
    url = nodeurl + "uri/%s/%s?t=mkdir" % (urllib.quote(rootcap),
                                           urllib.quote(path))
    if options['format']:
        url += "&format=%s" % urllib.quote(options['format'])

    resp = do_http("POST", url)
    check_http_error(resp, stderr)
    new_uri = resp.read().strip()
    print(quote_output(new_uri, quotemarks=False), file=stdout)
    return 0
def get_source_info(self, source_spec):
    """
    This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
    """
    precondition(isinstance(source_spec, unicode), source_spec)
    rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
    path = path_utf8.decode("utf-8")
    if rootcap == DefaultAliasMarker:
        # no alias, so this is a local file
        pathname = abspath_expanduser_unicode(path)
        name = os.path.basename(pathname)
        if not os.path.exists(pathname):
            raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
        if os.path.isdir(pathname):
            t = LocalDirectorySource(self.progress, pathname, name)
        else:
            assert os.path.isfile(pathname)
            t = LocalFileSource(pathname, name) # non-empty
    else:
        # this is a tahoe object
        url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
        name = None
        if path:
            url += "/" + escape_path(path)
            last_slash = path.rfind(u"/")
            name = path
            if last_slash != -1:
                name = path[last_slash+1:]

        resp = do_http("GET", url + "?t=json")
        if resp.status == 404:
            raise MissingSourceError(source_spec)
        elif resp.status != 200:
            raise HTTPError("Error examining source %s" % quote_output(source_spec),
                            resp)
        parsed = simplejson.loads(resp.read())
        nodetype, d = parsed
        if nodetype == "dirnode":
            t = TahoeDirectorySource(self.nodeurl, self.cache,
                                     self.progress, name)
            t.init_from_parsed(parsed)
        else:
            writecap = to_str(d.get("rw_uri"))
            readcap = to_str(d.get("ro_uri"))
            mutable = d.get("mutable", False) # older nodes don't provide it
            last_slash = source_spec.rfind(u"/")
            if last_slash != -1:
                # TODO: this looks funny and redundant with the 'name'
                # assignment above. cf #2329
                name = source_spec[last_slash+1:]
            t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
    return t
def init_from_grid(self, writecap, readcap):
    self.writecap = writecap
    self.readcap = readcap
    bestcap = writecap or readcap
    url = self.nodeurl + "uri/%s" % urllib.quote(bestcap)
    resp = do_http("GET", url + "?t=json")
    if resp.status != 200:
        raise HTTPError("Error examining target directory", resp)
    parsed = simplejson.loads(resp.read())
    nodetype, d = parsed
    assert nodetype == "dirnode"
    self.mutable = d.get("mutable", False) # older nodes don't provide it
    self.children_d = dict([(unicode(name), value)
                            for (name, value)
                            in d["children"].iteritems()])
    self.children = None
def mkdir(contents, options):
    kids = dict([(childname, (contents[childname][0],
                              {"ro_uri": contents[childname][1],
                               "metadata": contents[childname][2],
                               }))
                 for childname in contents])
    body = simplejson.dumps(kids).encode("utf-8")
    url = options['node-url'] + "uri?t=mkdir-immutable"
    resp = do_http("POST", url, body)
    if resp.status < 200 or resp.status >= 300:
        raise HTTPError("Error during mkdir", resp)
    dircap = to_str(resp.read().strip())
    return dircap
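# A sketch of the request body mkdir() sends to ?t=mkdir-immutable, with one
# invented child entry; json stands in for simplejson here.
import json

contents = {u"file.txt": ("filenode", "URI:CHK:example", {"mtime": 1234567890})}
kids = dict([(childname, (contents[childname][0],
                          {"ro_uri": contents[childname][1],
                           "metadata": contents[childname][2],
                           }))
             for childname in contents])
body = json.dumps(kids).encode("utf-8")
print(body)  # b'{"file.txt": ["filenode", {"ro_uri": ..., "metadata": ...}]}'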
def deepcheck_location(self, options, where):
    stdout = options.stdout
    stderr = options.stderr
    self.rc = 0
    self.options = options
    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    self.nodeurl = nodeurl
    try:
        rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    if path == '/':
        path = ''
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        url += "/" + escape_path(path)
    # todo: should it end with a slash?
    url += "?t=stream-deep-check"
    if options["verify"]:
        url += "&verify=true"
    if options["repair"]:
        url += "&repair=true"
        output = DeepCheckAndRepairOutput(self, options)
    else:
        output = DeepCheckOutput(self, options)
    if options["add-lease"]:
        url += "&add-lease=true"
    resp = do_http("POST", url)
    if resp.status not in (200, 302):
        print(format_http_error("ERROR", resp), file=stderr)
        return 1

    # use Twisted to split this into lines
    while True:
        chunk = resp.read(100)
        if not chunk:
            break
        if self.options["raw"]:
            stdout.write(chunk)
        else:
            output.dataReceived(chunk)
    if not self.options["raw"]:
        output.done()
    return 0
def get_source_info(self, source_spec):
    rootcap, path = get_alias(self.aliases, source_spec, None)
    if rootcap == DefaultAliasMarker:
        # no alias, so this is a local file
        pathname = abspath_expanduser_unicode(path.decode('utf-8'))
        name = os.path.basename(pathname)
        if not os.path.exists(pathname):
            raise MissingSourceError(source_spec)
        if os.path.isdir(pathname):
            t = LocalDirectorySource(self.progress, pathname)
        else:
            assert os.path.isfile(pathname)
            t = LocalFileSource(pathname) # non-empty
    else:
        # this is a tahoe object
        url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
        name = None
        if path:
            url += "/" + escape_path(path)
            # rfind() returns -1 when there is no slash, so test explicitly
            last_slash = path.rfind("/")
            name = path
            if last_slash != -1:
                name = path[last_slash+1:]

        resp = do_http("GET", url + "?t=json")
        if resp.status == 404:
            raise MissingSourceError(source_spec)
        elif resp.status != 200:
            raise HTTPError("Error examining source %s" % quote_output(source_spec),
                            resp)
        parsed = simplejson.loads(resp.read())
        nodetype, d = parsed
        if nodetype == "dirnode":
            t = TahoeDirectorySource(self.nodeurl, self.cache, self.progress)
            t.init_from_parsed(parsed)
        else:
            writecap = to_str(d.get("rw_uri"))
            readcap = to_str(d.get("ro_uri"))
            mutable = d.get("mutable", False) # older nodes don't provide it
            if source_spec.rfind('/') != -1:
                name = source_spec[source_spec.rfind('/')+1:]
            t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap)
    return name, t
def get_target_info(self, destination_spec):
    precondition(isinstance(destination_spec, unicode), destination_spec)
    rootcap, path_utf8 = get_alias(self.aliases, destination_spec, None)
    path = path_utf8.decode("utf-8")
    if rootcap == DefaultAliasMarker:
        # no alias, so this is a local file
        pathname = abspath_expanduser_unicode(path)
        if not os.path.exists(pathname):
            t = LocalMissingTarget(pathname)
        elif os.path.isdir(pathname):
            t = LocalDirectoryTarget(self.progress, pathname)
        else:
            # TODO: should this be _assert? what happens if the target is
            # a special file?
            assert os.path.isfile(pathname), pathname
            t = LocalFileTarget(pathname) # non-empty
    else:
        # this is a tahoe object
        url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
        if path:
            url += "/" + escape_path(path)

        resp = do_http("GET", url + "?t=json")
        if resp.status == 404:
            # doesn't exist yet
            t = TahoeMissingTarget(url)
        elif resp.status == 200:
            parsed = json.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectoryTarget(self.nodeurl, self.cache,
                                         self.progress)
                t.init_from_parsed(parsed)
            else:
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False)
                t = TahoeFileTarget(self.nodeurl, mutable,
                                    writecap, readcap, url)
        else:
            raise HTTPError("Error examining target %s"
                            % quote_output(destination_spec), resp)
    return t
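# The ?t=json probe used above returns a (nodetype, details) pair; a compact
# sample of dispatching on it, with invented values:
import json

parsed = json.loads('["dirnode", {"mutable": true, "children": {}}]')
nodetype, d = parsed
if nodetype == "dirnode":
    print("directory target")   # -> TahoeDirectoryTarget in the code above
else:
    print("file target")        # -> TahoeFileTarget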
def poll(self):
    url = self.nodeurl + "operations/" + self.ophandle
    url += "?t=status&output=JSON&release-after-complete=true"
    stdout = self.options.stdout
    stderr = self.options.stderr
    resp = do_http("GET", url)
    if resp.status != 200:
        print >>stderr, format_http_error("ERROR", resp)
        return True
    jdata = resp.read()
    data = simplejson.loads(jdata)
    if not data["finished"]:
        return False
    if self.options.get("raw"):
        if is_printable_ascii(jdata):
            print >>stdout, jdata
        else:
            print >>stderr, "The JSON response contained unprintable characters:\n%s" % quote_output(jdata)
        return True
    self.write_results(data)
    return True
def get(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    from_file = options.from_file
    to_file = options.to_file
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, path = get_alias(aliases, from_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        url += "/" + escape_path(path)

    resp = do_http("GET", url)
    if resp.status in (200, 201,):
        if to_file:
            outf = open(to_file, "wb")
        else:
            outf = stdout
        while True:
            data = resp.read(4096)
            if not data:
                break
            outf.write(data)
        if to_file:
            outf.close()
        rc = 0
    else:
        print(format_http_error("Error during GET", resp), file=stderr)
        rc = 1

    return rc
def run(self, options):
    self.rc = 0
    stdout = options.stdout
    stderr = options.stderr
    self.options = options
    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    self.nodeurl = nodeurl
    where = options.where
    try:
        rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    if path == '/':
        path = ''
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        url += "/" + escape_path(path)
    # todo: should it end with a slash?
    url += "?t=stream-manifest"
    resp = do_http("POST", url)
    if resp.status not in (200, 302):
        print(format_http_error("ERROR", resp), file=stderr)
        return 1
    #print "RESP", dir(resp)
    # use Twisted to split this into lines
    self.in_error = False
    while True:
        chunk = resp.read(100)
        if not chunk:
            break
        if self.options["raw"]:
            stdout.write(chunk)
        else:
            self.dataReceived(chunk)
    return self.rc
def create_alias(options):
    # mkdir+add_alias
    nodedir = options['node-directory']
    alias = options.alias
    precondition(isinstance(alias, unicode), alias=alias)
    stdout = options.stdout
    stderr = options.stderr
    if u":" in alias:
        # a single trailing colon will already have been stripped if present
        print >>stderr, "Alias names cannot contain colons."
        return 1
    if u" " in alias:
        print >>stderr, "Alias names cannot contain spaces."
        return 1

    old_aliases = get_aliases(nodedir)
    if alias in old_aliases:
        print >>stderr, "Alias %s already exists!" % quote_output(alias)
        return 1
    aliasfile = os.path.join(nodedir, "private", "aliases")

    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    url = nodeurl + "uri?t=mkdir"
    resp = do_http("POST", url)
    rc = check_http_error(resp, stderr)
    if rc:
        return rc
    new_uri = resp.read().strip()

    # probably check for others..

    add_line_to_aliasfile(aliasfile, alias, new_uri)

    print >>stdout, "Alias %s created" % (quote_output(alias),)
    return 0
def mkdir(targeturl):
    url = targeturl + "?t=mkdir"
    resp = do_http("POST", url)
    if resp.status in (200, 201):
        return resp.read().strip()
    raise HTTPError("Error during mkdir", resp)
def POST(url, data):
    resp = do_http("POST", url, data)
    if resp.status in (200, 201):
        return resp.read()
    raise HTTPError("Error during POST", resp)
def GET_to_file(url):
    resp = do_http("GET", url)
    if resp.status == 200:
        return resp
    raise HTTPError("Error during GET", resp)
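# All of the helpers above delegate to do_http(method, url, body). A minimal
# stand-in with the same call shape, built on the Python 3 stdlib, might look
# like this (a sketch, not the real allmydata.scripts implementation):
from urllib.parse import urlparse
import http.client

def do_http_sketch(method, url, body=b""):
    parts = urlparse(url)
    conn = http.client.HTTPConnection(parts.hostname, parts.port or 80)
    path = parts.path + ("?" + parts.query if parts.query else "")
    conn.request(method, path, body)
    # the returned object has .status and .read(), as the helpers expect
    return conn.getresponse()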
to_file = options.to_file
stdout = options.stdout
stderr = options.stderr

if nodeurl[-1] != "/":
    nodeurl += "/"

try:
    rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
except UnknownAliasError, e:
    e.display(stderr)
    return 1
from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
if from_path:
    from_url += "/" + escape_path(from_path)
# figure out the source cap
resp = do_http("GET", from_url + "?t=json")
if not re.search(r'^2\d\d$', str(resp.status)):
    print >>stderr, format_http_error("Error", resp)
    return 1
data = resp.read()
nodetype, attrs = json.loads(data)
cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])

# now get the target
try:
    rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
except UnknownAliasError, e:
    e.display(stderr)
    return 1
to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
if path:
def run(self):
    options = self.options
    nodeurl = options['node-url']
    self.verbosity = 1
    if options['quiet']:
        self.verbosity = 0
    if options['verbose']:
        self.verbosity = 2
    stdout = options.stdout
    stderr = options.stderr

    start_timestamp = datetime.datetime.now()
    bdbfile = os.path.join(options["node-directory"],
                           "private", "backupdb.sqlite")
    bdbfile = abspath_expanduser_unicode(bdbfile)
    self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
    if not self.backupdb:
        print("ERROR: Unable to load backup db.", file=stderr)
        return 1

    try:
        rootcap, path = get_alias(options.aliases, options.to_dir, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    to_url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
    if path:
        to_url += escape_path(path)
    if not to_url.endswith("/"):
        to_url += "/"

    archives_url = to_url + "Archives/"

    # first step: make sure the target directory exists, as well as the
    # Archives/ subdirectory.
    resp = do_http("GET", archives_url + "?t=json")
    if resp.status == 404:
        resp = do_http("POST", archives_url + "?t=mkdir")
        if resp.status != 200:
            print(format_http_error("Unable to create target directory", resp),
                  file=stderr)
            return 1

    # second step: process the tree
    targets = list(collect_backup_targets(
        options.from_dir,
        listdir_unicode,
        self.options.filter_listdir,
    ))
    completed = run_backup(
        warn=self.warn,
        upload_file=self.upload,
        upload_directory=self.upload_directory,
        targets=targets,
        start_timestamp=start_timestamp,
        stdout=stdout,
    )
    new_backup_dircap = completed.dircap

    # third: attach the new backup to the list
    now = time_format.iso_utc(int(time.time()), sep="_") + "Z"
    put_child(archives_url, now, new_backup_dircap)
    put_child(to_url, "Latest", new_backup_dircap)

    print(completed.report(
        self.verbosity,
        self._files_checked,
        self._directories_checked,
    ), file=stdout)

    # The command exits with code 2 if files or directories were skipped
    if completed.any_skips():
        return 2

    # done!
    return 0
if not nodeurl.endswith("/"):
    nodeurl += "/"
if where.endswith("/"):
    where = where[:-1]
try:
    rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
except UnknownAliasError, e:
    e.display(stderr)
    return 1
url = nodeurl + "uri/%s" % urllib.quote(rootcap)
if path:
    # move where.endswith check here?
    url += "/" + escape_path(path)
assert not url.endswith("/")
url += "?t=json"
resp = do_http("GET", url)
if resp.status == 404:
    print >>stderr, "No such file or directory"
    return 2
if resp.status != 200:
    print >>stderr, format_http_error("Error during GET", resp)
    if resp.status == 0:
        return 3
    else:
        return resp.status

data = resp.read()

if options['json']:
    # The webapi server should always output printable ASCII.
    if is_printable_ascii(data):
# unlinked upload
url = nodeurl + "uri"
queryargs = []
if mutable:
    queryargs.append("mutable=true")
if format:
    queryargs.append("format=%s" % format)
if queryargs:
    url += "?" + "&".join(queryargs)

if from_file:
    infileobj = open(os.path.expanduser(from_file), "rb")
else:
    # do_http() can't use stdin directly: for one thing, we need a
    # Content-Length field. So we currently must copy it.
    if verbosity > 0:
        print >>stderr, "waiting for file data on stdin.."
    data = stdin.read()
    infileobj = StringIO(data)

resp = do_http("PUT", url, infileobj)

if resp.status in (200, 201,):
    print >>stderr, format_http_success(resp)
    print >>stdout, quote_output(resp.read(), quotemarks=False)
    return 0

print >>stderr, format_http_error("Error", resp)
return 1
def put_child(dirurl, childname, childcap):
    assert dirurl[-1] == "/"
    url = dirurl + urllib.quote(unicode_to_url(childname)) + "?t=uri"
    resp = do_http("PUT", url, childcap)
    if resp.status not in (200, 201):
        raise HTTPError("Error during put_child", resp)
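# Sketch of the PUT URL put_child() builds when attaching a child by cap;
# the dirurl and childname are placeholders, and Python 3's
# urllib.parse.quote stands in for the Python 2 spelling used above.
from urllib.parse import quote

dirurl = "http://127.0.0.1:3456/uri/URI%3ADIR2%3Aexample/"
childname = u"Latest"
url = dirurl + quote(childname.encode("utf-8")) + "?t=uri"
print(url)  # ...%3Aexample/Latest?t=uri (the request body carries the cap)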
to_file = options.to_file
stdout = options.stdout
stderr = options.stderr

if nodeurl[-1] != "/":
    nodeurl += "/"

try:
    rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
except UnknownAliasError, e:
    e.display(stderr)
    return 1
from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
if from_path:
    from_url += "/" + escape_path(from_path)
# figure out the source cap
resp = do_http("GET", from_url + "?t=json")
if not re.search(r'^2\d\d$', str(resp.status)):
    print >>stderr, format_http_error("Error", resp)
    return 1
data = resp.read()
nodetype, attrs = simplejson.loads(data)
cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])

# now get the target
try:
    rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
except UnknownAliasError, e:
    e.display(stderr)
    return 1
to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
if path:
class BackerUpper:
    def __init__(self, options):
        self.options = options
        self.files_uploaded = 0
        self.files_reused = 0
        self.files_checked = 0
        self.files_skipped = 0
        self.directories_created = 0
        self.directories_reused = 0
        self.directories_checked = 0
        self.directories_skipped = 0

    def run(self):
        options = self.options
        nodeurl = options['node-url']
        self.verbosity = 1
        if options['quiet']:
            self.verbosity = 0
        if options['verbose']:
            self.verbosity = 2
        stdout = options.stdout
        stderr = options.stderr

        start_timestamp = datetime.datetime.now()
        self.backupdb = None
        bdbfile = os.path.join(options["node-directory"],
                               "private", "backupdb.sqlite")
        bdbfile = abspath_expanduser_unicode(bdbfile)
        self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
        if not self.backupdb:
            print >>stderr, "ERROR: Unable to load backup db."
            return 1

        try:
            rootcap, path = get_alias(options.aliases, options.to_dir, DEFAULT_ALIAS)
        except UnknownAliasError, e:
            e.display(stderr)
            return 1
        to_url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
        if path:
            to_url += escape_path(path)
        if not to_url.endswith("/"):
            to_url += "/"

        archives_url = to_url + "Archives/"

        # first step: make sure the target directory exists, as well as the
        # Archives/ subdirectory.
        resp = do_http("GET", archives_url + "?t=json")
        if resp.status == 404:
            resp = do_http("POST", archives_url + "?t=mkdir")
            if resp.status != 200:
                print >>stderr, format_http_error("Unable to create target directory",
                                                  resp)
                return 1

        # second step: process the tree
        new_backup_dircap = self.process(options.from_dir)

        # third: attach the new backup to the list
        now = time_format.iso_utc(int(time.time()), sep="_") + "Z"
        put_child(archives_url, now, new_backup_dircap)
        put_child(to_url, "Latest", new_backup_dircap)
        end_timestamp = datetime.datetime.now()
        # calc elapsed time, omitting microseconds
        elapsed_time = str(end_timestamp - start_timestamp).split('.')[0]

        if self.verbosity >= 1:
            print >>stdout, (" %d files uploaded (%d reused), "
                             "%d files skipped, "
                             "%d directories created (%d reused), "
                             "%d directories skipped"
                             % (self.files_uploaded,
                                self.files_reused,
                                self.files_skipped,
                                self.directories_created,
                                self.directories_reused,
                                self.directories_skipped))
        if self.verbosity >= 2:
            print >>stdout, (" %d files checked, %d directories checked"
                             % (self.files_checked,
                                self.directories_checked))
        print >>stdout, " backup done, elapsed time: %s" % elapsed_time

        # The command exits with code 2 if files or directories were skipped
        if self.files_skipped or self.directories_skipped:
            return 2

        # done!
        return 0
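# The elapsed-time line above leans on str(timedelta) formatting; a quick
# self-contained check of that trick with made-up timestamps:
import datetime

start = datetime.datetime(2024, 1, 1, 12, 0, 0)
end = datetime.datetime(2024, 1, 1, 13, 2, 3, 456789)
print(str(end - start).split('.')[0])  # 1:02:03 (microseconds omitted)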
    return 1
if path == '/':
    path = ''
url = nodeurl + "uri/%s" % urllib.quote(rootcap)
if path:
    url += "/" + escape_path(path)
# todo: should it end with a slash?
url += "?t=check&output=JSON"
if options["verify"]:
    url += "&verify=true"
if options["repair"]:
    url += "&repair=true"
if options["add-lease"]:
    url += "&add-lease=true"

resp = do_http("POST", url)
if resp.status != 200:
    print >>stderr, format_http_error("ERROR", resp)
    return 1
jdata = resp.read()
if options.get("raw"):
    stdout.write(jdata)
    stdout.write("\n")
    return 0
data = simplejson.loads(jdata)

if options["repair"]:
    # show repair status
    if data["pre-repair-results"]["results"]["healthy"]:
        summary = "healthy"
    else:
def rm(options):
    """
    @return: a Deferred which eventually fires with the exit code
    """
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError, e:
        e.display(stderr)
        return 1
    assert path
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    url += "/" + escape_path(path)

    resp = do_http("DELETE", url)

    if resp.status in (200,):
        print >>stdout, format_http_success(resp)
        return 0

    print >>stderr, format_http_error("ERROR", resp)
    return 1
def get_source_info(self, source_spec):
    """
    This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
    """
    precondition(isinstance(source_spec, unicode), source_spec)
    rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
    path = path_utf8.decode("utf-8")
    # any trailing slash is removed in abspath_expanduser_unicode(), so
    # make a note of it here, to throw an error later
    had_trailing_slash = path.endswith("/")
    if rootcap == DefaultAliasMarker:
        # no alias, so this is a local file
        pathname = abspath_expanduser_unicode(path)
        name = os.path.basename(pathname)
        if not os.path.exists(pathname):
            raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
        if os.path.isdir(pathname):
            t = LocalDirectorySource(self.progress, pathname, name)
        else:
            if had_trailing_slash:
                raise FilenameWithTrailingSlashError(
                    source_spec, quotefn=quote_local_unicode_path)
            if not os.path.isfile(pathname):
                raise WeirdSourceError(pathname)
            t = LocalFileSource(pathname, name) # non-empty
    else:
        # this is a tahoe object
        url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
        name = None
        if path:
            if path.endswith("/"):
                path = path[:-1]
            url += "/" + escape_path(path)
            last_slash = path.rfind(u"/")
            name = path
            if last_slash != -1:
                name = path[last_slash + 1:]

        resp = do_http("GET", url + "?t=json")
        if resp.status == 404:
            raise MissingSourceError(source_spec)
        elif resp.status != 200:
            raise HTTPError(
                "Error examining source %s" % quote_output(source_spec),
                resp)
        parsed = json.loads(resp.read())
        nodetype, d = parsed
        if nodetype == "dirnode":
            t = TahoeDirectorySource(self.nodeurl, self.cache,
                                     self.progress, name)
            t.init_from_parsed(parsed)
        else:
            if had_trailing_slash:
                raise FilenameWithTrailingSlashError(source_spec)
            writecap = to_bytes(d.get("rw_uri"))
            readcap = to_bytes(d.get("ro_uri"))
            mutable = d.get("mutable", False) # older nodes don't provide it
            t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
    return t
def check_location(options, where):
    stdout = options.stdout
    stderr = options.stderr
    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    try:
        rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    if path == '/':
        path = ''
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        url += "/" + escape_path(path)
    # todo: should it end with a slash?
    url += "?t=check&output=JSON"
    if options["verify"]:
        url += "&verify=true"
    if options["repair"]:
        url += "&repair=true"
    if options["add-lease"]:
        url += "&add-lease=true"

    resp = do_http("POST", url)
    if resp.status != 200:
        print(format_http_error("ERROR", resp), file=stderr)
        return 1
    jdata = resp.read()
    if options.get("raw"):
        stdout.write(jdata)
        stdout.write("\n")
        return 0
    data = json.loads(jdata)

    if options["repair"]:
        # show repair status
        if data["pre-repair-results"]["results"]["healthy"]:
            summary = "healthy"
        else:
            summary = "not healthy"
        stdout.write("Summary: %s\n" % summary)
        cr = data["pre-repair-results"]["results"]
        stdout.write(" storage index: %s\n"
                     % quote_output(data["storage-index"], quotemarks=False))
        stdout.write(" good-shares: %r (encoding is %r-of-%r)\n"
                     % (cr["count-shares-good"],
                        cr["count-shares-needed"],
                        cr["count-shares-expected"]))
        stdout.write(" wrong-shares: %r\n" % cr["count-wrong-shares"])
        corrupt = cr["list-corrupt-shares"]
        if corrupt:
            stdout.write(" corrupt shares:\n")
            for (serverid, storage_index, sharenum) in corrupt:
                stdout.write("  %s\n" % _quote_serverid_index_share(serverid,
                                                                    storage_index,
                                                                    sharenum))
        if data["repair-attempted"]:
            if data["repair-successful"]:
                stdout.write(" repair successful\n")
            else:
                stdout.write(" repair failed\n")
    else:
        # LIT files and directories do not have a "summary" field.
        summary = data.get("summary", "Healthy (LIT)")
        stdout.write("Summary: %s\n" % quote_output(summary, quotemarks=False))
        cr = data["results"]
        stdout.write(" storage index: %s\n"
                     % quote_output(data["storage-index"], quotemarks=False))

        if all([field in cr for field in ("count-shares-good",
                                          "count-shares-needed",
                                          "count-shares-expected",
                                          "count-wrong-shares")]):
            stdout.write(" good-shares: %r (encoding is %r-of-%r)\n"
                         % (cr["count-shares-good"],
                            cr["count-shares-needed"],
                            cr["count-shares-expected"]))
            stdout.write(" wrong-shares: %r\n" % cr["count-wrong-shares"])

        corrupt = cr.get("list-corrupt-shares", [])
        if corrupt:
            stdout.write(" corrupt shares:\n")
            for (serverid, storage_index, sharenum) in corrupt:
                stdout.write("  %s\n" % _quote_serverid_index_share(serverid,
                                                                    storage_index,
                                                                    sharenum))

    return 0
def ls(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr

    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if where.endswith("/"):
        where = where[:-1]
    try:
        rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1

    path = str(path, "utf-8")
    url = nodeurl + "uri/%s" % url_quote(rootcap)
    if path:
        # move where.endswith check here?
        url += "/" + escape_path(path)
    assert not url.endswith("/")
    url += "?t=json"
    resp = do_http("GET", url)
    if resp.status == 404:
        print("No such file or directory", file=stderr)
        return 2
    if resp.status != 200:
        print(format_http_error("Error during GET", resp), file=stderr)
        if resp.status == 0:
            return 3
        else:
            return resp.status

    data = resp.read()

    if options['json']:
        # The webapi server should always output printable ASCII.
        if is_printable_ascii(data):
            data = str(data, "ascii")
            print(data, file=stdout)
            return 0
        else:
            print("The JSON response contained unprintable characters:", file=stderr)
            print(quote_output(data, quotemarks=False), file=stderr)
            return 1

    try:
        parsed = json.loads(data)
    except Exception as e:
        print("error: %s" % quote_output(e.args[0], quotemarks=False), file=stderr)
        print("Could not parse JSON response:", file=stderr)
        print(quote_output(data, quotemarks=False), file=stderr)
        return 1

    nodetype, d = parsed
    children = {}
    if nodetype == "dirnode":
        children = d['children']
    else:
        # paths returned from get_alias are always valid UTF-8
        childname = path.split("/")[-1]
        children = {childname: (nodetype, d)}
        if "metadata" not in d:
            d["metadata"] = {}
    childnames = sorted(children.keys())
    now = time.time()

    # we build up a series of rows, then we loop through them to compute a
    # maxwidth so we can format them tightly. Size, filename, and URI are the
    # variable-width ones.
    rows = []
    has_unknowns = False

    for name in childnames:
        child = children[name]
        name = str(name)
        childtype = child[0]

        # See webapi.txt for a discussion of the meanings of unix local
        # filesystem mtime and ctime, Tahoe mtime and ctime, and Tahoe
        # linkmotime and linkcrtime.
        ctime = child[1].get("metadata", {}).get('tahoe', {}).get("linkcrtime")
        if not ctime:
            ctime = child[1]["metadata"].get("ctime")

        mtime = child[1].get("metadata", {}).get('tahoe', {}).get("linkmotime")
        if not mtime:
            mtime = child[1]["metadata"].get("mtime")
        rw_uri = to_bytes(child[1].get("rw_uri"))
        ro_uri = to_bytes(child[1].get("ro_uri"))
        if ctime:
            # match for formatting that GNU 'ls' does
            if (now - ctime) > 6 * 30 * 24 * 60 * 60:
                # old files
                fmt = "%b %d %Y"
            else:
                fmt = "%b %d %H:%M"
            ctime_s = time.strftime(fmt, time.localtime(ctime))
        else:
            ctime_s = "-"
        if childtype == "dirnode":
            t0 = "d"
            size = "-"
            classify = "/"
        elif childtype == "filenode":
            t0 = "-"
            size = str(child[1].get("size", "?"))
            classify = ""
            if rw_uri:
                classify = "*"
        else:
            has_unknowns = True
            t0 = "?"
            size = "?"
            classify = "?"
        t1 = "-"
        if ro_uri:
            t1 = "r"
        t2 = "-"
        if rw_uri:
            t2 = "w"
        t3 = "-"
        if childtype == "dirnode":
            t3 = "x"

        uri = rw_uri or ro_uri

        line = []
        if options["long"]:
            line.append(t0 + t1 + t2 + t3)
            line.append(size)
            line.append(ctime_s)
        if not options["classify"]:
            classify = ""
        line.append(name + classify)
        if options["uri"]:
            line.append(ensure_text(uri))
        if options["readonly-uri"]:
            line.append(quote_output(ensure_text(ro_uri) or "-", quotemarks=False))
        rows.append(line)

    max_widths = []
    left_justifys = []
    for row in rows:
        for i, cell in enumerate(row):
            while len(max_widths) <= i:
                max_widths.append(0)
            while len(left_justifys) <= i:
                left_justifys.append(False)
            max_widths[i] = max(max_widths[i], len(cell))
            if ensure_text(cell).startswith("URI"):
                left_justifys[i] = True
    if len(left_justifys) == 1:
        left_justifys[0] = True
    fmt_pieces = []
    for i in range(len(max_widths)):
        piece = "%"
        if left_justifys[i]:
            piece += "-"
        piece += str(max_widths[i])
        piece += "s"
        fmt_pieces.append(piece)
    fmt = " ".join(fmt_pieces)

    rc = 0
    for row in rows:
        row = (fmt % tuple(row)).rstrip()
        encoding_error = False
        try:
            row = unicode_to_output(row)
        except UnicodeEncodeError:
            encoding_error = True
            row = quote_output(row)
        if encoding_error:
            print(row, file=stderr)
            rc = 1
        else:
            print(row, file=stdout)

    if rc == 1:
        print("\nThis listing included files whose names could not be converted to the terminal" \
              "\noutput encoding. Their names are shown using backslash escapes and in quotes.",
              file=stderr)
    if has_unknowns:
        print("\nThis listing included unknown objects. Using a webapi server that supports" \
              "\na later version of Tahoe may help.", file=stderr)

    return rc
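# The column layout above computes one maximum width per column and then
# builds a printf-style format string. The same technique on made-up rows,
# as a self-contained sketch (right-justified everywhere for brevity; the
# real code left-justifies URI columns):
rows = [["-r--", "1234", "Jan 01 2024", "file.txt"],
        ["drwx", "-", "Jan 02 2024", "photos/"]]
widths = [max(len(row[i]) for row in rows) for i in range(len(rows[0]))]
fmt = " ".join("%%%ds" % w for w in widths)
for row in rows:
    print((fmt % tuple(row)).rstrip())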
try:
    rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
except UnknownAliasError, e:
    e.display(stderr)
    return 1
to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
if path:
    to_url += "/" + escape_path(path)

if to_url.endswith("/"):
    # "mv foo.txt bar/" == "mv foo.txt bar/foo.txt"
    to_url += escape_path(from_path[from_path.rfind("/")+1:])

to_url += "?t=uri&replace=only-files"

resp = do_http("PUT", to_url, cap)
status = resp.status
if not re.search(r'^2\d\d$', str(status)):
    if status == 409:
        print >>stderr, "Error: You can't overwrite a directory with a file"
    else:
        print >>stderr, format_http_error("Error", resp)
        if mode == "move":
            print >>stderr, "NOT removing the original"
    return 1

if mode == "move":
    # now remove the original
    resp = do_http("DELETE", from_url)
    # check the DELETE's own status, not the earlier PUT's `status`
    if not re.search(r'^2\d\d$', str(resp.status)):
        print >>stderr, format_http_error("Error deleting original after move", resp)
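# Sketch of the "mv foo.txt bar/" rule noted above: a target URL ending in
# "/" gets the source's basename appended (placeholders; escape_path is
# omitted since the name here needs no escaping):
from_path = "docs/foo.txt"
to_url = "http://127.0.0.1:3456/uri/URI%3ADIR2%3Aexample/bar/"
if to_url.endswith("/"):
    to_url += from_path[from_path.rfind("/")+1:]
print(to_url)  # ...bar/foo.txt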
nodeurl = options['node-url']
aliases = options.aliases
where = options.where
stdout = options.stdout
stderr = options.stderr

if nodeurl[-1] != "/":
    nodeurl += "/"
try:
    rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
except UnknownAliasError, e:
    e.display(stderr)
    return 1
if not path:
    print >>stderr, """
'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,)
    return 1

url = nodeurl + "uri/%s" % urllib.quote(rootcap)
url += "/" + escape_path(path)

resp = do_http("DELETE", url)

if resp.status in (200,):
    print >>stdout, format_http_success(resp)
    return 0

print >>stderr, format_http_error("ERROR", resp)
return 1
def check_location(options, where):
    stdout = options.stdout
    stderr = options.stderr
    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    try:
        rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    path = str(path, "utf-8")
    if path == '/':
        path = ''
    url = nodeurl + "uri/%s" % url_quote(rootcap)
    if path:
        url += "/" + escape_path(path)
    # todo: should it end with a slash?
    url += "?t=check&output=JSON"
    if options["verify"]:
        url += "&verify=true"
    if options["repair"]:
        url += "&repair=true"
    if options["add-lease"]:
        url += "&add-lease=true"

    resp = do_http("POST", url)
    if resp.status != 200:
        print(format_http_error("ERROR", resp), file=stderr)
        return 1
    jdata = resp.read().decode()
    if options.get("raw"):
        stdout.write(jdata)
        stdout.write("\n")
        return 0
    data = json.loads(jdata)

    if options["repair"]:
        # show repair status
        if data["pre-repair-results"]["results"]["healthy"]:
            summary = "healthy"
        else:
            summary = "not healthy"
        stdout.write("Summary: %s\n" % summary)
        cr = data["pre-repair-results"]["results"]
        stdout.write(" storage index: %s\n"
                     % quote_output(data["storage-index"], quotemarks=False))
        stdout.write(" good-shares: %r (encoding is %r-of-%r)\n"
                     % (cr["count-shares-good"],
                        cr["count-shares-needed"],
                        cr["count-shares-expected"]))
        stdout.write(" wrong-shares: %r\n" % cr["count-wrong-shares"])
        corrupt = cr["list-corrupt-shares"]
        if corrupt:
            stdout.write(" corrupt shares:\n")
            for (serverid, storage_index, sharenum) in corrupt:
                stdout.write("  %s\n" % _quote_serverid_index_share(serverid,
                                                                    storage_index,
                                                                    sharenum))
        if data["repair-attempted"]:
            if data["repair-successful"]:
                stdout.write(" repair successful\n")
            else:
                stdout.write(" repair failed\n")
    else:
        # LIT files and directories do not have a "summary" field.
        summary = data.get("summary", "Healthy (LIT)")
        stdout.write("Summary: %s\n" % quote_output(summary, quotemarks=False))
        cr = data["results"]
        stdout.write(" storage index: %s\n"
                     % quote_output(data["storage-index"], quotemarks=False))

        if all([field in cr for field in ("count-shares-good",
                                          "count-shares-needed",
                                          "count-shares-expected",
                                          "count-wrong-shares")]):
            stdout.write(" good-shares: %r (encoding is %r-of-%r)\n"
                         % (cr["count-shares-good"],
                            cr["count-shares-needed"],
                            cr["count-shares-expected"]))
            stdout.write(" wrong-shares: %r\n" % cr["count-wrong-shares"])

        corrupt = cr.get("list-corrupt-shares", [])
        if corrupt:
            stdout.write(" corrupt shares:\n")
            for (serverid, storage_index, sharenum) in corrupt:
                stdout.write("  %s\n" % _quote_serverid_index_share(serverid,
                                                                    storage_index,
                                                                    sharenum))

    return 0