def render_lease_current_cycle_results(self, ctx, data):
    """
    Render a summary of the lease-crawler cycle currently in progress,
    as a nevow <ul> fragment appended to ctx.tag.

    Returns "" when no cycle is in progress.
    """
    lc = self.storage.lease_checker
    p = lc.get_progress()
    if not p["cycle-in-progress"]:
        return ""
    s = lc.get_state()
    so_far = s["cycle-to-date"]
    sr = so_far["space-recovered"]
    er = s["estimated-remaining-cycle"]
    esr = er["space-recovered"]
    ec = s["estimated-current-cycle"]
    ecr = ec["space-recovered"]

    # NOTE: 'p' is reused here -- from this point on it is the <ul> tag
    # being built, no longer the progress dict read above.
    p = T.ul()
    def add(*pieces):
        # nevow stan: indexing a tag appends children to it.
        p[T.li[pieces]]
    def maybe(d):
        # Estimated counters may be None before the crawler has data.
        if d is None:
            return "?"
        return "%d" % d

    add("So far, this cycle has examined %d shares in %d buckets"
        % (sr["examined-shares"], sr["examined-buckets"]),
        " (%d mutable / %d immutable)"
        % (sr["examined-buckets-mutable"], sr["examined-buckets-immutable"]),
        " (%s / %s)"
        % (abbreviate_space(sr["examined-diskbytes-mutable"]),
           abbreviate_space(sr["examined-diskbytes-immutable"])),
        )
    add("and has recovered: ", self.format_recovered(sr, "actual"))

    if so_far["expiration-enabled"]:
        add("The remainder of this cycle is expected to recover: ",
            self.format_recovered(esr, "actual"))
        add("The whole cycle is expected to examine %s shares in %s buckets"
            % (maybe(ecr["examined-shares"]), maybe(ecr["examined-buckets"])))
        add("and to recover: ", self.format_recovered(ecr, "actual"))
    else:
        # Expiration disabled: show what *would* happen under the
        # configured policy instead.
        add("If expiration were enabled, we would have recovered: ",
            self.format_recovered(sr, "configured"), " by now")
        add("and the remainder of this cycle would probably recover: ",
            self.format_recovered(esr, "configured"))
        add("and the whole cycle would probably recover: ",
            self.format_recovered(ecr, "configured"))

    add("if we were strictly using each lease's default 31-day lease lifetime "
        "(instead of our configured behavior), "
        "this cycle would be expected to recover: ",
        self.format_recovered(ecr, "original"))

    if so_far["corrupt-shares"]:
        # corrupt-shares entries are (storage_index, shnum) tuples.
        add("Corrupt shares:",
            T.ul[ [T.li[ ["SI %s shnum %d" % corrupt_share
                          for corrupt_share in so_far["corrupt-shares"]
                          ]
                         ]]])
    return ctx.tag["Current cycle:", p]
def do_status(options, do_http=None):
    """
    Implement the 'status' CLI command: print node statistics followed
    by active and recent operations.

    :param options: CLI options object; must contain 'node-directory'
        and 'verbose', and supply stdout/stderr streams.
    :param do_http: HTTP helper, injectable for testing; defaults to
        ``allmydata.scripts.common_http.do_http``.
    :return int: exit code -- 0 on success, 2 if data retrieval failed.
    """
    if do_http is None:
        from allmydata.scripts.common_http import do_http
    nodedir = options["node-directory"]
    # The web API requires the node's auth token; both files live in the
    # node directory.
    with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'r') as f:
        token = f.read().strip()
    with open(os.path.join(nodedir, u'node.url'), 'r') as f:
        options['node-url'] = f.read().strip()

    # do *all* our data-retrievals first in case there's an error
    try:
        status_data = _handle_response_for_fragment(
            do_http(**_get_request_parameters_for_fragment(
                options,
                'status?t=json',
                method='POST',
                post_args=dict(
                    t='json',
                    token=token,
                ),
            )),
            options['node-url'],
        )
        statistics_data = _handle_response_for_fragment(
            do_http(**_get_request_parameters_for_fragment(
                options,
                'statistics?t=json',
                method='POST',
                post_args=dict(
                    t='json',
                    token=token,
                ),
            )),
            options['node-url'],
        )
    except Exception as e:
        print(u"failed to retrieve data: %s" % str(e), file=options.stderr)
        return 2

    # Missing counters default to 0 (a fresh node may not have them yet).
    downloaded_bytes = statistics_data['counters'].get('downloader.bytes_downloaded', 0)
    downloaded_files = statistics_data['counters'].get('downloader.files_downloaded', 0)
    uploaded_bytes = statistics_data['counters'].get('uploader.bytes_uploaded', 0)
    uploaded_files = statistics_data['counters'].get('uploader.files_uploaded', 0)
    print(u"Statistics (for last {}):".format(abbreviate_time(statistics_data['stats']['node.uptime'])), file=options.stdout)
    print(u"    uploaded {} in {} files".format(abbreviate_space(uploaded_bytes), uploaded_files), file=options.stdout)
    print(u"    downloaded {} in {} files".format(abbreviate_space(downloaded_bytes), downloaded_files), file=options.stdout)
    print(u"", file=options.stdout)

    render_active(options.stdout, status_data)
    render_recent(options['verbose'], options.stdout, status_data)

    # open question: should we return non-zero if there were no
    # operations at all to display?
    return 0
def format_recovered(sr, a):
    """Render the '<a>-*' space-recovered counters of *sr* as one line."""
    def count(suffix):
        # Unknown counters are rendered as '?'.
        value = sr["%s-%s" % (a, suffix)]
        return "?" if value is None else "%d" % value
    def space(suffix):
        return abbreviate_space(sr["%s-%s" % (a, suffix)])
    return "%s shares, %s buckets (%s mutable / %s immutable), %s (%s / %s)" % (
        count("shares"),
        count("buckets"),
        count("buckets-mutable"),
        count("buckets-immutable"),
        space("diskbytes"),
        space("diskbytes-mutable"),
        space("diskbytes-immutable"),
    )
def format_recovered(self, sr, a):
    """One-line summary of the '<a>-*' space-recovered counters in *sr*."""
    keys = ("shares", "buckets", "buckets-mutable", "buckets-immutable")
    counts = []
    for suffix in keys:
        value = sr["%s-%s" % (a, suffix)]
        # A None counter means "no estimate yet".
        counts.append("?" if value is None else "%d" % value)
    sizes = [
        abbreviate_space(sr["%s-%s" % (a, suffix)])
        for suffix in ("diskbytes", "diskbytes-mutable", "diskbytes-immutable")
    ]
    template = "%s shares, %s buckets (%s mutable / %s immutable), %s (%s / %s)"
    return template % tuple(counts + sizes)
def _render_recent_generic(op):
    """Summarize one recent operation *op* for table rendering."""
    summary = dict()
    summary[u"op_type"] = OP_MAP[op["type"]]
    summary[u"storage-index-string"] = op["storage-index-string"]
    summary[u"nice_size"] = abbreviate_space(op["total-size"])
    summary[u"status"] = op["status"]
    return summary
def _format_file_line(now, name, child): """ Format one Tahoe-LAFS filenode as a unicode string. :param datetime now: A time to use as current. :param unicode name: The name of the file. :param child: Metadata describing the file. The format is like the format of a filenode inside a dirnode's **children**. See the Tahoe-LAFS Web API frontend documentation for details. :return unicode: Text roughly describing the filenode to a person. """ captype, meta = child if captype != 'filenode': return u"%20s: error, should be a filecap (not %s)" % (name, captype) status = 'good' size = meta['size'] created = datetime.fromtimestamp(meta['metadata']['tahoe']['linkcrtime']) version = meta['metadata']['version'] nice_size = abbreviate_space(size) nice_created = abbreviate_time(now - created) return u" %s (%s): %s, version=%s, created %s" % ( name, nice_size, status, version, nice_created, )
def do_status(options):
    """
    Implement the 'status' CLI command: print node statistics followed
    by active and recent operations.

    :param options: CLI options object; must contain 'node-directory'
        and 'verbose', and supply stdout/stderr streams.
    :return int: exit code -- 0 on success, 2 if data retrieval failed.
    """
    nodedir = options["node-directory"]
    # Web-API auth token and node URL both live under the node directory.
    with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'r') as f:
        token = f.read().strip()
    with open(os.path.join(nodedir, u'node.url'), 'r') as f:
        options['node-url'] = f.read().strip()

    # do *all* our data-retrievals first in case there's an error
    try:
        status_data = _get_json_for_fragment(
            options,
            'status?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
        statistics_data = _get_json_for_fragment(
            options,
            'statistics?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
    except Exception as e:
        print(u"failed to retrieve data: %s" % str(e), file=options.stderr)
        return 2

    # Missing counters default to 0 (a fresh node may not have them yet).
    downloaded_bytes = statistics_data['counters'].get('downloader.bytes_downloaded', 0)
    downloaded_files = statistics_data['counters'].get('downloader.files_downloaded', 0)
    uploaded_bytes = statistics_data['counters'].get('uploader.bytes_uploaded', 0)
    uploaded_files = statistics_data['counters'].get('uploader.files_uploaded', 0)
    print(u"Statistics (for last {}):".format(abbreviate_time(statistics_data['stats']['node.uptime'])), file=options.stdout)
    print(u"    uploaded {} in {} files".format(abbreviate_space(uploaded_bytes), uploaded_files), file=options.stdout)
    print(u"    downloaded {} in {} files".format(abbreviate_space(downloaded_bytes), downloaded_files), file=options.stdout)
    print(u"", file=options.stdout)

    render_active(options.stdout, status_data)
    render_recent(options['verbose'], options.stdout, status_data)

    # open question: should we return non-zero if there were no
    # operations at all to display?
    return 0
def test_space(self):
    """Exercise abbreviate_space over SI and base-1024 unit tables."""
    def check(si, table):
        # Each table row is (input byte count, expected rendering).
        for value, expected in table:
            got = abbreviate.abbreviate_space(value, SI=si)
            self.failUnlessEqual(got, expected)
    check(True, [
        (None, "unknown"),
        (0, "0 B"),
        (1, "1 B"),
        (999, "999 B"),
        (1000, "1000 B"),
        (1023, "1023 B"),
        (1024, "1.02 kB"),
        (20 * 1000, "20.00 kB"),
        (1024 * 1024, "1.05 MB"),
        (1000 * 1000, "1.00 MB"),
        (1000 * 1000 * 1000, "1.00 GB"),
        (1000 * 1000 * 1000 * 1000, "1.00 TB"),
        (1000 * 1000 * 1000 * 1000 * 1000, "1.00 PB"),
        (1000 * 1000 * 1000 * 1000 * 1000 * 1000, "1.00 EB"),
        (1234567890123456789, "1.23 EB"),
    ])
    check(False, [
        (None, "unknown"),
        (0, "0 B"),
        (1, "1 B"),
        (999, "999 B"),
        (1000, "1000 B"),
        (1023, "1023 B"),
        (1024, "1.00 kiB"),
        (20 * 1024, "20.00 kiB"),
        (1000 * 1000, "976.56 kiB"),
        (1024 * 1024, "1.00 MiB"),
        (1024 * 1024 * 1024, "1.00 GiB"),
        (1024 * 1024 * 1024 * 1024, "1.00 TiB"),
        (1000 * 1000 * 1000 * 1000 * 1000, "909.49 TiB"),
        (1024 * 1024 * 1024 * 1024 * 1024, "1.00 PiB"),
        (1024 * 1024 * 1024 * 1024 * 1024 * 1024, "1.00 EiB"),
        (1234567890123456789, "1.07 EiB"),
    ])
    self.failUnlessEqual(abbreviate.abbreviate_space_both(1234567),
                         "(1.23 MB, 1.18 MiB)")
def status(options): nodedir = options["node-directory"] with open(os.path.join(nodedir, u"private", u"magic_folder_dircap")) as f: dmd_cap = f.read().strip() with open(os.path.join(nodedir, u"private", u"collective_dircap")) as f: collective_readcap = f.read().strip() with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'rb') as f: token = f.read() # do *all* our data-retrievals first in case there's an error try: dmd_data = _get_json_for_cap(options, dmd_cap) remote_data = _get_json_for_cap(options, collective_readcap) magic_data = _get_json_for_fragment( options, 'magic_folder?t=json', method='POST', post_args=dict( t='json', token=token, ) ) except Exception as e: print >>stderr, "failed to retrieve data: %s" % str(e) return 2 for d in [dmd_data, remote_data, magic_data]: if isinstance(d, dict) and 'error' in d: print >>stderr, "Error from server: %s" % d['error'] print >>stderr, "This means we can't retrieve the remote shared directory." return 3 captype, dmd = dmd_data if captype != 'dirnode': print >>stderr, "magic_folder_dircap isn't a directory capability" return 2 now = datetime.now() print "Local files:" for (name, child) in dmd['children'].items(): captype, meta = child status = 'good' size = meta['size'] created = datetime.fromtimestamp(meta['metadata']['tahoe']['linkcrtime']) version = meta['metadata']['version'] nice_size = abbreviate_space(size) nice_created = abbreviate_time(now - created) if captype != 'filenode': print "%20s: error, should be a filecap" % name continue print " %s (%s): %s, version=%s, created %s" % (name, nice_size, status, version, nice_created) print print "Remote files:" captype, collective = remote_data for (name, data) in collective['children'].items(): if data[0] != 'dirnode': print "Error: '%s': expected a dirnode, not '%s'" % (name, data[0]) print " %s's remote:" % name dmd = _get_json_for_cap(options, data[1]['ro_uri']) if isinstance(dmd, dict) and 'error' in dmd: print(" Error: could not retrieve directory") 
continue if dmd[0] != 'dirnode': print "Error: should be a dirnode" continue for (n, d) in dmd[1]['children'].items(): if d[0] != 'filenode': print "Error: expected '%s' to be a filenode." % (n,) meta = d[1] status = 'good' size = meta['size'] created = datetime.fromtimestamp(meta['metadata']['tahoe']['linkcrtime']) version = meta['metadata']['version'] nice_size = abbreviate_space(size) nice_created = abbreviate_time(now - created) print " %s (%s): %s, version=%s, created %s" % (n, nice_size, status, version, nice_created) if len(magic_data): uploads = [item for item in magic_data if item['kind'] == 'upload'] downloads = [item for item in magic_data if item['kind'] == 'download'] longest = max([len(item['path']) for item in magic_data]) if True: # maybe --show-completed option or something? uploads = [item for item in uploads if item['status'] != 'success'] downloads = [item for item in downloads if item['status'] != 'success'] if len(uploads): print print "Uploads:" for item in uploads: _print_item_status(item, now, longest) if len(downloads): print print "Downloads:" for item in downloads: _print_item_status(item, now, longest) for item in magic_data: if item['status'] == 'failure': print "Failed:", item return 0
def do_status(options):
    """
    Implement the 'status' CLI command: print node statistics, then
    render tables (Unicode box-drawing) of active and recent operations.

    :param options: CLI options object; must contain 'node-directory'
        and 'verbose', and supply stdout/stderr streams.
    :return int: exit code -- 0 on success, 2 if data retrieval failed.
    """
    nodedir = options["node-directory"]
    # Web-API auth token and node URL both live under the node directory.
    with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'r') as f:
        token = f.read().strip()
    with open(os.path.join(nodedir, u'node.url'), 'r') as f:
        options['node-url'] = f.read().strip()

    # do *all* our data-retrievals first in case there's an error
    try:
        status_data = _get_json_for_fragment(
            options,
            'status?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
        statistics_data = _get_json_for_fragment(
            options,
            'statistics?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
    except Exception as e:
        print(u"failed to retrieve data: %s" % str(e), file=options.stderr)
        return 2

    # Missing counters default to 0 (a fresh node may not have them yet).
    downloaded_bytes = statistics_data['counters'].get('downloader.bytes_downloaded', 0)
    downloaded_files = statistics_data['counters'].get('downloader.files_downloaded', 0)
    uploaded_bytes = statistics_data['counters'].get('uploader.bytes_uploaded', 0)
    uploaded_files = statistics_data['counters'].get('uploader.files_uploaded', 0)
    print(u"Statistics (for last {}):".format(abbreviate_time(statistics_data['stats']['node.uptime'])), file=options.stdout)
    print(u"    uploaded {} in {} files".format(abbreviate_space(uploaded_bytes), uploaded_files), file=options.stdout)
    print(u"    downloaded {} in {} files".format(abbreviate_space(downloaded_bytes), downloaded_files), file=options.stdout)
    print(u"", file=options.stdout)

    if status_data.get('active', None):
        print(u"Active operations:", file=options.stdout)
        # Table header: the \u25xx escapes are Unicode box-drawing chars.
        print(
            u"\u2553 {:<5} \u2565 {:<26} \u2565 {:<22} \u2565 {}".format(
                "type",
                "storage index",
                "progress",
                "status message",
            ), file=options.stdout
        )
        print(u"\u255f\u2500{}\u2500\u256b\u2500{}\u2500\u256b\u2500{}\u2500\u256b\u2500{}".format(u'\u2500' * 5, u'\u2500' * 26, u'\u2500' * 22, u'\u2500' * 20), file=options.stdout)
        for op in status_data['active']:
            if 'progress-hash' in op:
                # Uploads report three progress phases; average them.
                op_type = ' put '
                total = (op['progress-hash'] + op['progress-ciphertext'] + op['progress-encode-push']) / 3.0
                progress_bar = u"{}".format(pretty_progress(total * 100.0, size=15))
            else:
                op_type = ' get '
                total = op['progress']
                progress_bar = u"{}".format(pretty_progress(op['progress'] * 100.0, size=15))
            print(
                u"\u2551 {op_type} \u2551 {storage-index-string} \u2551 {progress_bar} ({total:3}%) \u2551 {status}".format(
                    op_type=op_type,
                    progress_bar=progress_bar,
                    total=int(total * 100.0),
                    **op
                ), file=options.stdout
            )
        print(u"\u2559\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}".format(u'\u2500' * 5, u'\u2500' * 26, u'\u2500' * 22, u'\u2500' * 20), file=options.stdout)
    else:
        print(u"No active operations.", file=options.stdout)

    if status_data.get('recent', None):
        # Without --verbose only plain uploads/downloads are shown.
        non_verbose_ops = ('upload', 'download')
        recent = [op for op in status_data['recent'] if op['type'] in non_verbose_ops]
        print(u"\nRecent operations:", file=options.stdout)
        if len(recent) or options['verbose']:
            print(
                u"\u2553 {:<5} \u2565 {:<26} \u2565 {:<10} \u2565 {}".format(
                    "type",
                    "storage index",
                    "size",
                    "status message",
                ), file=options.stdout
            )
        # Short tags for each known operation type.
        op_map = {
            'upload': ' put ',
            'download': ' get ',
            'retrieve': 'retr ',
            'publish': ' pub ',
            'mapupdate': 'mapup',
        }
        ops_to_show = status_data['recent'] if options['verbose'] else recent
        for op in ops_to_show:
            op_type = op_map[op.get('type', None)]
            if op['type'] == 'mapupdate':
                # mapupdate ops have no size; show their mode instead.
                nice_size = op['mode']
            else:
                nice_size = abbreviate_space(op['total-size'])
            print(
                u"\u2551 {op_type} \u2551 {storage-index-string} \u2551 {nice_size:<10} \u2551 {status}".format(
                    op_type=op_type,
                    nice_size=nice_size,
                    **op
                ), file=options.stdout
            )
        if len(recent) or options['verbose']:
            print(u"\u2559\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}".format(u'\u2500' * 5, u'\u2500' * 26, u'\u2500' * 10, u'\u2500' * 20), file=options.stdout)
        skipped = len(status_data['recent']) - len(ops_to_show)
        if not options['verbose'] and skipped:
            print(u"  Skipped {} non-upload/download operations; use --verbose to see".format(skipped), file=options.stdout)
    else:
        print(u"No recent operations.", file=options.stdout)

    # open question: should we return non-zero if there were no
    # operations at all to display?
    return 0
def render_abbrev_space(self, ctx, size):
    """Render *size* as a human-readable byte count; '?' when unknown."""
    return "?" if size is None else abbreviate_space(size)
def render_abbrev_space(self, size):
    """Render *size* as a human-readable byte count; u'?' when unknown."""
    if size is not None:
        return abbreviate_space(size)
    return u"?"
def lease_current_cycle_results(self, req, tag):
    """
    twisted.web.template renderer: describe the lease-crawler cycle
    currently in progress as a <ul> appended to *tag*.

    Returns "" when no cycle is in progress.
    """
    lc = self._storage.lease_checker
    p = lc.get_progress()
    if not p["cycle-in-progress"]:
        return ""
    s = lc.get_state()
    so_far = s["cycle-to-date"]
    sr = so_far["space-recovered"]
    er = s["estimated-remaining-cycle"]
    esr = er["space-recovered"]
    ec = s["estimated-current-cycle"]
    ecr = ec["space-recovered"]

    # NOTE: 'p' is reused here -- from this point on it is the <ul> tag
    # being built, no longer the progress dict read above.
    p = T.ul()
    def add(*pieces):
        # twisted.web.template: calling a tag appends children to it.
        p(T.li(pieces))
    def maybe(d):
        # Estimated counters may be None before the crawler has data.
        if d is None:
            return "?"
        return "%d" % d

    add(
        "So far, this cycle has examined %d shares in %d buckets"
        % (sr["examined-shares"], sr["examined-buckets"]),
        " (%d mutable / %d immutable)"
        % (sr["examined-buckets-mutable"], sr["examined-buckets-immutable"]),
        " (%s / %s)"
        % (abbreviate_space(sr["examined-diskbytes-mutable"]),
           abbreviate_space(sr["examined-diskbytes-immutable"])),
    )
    add("and has recovered: ", self.format_recovered(sr, "actual"))

    if so_far["expiration-enabled"]:
        add("The remainder of this cycle is expected to recover: ",
            self.format_recovered(esr, "actual"))
        add("The whole cycle is expected to examine %s shares in %s buckets"
            % (maybe(ecr["examined-shares"]), maybe(
            ecr["examined-buckets"])))
        add("and to recover: ", self.format_recovered(ecr, "actual"))
    else:
        # Expiration disabled: show what *would* happen under the
        # configured policy instead.
        add("If expiration were enabled, we would have recovered: ",
            self.format_recovered(sr, "configured"), " by now")
        add("and the remainder of this cycle would probably recover: ",
            self.format_recovered(esr, "configured"))
        add("and the whole cycle would probably recover: ",
            self.format_recovered(ecr, "configured"))

    add(
        "if we were strictly using each lease's default 31-day lease lifetime "
        "(instead of our configured behavior), "
        "this cycle would be expected to recover: ",
        self.format_recovered(ecr, "original"))

    if so_far["corrupt-shares"]:
        # corrupt-shares entries are (storage_index, shnum) tuples.
        add(
            "Corrupt shares:",
            T.ul((T.li([
                "SI %s shnum %d" % corrupt_share
                for corrupt_share in so_far["corrupt-shares"]
            ]))))
    return tag("Current cycle:", p)