def _print_item_status(item, now, longest):
    paddedname = (' ' * (longest - len(item['path']))) + item['path']
    if 'failure_at' in item:
        ts = datetime.fromtimestamp(item['started_at'])
        prog = 'Failed %s (%s)' % (abbreviate_time(now - ts), ts)
    elif item['percent_done'] < 100.0:
        if 'started_at' not in item:
            prog = 'not yet started'
        else:
            so_far = now - datetime.fromtimestamp(item['started_at'])
            if so_far.seconds > 0.0:
                rate = item['percent_done'] / so_far.seconds
                if rate != 0:
                    time_left = (100.0 - item['percent_done']) / rate
                    prog = '%2.1f%% done, around %s left' % (
                        item['percent_done'],
                        abbreviate_time(time_left),
                    )
                else:
                    time_left = None
                    prog = '%2.1f%% done' % (item['percent_done'],)
            else:
                prog = 'just started'
    else:
        prog = ''
        for verb in ['finished', 'started', 'queued']:
            keyname = verb + '_at'
            if keyname in item:
                when = datetime.fromtimestamp(item[keyname])
                prog = '%s %s' % (verb, abbreviate_time(now - when))
                break

    print "  %s: %s" % (paddedname, prog)
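
# A minimal, purely illustrative sketch of the queued-item dictionary that
# _print_item_status() expects. The real dictionaries come from the
# magic-folder status JSON; these keys and values are assumptions chosen to
# exercise the "percent done, time remaining" branch above.
import time
from datetime import datetime

_sample_item = {
    'path': u'photos/cat.jpg',
    'percent_done': 42.0,
    'queued_at': time.time() - 60,
    'started_at': time.time() - 30,
}

# Emits one right-padded status line, e.g.
# "       photos/cat.jpg: 42.0% done, around <estimate> left"
_print_item_status(_sample_item, datetime.now(), longest=len(_sample_item['path']) + 5)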
def _format_file_line(now, name, child):
    """
    Format one Tahoe-LAFS filenode as a unicode string.

    :param datetime now: A time to use as current.

    :param unicode name: The name of the file.

    :param child: Metadata describing the file.  The format is like the
        format of a filenode inside a dirnode's **children**.  See the
        Tahoe-LAFS Web API frontend documentation for details.

    :return unicode: Text roughly describing the filenode to a person.
    """
    captype, meta = child
    if captype != 'filenode':
        return u"%20s: error, should be a filecap (not %s)" % (name, captype)

    status = 'good'
    size = meta['size']
    created = datetime.fromtimestamp(meta['metadata']['tahoe']['linkcrtime'])
    version = meta['metadata']['version']
    nice_size = abbreviate_space(size)
    nice_created = abbreviate_time(now - created)
    return u"  %s (%s): %s, version=%s, created %s" % (
        name,
        nice_size,
        status,
        version,
        nice_created,
    )
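
# Purely illustrative usage sketch: a child entry shaped like one value of a
# dirnode's "children" mapping in the Web API JSON, i.e. a
# ("filenode", metadata) pair. The literal values below are made up.
from datetime import datetime

_child = ('filenode', {
    'size': 1234,
    'metadata': {
        'version': 1,
        'tahoe': {'linkcrtime': 1510000000.0},
    },
})

# Returns a line roughly like u"  notes.txt (1.23 kB): good, version=1, created <age>"
_line = _format_file_line(datetime.now(), u'notes.txt', _child)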
def do_status(options, do_http=None):
    if do_http is None:
        from allmydata.scripts.common_http import do_http

    nodedir = options["node-directory"]
    with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'r') as f:
        token = f.read().strip()
    with open(os.path.join(nodedir, u'node.url'), 'r') as f:
        options['node-url'] = f.read().strip()

    # do *all* our data-retrievals first in case there's an error
    try:
        status_data = _handle_response_for_fragment(
            do_http(**_get_request_parameters_for_fragment(
                options,
                'status?t=json',
                method='POST',
                post_args=dict(
                    t='json',
                    token=token,
                ),
            )),
            options['node-url'],
        )
        statistics_data = _handle_response_for_fragment(
            do_http(**_get_request_parameters_for_fragment(
                options,
                'statistics?t=json',
                method='POST',
                post_args=dict(
                    t='json',
                    token=token,
                ),
            )),
            options['node-url'],
        )
    except Exception as e:
        print(u"failed to retrieve data: %s" % str(e), file=options.stderr)
        return 2

    downloaded_bytes = statistics_data['counters'].get('downloader.bytes_downloaded', 0)
    downloaded_files = statistics_data['counters'].get('downloader.files_downloaded', 0)
    uploaded_bytes = statistics_data['counters'].get('uploader.bytes_uploaded', 0)
    uploaded_files = statistics_data['counters'].get('uploader.files_uploaded', 0)

    print(u"Statistics (for last {}):".format(abbreviate_time(statistics_data['stats']['node.uptime'])), file=options.stdout)
    print(u"    uploaded {} in {} files".format(abbreviate_space(uploaded_bytes), uploaded_files), file=options.stdout)
    print(u"    downloaded {} in {} files".format(abbreviate_space(downloaded_bytes), downloaded_files), file=options.stdout)
    print(u"", file=options.stdout)

    render_active(options.stdout, status_data)
    render_recent(options['verbose'], options.stdout, status_data)

    # open question: should we return non-zero if there were no
    # operations at all to display?
    return 0
def abbreviate_time(data):
    # 1.23s, 790ms, 132us
    if data is None:
        return ""
    s = float(data)
    if s >= 10:
        return abbreviate.abbreviate_time(data)
    if s >= 1.0:
        return "%.2fs" % s
    if s >= 0.01:
        return "%.0fms" % (1000 * s)
    if s >= 0.001:
        return "%.1fms" % (1000 * s)
    return "%.0fus" % (1000000 * s)
def do_status(options):
    nodedir = options["node-directory"]
    with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'r') as f:
        token = f.read().strip()
    with open(os.path.join(nodedir, u'node.url'), 'r') as f:
        options['node-url'] = f.read().strip()

    # do *all* our data-retrievals first in case there's an error
    try:
        status_data = _get_json_for_fragment(
            options,
            'status?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
        statistics_data = _get_json_for_fragment(
            options,
            'statistics?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
    except Exception as e:
        print(u"failed to retrieve data: %s" % str(e), file=options.stderr)
        return 2

    downloaded_bytes = statistics_data['counters'].get('downloader.bytes_downloaded', 0)
    downloaded_files = statistics_data['counters'].get('downloader.files_downloaded', 0)
    uploaded_bytes = statistics_data['counters'].get('uploader.bytes_uploaded', 0)
    uploaded_files = statistics_data['counters'].get('uploader.files_uploaded', 0)

    print(u"Statistics (for last {}):".format(abbreviate_time(statistics_data['stats']['node.uptime'])), file=options.stdout)
    print(u"    uploaded {} in {} files".format(abbreviate_space(uploaded_bytes), uploaded_files), file=options.stdout)
    print(u"    downloaded {} in {} files".format(abbreviate_space(downloaded_bytes), downloaded_files), file=options.stdout)
    print(u"", file=options.stdout)

    render_active(options.stdout, status_data)
    render_recent(options['verbose'], options.stdout, status_data)

    # open question: should we return non-zero if there were no
    # operations at all to display?
    return 0
def abbreviate_time(data):
    """
    Convert number of seconds into human readable string.

    :param data: Either ``None`` or integer or float, seconds.

    :return: Unicode string.
    """
    # 1.23s, 790ms, 132us
    if data is None:
        return u""
    s = float(data)
    if s >= 10:
        return abbreviate.abbreviate_time(data)
    if s >= 1.0:
        return u"%.2fs" % s
    if s >= 0.01:
        return u"%.0fms" % (1000 * s)
    if s >= 0.001:
        return u"%.1fms" % (1000 * s)
    return u"%.0fus" % (1000000 * s)
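
# Illustrative checks derived directly from the branches above; durations of
# ten seconds or more are delegated to abbreviate.abbreviate_time (imported
# elsewhere in this module), so only the sub-10-second formatting is shown.
assert abbreviate_time(None) == u""
assert abbreviate_time(1.5) == u"1.50s"        # >= 1.0 second
assert abbreviate_time(0.5) == u"500ms"        # >= 0.01 seconds
assert abbreviate_time(0.0042) == u"4.2ms"     # >= 0.001 seconds
assert abbreviate_time(0.000132) == u"132us"   # sub-millisecond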
def status(options):
    nodedir = options["node-directory"]
    with open(os.path.join(nodedir, u"private", u"magic_folder_dircap")) as f:
        dmd_cap = f.read().strip()
    with open(os.path.join(nodedir, u"private", u"collective_dircap")) as f:
        collective_readcap = f.read().strip()
    with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'rb') as f:
        token = f.read()

    # do *all* our data-retrievals first in case there's an error
    try:
        dmd_data = _get_json_for_cap(options, dmd_cap)
        remote_data = _get_json_for_cap(options, collective_readcap)
        magic_data = _get_json_for_fragment(
            options,
            'magic_folder?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
    except Exception as e:
        print >>stderr, "failed to retrieve data: %s" % str(e)
        return 2

    for d in [dmd_data, remote_data, magic_data]:
        if isinstance(d, dict) and 'error' in d:
            print >>stderr, "Error from server: %s" % d['error']
            print >>stderr, "This means we can't retrieve the remote shared directory."
            return 3

    captype, dmd = dmd_data
    if captype != 'dirnode':
        print >>stderr, "magic_folder_dircap isn't a directory capability"
        return 2

    now = datetime.now()

    print "Local files:"
    for (name, child) in dmd['children'].items():
        captype, meta = child
        status = 'good'
        size = meta['size']
        created = datetime.fromtimestamp(meta['metadata']['tahoe']['linkcrtime'])
        version = meta['metadata']['version']
        nice_size = abbreviate_space(size)
        nice_created = abbreviate_time(now - created)
        if captype != 'filenode':
            print "%20s: error, should be a filecap" % name
            continue
        print "  %s (%s): %s, version=%s, created %s" % (name, nice_size, status, version, nice_created)

    print
    print "Remote files:"

    captype, collective = remote_data
    for (name, data) in collective['children'].items():
        if data[0] != 'dirnode':
            print "Error: '%s': expected a dirnode, not '%s'" % (name, data[0])
        print "  %s's remote:" % name
        dmd = _get_json_for_cap(options, data[1]['ro_uri'])
        if isinstance(dmd, dict) and 'error' in dmd:
            print("  Error: could not retrieve directory")
            continue
        if dmd[0] != 'dirnode':
            print "Error: should be a dirnode"
            continue
        for (n, d) in dmd[1]['children'].items():
            if d[0] != 'filenode':
                print "Error: expected '%s' to be a filenode." % (n,)

            meta = d[1]
            status = 'good'
            size = meta['size']
            created = datetime.fromtimestamp(meta['metadata']['tahoe']['linkcrtime'])
            version = meta['metadata']['version']
            nice_size = abbreviate_space(size)
            nice_created = abbreviate_time(now - created)
            print "    %s (%s): %s, version=%s, created %s" % (n, nice_size, status, version, nice_created)

    if len(magic_data):
        uploads = [item for item in magic_data if item['kind'] == 'upload']
        downloads = [item for item in magic_data if item['kind'] == 'download']
        longest = max([len(item['path']) for item in magic_data])

        if True:  # maybe --show-completed option or something?
            uploads = [item for item in uploads if item['status'] != 'success']
            downloads = [item for item in downloads if item['status'] != 'success']

        if len(uploads):
            print
            print "Uploads:"
            for item in uploads:
                _print_item_status(item, now, longest)

        if len(downloads):
            print
            print "Downloads:"
            for item in downloads:
                _print_item_status(item, now, longest)

        for item in magic_data:
            if item['status'] == 'failure':
                print "Failed:", item

    return 0
def do_status(options):
    nodedir = options["node-directory"]
    with open(os.path.join(nodedir, u'private', u'api_auth_token'), 'r') as f:
        token = f.read().strip()
    with open(os.path.join(nodedir, u'node.url'), 'r') as f:
        options['node-url'] = f.read().strip()

    # do *all* our data-retrievals first in case there's an error
    try:
        status_data = _get_json_for_fragment(
            options,
            'status?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
        statistics_data = _get_json_for_fragment(
            options,
            'statistics?t=json',
            method='POST',
            post_args=dict(
                t='json',
                token=token,
            )
        )
    except Exception as e:
        print(u"failed to retrieve data: %s" % str(e), file=options.stderr)
        return 2

    downloaded_bytes = statistics_data['counters'].get('downloader.bytes_downloaded', 0)
    downloaded_files = statistics_data['counters'].get('downloader.files_downloaded', 0)
    uploaded_bytes = statistics_data['counters'].get('uploader.bytes_uploaded', 0)
    uploaded_files = statistics_data['counters'].get('uploader.files_uploaded', 0)

    print(u"Statistics (for last {}):".format(abbreviate_time(statistics_data['stats']['node.uptime'])), file=options.stdout)
    print(u"    uploaded {} in {} files".format(abbreviate_space(uploaded_bytes), uploaded_files), file=options.stdout)
    print(u"    downloaded {} in {} files".format(abbreviate_space(downloaded_bytes), downloaded_files), file=options.stdout)
    print(u"", file=options.stdout)

    if status_data.get('active', None):
        print(u"Active operations:", file=options.stdout)
        print(
            u"\u2553 {:<5} \u2565 {:<26} \u2565 {:<22} \u2565 {}".format(
                "type",
                "storage index",
                "progress",
                "status message",
            ), file=options.stdout
        )
        print(u"\u255f\u2500{}\u2500\u256b\u2500{}\u2500\u256b\u2500{}\u2500\u256b\u2500{}".format(u'\u2500' * 5, u'\u2500' * 26, u'\u2500' * 22, u'\u2500' * 20), file=options.stdout)
        for op in status_data['active']:
            if 'progress-hash' in op:
                op_type = ' put '
                total = (op['progress-hash'] + op['progress-ciphertext'] + op['progress-encode-push']) / 3.0
                progress_bar = u"{}".format(pretty_progress(total * 100.0, size=15))
            else:
                op_type = ' get '
                total = op['progress']
                progress_bar = u"{}".format(pretty_progress(op['progress'] * 100.0, size=15))
            print(
                u"\u2551 {op_type} \u2551 {storage-index-string} \u2551 {progress_bar} ({total:3}%) \u2551 {status}".format(
                    op_type=op_type,
                    progress_bar=progress_bar,
                    total=int(total * 100.0),
                    **op
                ), file=options.stdout
            )
        print(u"\u2559\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}".format(u'\u2500' * 5, u'\u2500' * 26, u'\u2500' * 22, u'\u2500' * 20), file=options.stdout)
    else:
        print(u"No active operations.", file=options.stdout)

    if status_data.get('recent', None):
        non_verbose_ops = ('upload', 'download')
        recent = [op for op in status_data['recent'] if op['type'] in non_verbose_ops]
        print(u"\nRecent operations:", file=options.stdout)
        if len(recent) or options['verbose']:
            print(
                u"\u2553 {:<5} \u2565 {:<26} \u2565 {:<10} \u2565 {}".format(
                    "type",
                    "storage index",
                    "size",
                    "status message",
                ), file=options.stdout
            )

        op_map = {
            'upload': ' put ',
            'download': ' get ',
            'retrieve': 'retr ',
            'publish': ' pub ',
            'mapupdate': 'mapup',
        }

        ops_to_show = status_data['recent'] if options['verbose'] else recent
        for op in ops_to_show:
            op_type = op_map[op.get('type', None)]
            if op['type'] == 'mapupdate':
                nice_size = op['mode']
            else:
                nice_size = abbreviate_space(op['total-size'])
            print(
                u"\u2551 {op_type} \u2551 {storage-index-string} \u2551 {nice_size:<10} \u2551 {status}".format(
                    op_type=op_type,
                    nice_size=nice_size,
                    **op
                ), file=options.stdout
            )

        if len(recent) or options['verbose']:
            print(u"\u2559\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}\u2500\u2568\u2500{}".format(u'\u2500' * 5, u'\u2500' * 26, u'\u2500' * 10, u'\u2500' * 20), file=options.stdout)

        skipped = len(status_data['recent']) - len(ops_to_show)
        if not options['verbose'] and skipped:
            print(u"  Skipped {} non-upload/download operations; use --verbose to see".format(skipped), file=options.stdout)
    else:
        print(u"No recent operations.", file=options.stdout)

    # open question: should we return non-zero if there were no
    # operations at all to display?
    return 0
def test_abbrev_time_year(self):
    diff = timedelta(weeks=(5 * 52) + 1)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('5 years ago', s)

def test_abbrev_time_month(self):
    diff = timedelta(days=91)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('3 months ago', s)

def test_abbrev_time_day(self):
    diff = timedelta(hours=49)  # must be more than 2 days
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('2 days ago', s)

def test_abbrev_time_hours(self):
    diff = timedelta(hours=4)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('4 hours ago', s)

def test_abbrev_time_future_5_minutes(self):
    diff = timedelta(minutes=-5)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('5 minutes in the future', s)

def test_abbrev_time_25s(self):
    diff = timedelta(seconds=25)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('25 seconds ago', s)

def test_abbrev_time_1s(self):
    diff = timedelta(seconds=1)
    s = abbreviate.abbreviate_time(diff)
    self.assertEqual('1 second ago', s)