def du(paths, grand_total, summarize, human_readable):
    """Display disk usage statistics."""
    usage = []
    for path in paths:
        normalized = normalize_path(path)
        if ispathdir(path):
            # Directory: total up every object beneath it.
            handle = CloudFiles(normalized, green=True)
            usage.append(handle.size(handle.list()))
        else:
            # Single object: query its size directly.
            handle = CloudFiles(os.path.dirname(normalized), green=True)
            nbytes = handle.size(os.path.basename(normalized))
            if nbytes is None:
                print(f"cloudfiles: du: {path} does not exist")
                return
            usage.append({path: nbytes})

    def SI(val):
        # Humanize byte counts when requested; otherwise pass through raw.
        if not human_readable:
            return val
        if val < 1024:
            return f"{val} Bytes"
        for exp, unit in ((10, "KiB"), (20, "MiB"), (30, "GiB"), (40, "TiB"), (50, "PiB")):
            if val < 2 ** (exp + 10):
                return f"{(val / 2**exp):.2f} {unit}"
        return f"{(val / 2**60):.2f} EiB"

    summary = {}
    for path, sizes in zip(paths, usage):
        subtotal = sum(sizes.values())
        summary[path] = subtotal
        if summarize:
            print(f"{SI(subtotal)}\t{path}")

    if not summarize:
        # Per-object listing instead of per-path totals.
        for sizes in usage:
            for name, nbytes in sizes.items():
                print(f"{SI(nbytes)}\t{name}")

    if grand_total:
        print(f"{SI(sum(summary.values()))}\ttotal")
def test_size(s3, protocol, compress, green):
    """CloudFiles.size returns stored (post-compression) byte counts.

    Covers: a compressed object, an uncompressed object, a zero-length
    object, a batch (list) query, and a nonexistent key (None).
    """
    # NOTE: `exceptions` was imported here but never used; removed.
    from cloudfiles import CloudFiles, compression

    url = compute_url(protocol, 'size')
    cf = CloudFiles(url)
    content = b'some_string'

    cf.put('info', content, compress=compress, cache_control='no-cache')
    cf['info2'] = content  # __setitem__ path: stored without compression
    cf.put('zero', b'', compress=None, cache_control='no-cache')

    # size() reports on-storage bytes, so compare against the compressed form.
    compressed_content = compression.compress(content, compress)
    assert cf.size('info') == len(compressed_content)
    assert cf.size(['info', 'info2']) == {
        "info": len(compressed_content),
        "info2": len(content),
    }
    assert cf.size('nonexistent') is None
    assert cf.size('zero') == 0

    # Clean up so other tests see an empty bucket.
    cf.delete(['info', 'info2', 'zero'])