import os
from glob import iglob
from typing import Tuple

from human_byte_size import sizeof_fmt


def get_dir_total_size(dir_name: str) -> Tuple[int, str]:
    total_size = 0

    for file_name in iglob(dir_name + '/**/*', recursive=True):
        try:
            if os.path.isfile(file_name):
                total_size += os.path.getsize(file_name)
        except Exception as e:
            print('File: "{}", error: "{}"'.format(file_name, e))

    return total_size, sizeof_fmt(total_size)
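# Usage sketch (not part of the original file): get_dir_total_size() returns both the raw
# byte count and the human-readable string, so a caller can use either form.
# The directory path '.' below is just a hypothetical example.
if __name__ == '__main__':
    size, size_str = get_dir_total_size('.')
    print('Current directory: {} bytes ({})'.format(size, size_str))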
import io
import json

# pip install Pillow
from PIL import Image

from human_byte_size import sizeof_fmt


def get_image_info(file_name__or__bytes__or__bytes_io, pretty_json_str=False):
    data = file_name__or__bytes__or__bytes_io
    type_data = type(data)

    # File name
    if type_data == str:
        with open(data, mode='rb') as f:
            data = f.read()

    if type(data) == bytes:
        data = io.BytesIO(data)

    length = len(data.getvalue())

    exif = get_exif_tags(data)
    img = Image.open(data)

    # Save order
    info = dict()
    info['length'] = {
        'value': length,
        'text': sizeof_fmt(length),
    }
    info['format'] = img.format
    info['mode'] = img.mode
    info['channels'] = len(img.getbands())
    info['bit_color'] = {
        '1': 1, 'L': 8, 'P': 8, 'RGB': 24, 'RGBA': 32,
        'CMYK': 32, 'YCbCr': 24, 'I': 32, 'F': 32
    }[img.mode]
    info['size'] = {
        'width': img.width,
        'height': img.height,
    }
    info['exif'] = exif

    if pretty_json_str:
        info = json.dumps(info, indent=4, ensure_ascii=False)

    return info
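# Usage sketch (assumptions: 'image.png' is a hypothetical file path, and get_exif_tags()
# is provided elsewhere in this module). get_image_info() accepts a file name, raw bytes,
# or an io.BytesIO object; pretty_json_str=True returns a formatted JSON string
# instead of a dict.
if __name__ == '__main__':
    print(get_image_info('image.png', pretty_json_str=True))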
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = 'ipetrash'


import os
from os.path import getsize, join

from human_byte_size import sizeof_fmt


# The dictionary remembers folder sizes. By the time the iterator reaches a parent
# folder, the sizes of its nested folders are already in the dictionary
dir_sizes = dict()

for root, dirs, files in os.walk('.', topdown=False):
    size = sum(getsize(join(root, f)) for f in files)
    size += sum(dir_sizes[join(root, d)] for d in dirs)
    dir_sizes[root] = size

for path, total_size in sorted(dir_sizes.items(), key=lambda x: x[0]):
    print('{} : {} ({})'.format(path, sizeof_fmt(total_size), total_size))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = 'ipetrash'


import zipfile
import sys

from pathlib import Path
sys.path.append(str(Path(__file__).resolve().parent.parent.parent))

from human_byte_size import sizeof_fmt


FILE_NAME = Path('Doc_df7c89c378c04e8daf69257ea95d9a2e.zip')
print('Zip size:', sizeof_fmt(len(FILE_NAME.read_bytes())))

with zipfile.ZipFile('Doc_df7c89c378c04e8daf69257ea95d9a2e.zip') as f:
    data_file = f.read('Doc_df7c89c378c04e8daf69257ea95d9a2e.html')

    size = sizeof_fmt(len(data_file))
    print(f'File size: {size}')
    print(f'data_file[:100]: {data_file[:100]}')
import sys
from pathlib import Path
from collections import defaultdict

# pip install psutil
import psutil

sys.path.append(str(Path(__file__).resolve().parent.parent))
from human_byte_size import sizeof_fmt


column_width = defaultdict(int)

process_list = []
for p in psutil.process_iter():
    memory = p.memory_info().rss
    cols = p.name(), str(memory) + ' bytes', sizeof_fmt(memory)
    process_list.append(cols)

    for i, x in enumerate(cols):
        column_width[i] = max(column_width[i], len(x))

# Sort by memory size
process_list.sort(key=lambda x: int(x[1].split(' ')[0]), reverse=True)

for p in process_list:
    row = [x.rjust(column_width[i]) for i, x in enumerate(p)]
    print(' | '.join(row))
import sys
from pathlib import Path

# pip install psutil
import psutil

sys.path.append(str(Path(__file__).resolve().parent.parent))
from human_byte_size import sizeof_fmt


print('Disk partitions:')
for disk in psutil.disk_partitions():
    print(' {}'.format(disk))

print()
print('Disk usage:')
for disk in filter(lambda x: 'fixed' in x.opts, psutil.disk_partitions()):
    info = psutil.disk_usage(disk.device)
    print(' {} {}'.format(disk.device, info))
    print(' {} free of {}'.format(sizeof_fmt(info.free), sizeof_fmt(info.total)))
    print()

print()
print('Disk io (input/output) total sum counters:')
print(' {}'.format(psutil.disk_io_counters()))

physical_drive_by_info = list(psutil.disk_io_counters(True).items())

print()
print('Physical drive io (input/output) counters ({}):'.format(
    len(physical_drive_by_info)))

# for drive, info in physical_drive_by_info:
#     print(' {}: {}'.format(drive, info))
import os
import sys
from pathlib import Path

# pip install tqdm
from tqdm import tqdm

# pip install requests
import requests

sys.path.append(str(Path(__file__).resolve().parent.parent.parent))
from human_byte_size import sizeof_fmt


url = 'https://github.com/gil9red/NotesManager/raw/master/bin.rar'

# Streaming, so we can iterate over the response.
rs = requests.get(url, stream=True)

# Total size in bytes.
total_size = int(rs.headers.get('content-length', 0))
print('From content-length:', sizeof_fmt(total_size))

chunk_size = 1024
num_bars = int(total_size / chunk_size)

file_name = os.path.basename(url)
with open(file_name, mode='wb') as f:
    for data in tqdm(rs.iter_content(chunk_size), total=num_bars, unit='KB', file=sys.stdout):
        f.write(data)

# Read from file
with open(file_name, mode='rb') as f:
    file_data = f.read()

print('File data size:', sizeof_fmt(len(file_data)))
paths = sorted(set(paths))

for file_name in paths:
    size, size_str = get_dir_total_size(file_name)
    print('{:<15} {:10} {}'.format(size, size_str, file_name))

    total_size += size

    disc = file_name[0]
    total_size_by_disc[disc] += size

    total_items.append((size, size_str, file_name))
    disc_by_total_items[disc].append((size, size_str, file_name))

print()
print('Total size:', total_size, sizeof_fmt(total_size))
for disc in sorted(total_size_by_disc):
    size = total_size_by_disc[disc]
    print(' {} {:<15} {}'.format(disc, size, sizeof_fmt(size)))

print()
print('Top all:')
for size, size_str, file_name in sorted(total_items, key=lambda x: x[0], reverse=True)[:5]:
    print(' {:<15} bytes {:10} {}'.format(size, size_str, file_name))

print()
for disc, total_items in disc_by_total_items.items():
td, th {
    padding: 3px; /* Padding around the table cell content */
    border: 1px solid black; /* Border settings */
}
</style>
</head>
<body>
<table>
''')
f.write('<caption>{}</caption>'.format(dir_name))
f.write('<tr><td>{}</td><td>{}</td><td>{}</td></tr>'.format(
    'FILE NAME', 'SIZE', 'LAST MODIFICATION'))

for file_name, file_stat in files_sorted_by_size:
    f.write('<tr>')
    f.write('<td>{}</td><td>{}</td><td>{}</td>'.format(
        '<a href="file://{f}">{f}</a>'.format(f=file_name),
        sizeof_fmt(file_stat.st_size),
        get_date_as_string(file_stat.st_mtime)))
    f.write('</tr>')

f.write('''
</table>
</body>
</html>
''')
from typing import Dict, List


def print_files_table(files: List[Dict]):
    rows = [
        (file['name'], sizeof_fmt(file['size']))
        for file in sorted(files, key=lambda x: x['name'])
    ]
    headers = ['#', 'File Name', 'Size']
    print_table(rows, headers)
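# Usage sketch (assumptions: print_table() is the table-rendering helper used elsewhere in
# this module, and the file names/sizes below are made up). Each entry only needs
# 'name' and 'size' keys; sizes are rendered through sizeof_fmt().
if __name__ == '__main__':
    print_files_table([
        {'name': 'report.pdf', 'size': 1_234_567},
        {'name': 'notes.txt', 'size': 2048},
    ])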
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

__author__ = 'ipetrash'


import shutil

from human_byte_size import sizeof_fmt


usage = shutil.disk_usage('C://')
print('{} free of {}'.format(sizeof_fmt(usage.free), sizeof_fmt(usage.total)))
print()

print('total: {:>8} ({} bytes)'.format(sizeof_fmt(usage.total), usage.total))
print('used: {:>8} ({} bytes)'.format(sizeof_fmt(usage.used), usage.used))
print('free: {:>8} ({} bytes)'.format(sizeof_fmt(usage.free), usage.free))