Example No. 1
import json

from sh import rbd


def get_rbd_mapped():
    # List every RBD image currently mapped as an nbd device, together with
    # its parent image and snapshot. Ceph and Part are helpers defined
    # elsewhere in this project.
    result = []
    mapped = rbd('--format', 'json', '-t', 'nbd', 'device', 'list')
    for entry in json.loads(mapped.stdout):
        info = Ceph(None).backup.info(entry['image'])['parent']
        part = Part(dev=entry['device'],
                    image=entry['image'],
                    parent_image=info['image'],
                    parent_snap=info['snapshot'])
        result.append(part)
    return result
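
A minimal usage sketch, assuming Part exposes the keyword arguments above as attributes (dev, image, parent_image, parent_snap):

for part in get_rbd_mapped():
    # print each mapped nbd device with its backing image and snapshot
    print(part.dev, part.image, part.parent_image, part.parent_snap)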
Example No. 2
import json
from sh import rbd

def du(image):
    # 'pool' is the backup pool name set at module level (see Example No. 3)
    data = rbd('--format', 'json', '-p', pool, 'du', image)
    data = data.stdout.decode('utf-8')
    data = json.loads(data)['images']
    return data
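
A minimal usage sketch with a hypothetical image name; each returned entry carries the name and used_size fields that Example No. 3 aggregates:

# 'vm00-root' is a made-up image name for illustration
sizes = du('vm00-root')
total = sum(entry['used_size'] for entry in sizes)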
Example No. 3
import humanize
import multiprocessing
from config import config
from sh import rbd

pool = config['backup_cluster']['pool']


def du(image):
    data = rbd('--format', 'json', '-p', pool, 'du', image)
    data = data.stdout.decode('utf-8')
    data = json.loads(data)['images']
    return data


data = rbd('--format', 'json', '-p', pool, 'ls')
data = data.stdout.decode('utf-8')
data = json.loads(data)

result = {}

with multiprocessing.Pool(config['backup_worker']) as p:
    # run du() for every image in parallel and sum usage per image name
    for sizes in p.imap_unordered(du, data):
        for i in sizes:
            try:
                result[i['name']] += i['used_size']
            except KeyError:
                result[i['name']] = i['used_size']

# order images by total used size, smallest first
result = [(k, result[k]) for k in sorted(result, key=result.get)]
for key, value in result:
    # loop body assumed: report each total in human-readable form
    print(key, humanize.naturalsize(value, binary=True))
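
The per-image rbd du calls are presumably the slow part here, so the script spreads them over a process pool sized by config['backup_worker'], sums used_size per image name as results arrive, and finally prints the per-image totals ordered by size.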