Example #1
import sys
import time
from queue import Queue, Full as QueueFull

# AsyncFn, upgrade_loop and pretty_print_size are helpers defined
# elsewhere in the surrounding module; they are not part of this excerpt.
def update_obj_metadata(backend, backend_factory, db,
                        thread_count, on_return):
    '''Upgrade metadata of storage objects'''

    plain_backend = backend.backend

    # No need to update sequence number, since we are going to
    # write out a new one after the upgrade.
    if backend.passphrase is None:
        extra_objects = { 's3ql_metadata' }
    else:
        extra_objects = { 's3ql_metadata',
                          's3ql_passphrase', 's3ql_passphrase_bak1',
                          's3ql_passphrase_bak2', 's3ql_passphrase_bak3' }

    for i in range(30):
        obj_id = 's3ql_metadata_bak_%d' % i
        if obj_id in plain_backend:
            extra_objects.add(obj_id)

    def yield_objects():
        for (id_,) in db.query('SELECT id FROM objects'):
            yield 's3ql_data_%d' % id_
        for obj_id in extra_objects:
            yield obj_id
    total = db.get_val('SELECT COUNT(id) FROM objects') + len(extra_objects)

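    # Bounded queue: once every worker has a pending object the producer
    # below blocks, which keeps the backlog (and memory use) in check.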
    queue = Queue(maxsize=thread_count)
    threads = []
    for _ in range(thread_count):
        t = AsyncFn(upgrade_loop, queue, on_return.push(backend_factory()))
        # Don't wait for worker threads; otherwise we get a deadlock if
        # the main thread terminates with an exception.
        t.daemon = True
        t.start()
        threads.append(t)

    # Updating this value is prone to race conditions. However,
    # we don't care because this is for an approximate progress
    # output only.
    queue.rewrote_size = 0
    stamp = 0
    for (i, obj_id) in enumerate(yield_objects()):
        stamp2 = time.time()
        if stamp2 - stamp > 1:
            sys.stdout.write('\r..processed %d/%d objects (%.1f%%, %s rewritten)..'
                             % (i, total, i/total*100,
                                pretty_print_size(queue.rewrote_size)))
            sys.stdout.flush()
            stamp = stamp2

            # Terminate early if any thread failed with an exception
            for t in threads:
                if not t.is_alive():
                    t.join_and_raise()

        # Avoid blocking if all threads terminated
        while True:
            try:
                queue.put(obj_id, timeout=1)
            except QueueFull:
                pass
            else:
                break
            for t in threads:
                if not t.is_alive():
                    t.join_and_raise()

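    # Enlarge the queue so that the sentinel None values below can be
    # enqueued without blocking, even if all regular slots are occupied.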
    queue.maxsize += len(threads)
    for t in threads:
        queue.put(None)

    for t in threads:
        t.join_and_raise()

    sys.stdout.write('\n')
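
The worker function upgrade_loop is defined elsewhere in the module and not
shown in this example. As a rough, hypothetical sketch of the consumer side
of this producer/worker pattern, assuming a backend that exposes
lookup()/update_meta()-style calls for reading and writing per-object
metadata, a worker could look roughly like this:

def upgrade_loop(queue, backend):
    # Sketch only: the real upgrade_loop lives elsewhere in the module.
    # It drains object ids from *queue* until the None sentinel arrives.
    while True:
        obj_id = queue.get()
        if obj_id is None:
            # Sentinel pushed by the main thread: no more work, exit.
            break
        meta = backend.lookup(obj_id)        # fetch current metadata
        # ... convert *meta* to the new format here (backend specific) ...
        backend.update_meta(obj_id, meta)    # write back upgraded metadata
        # Rough accounting for the progress output in the main thread
        queue.rewrote_size += len(repr(meta))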