def rebuild():
    """Back up the content database, recreate it from scratch and reload all
    content metadata from the library. Returns the elapsed time in seconds."""
    conf = request.app.config
    db_configs = get_database_configs(conf)
    dbpath = db_configs[DB_NAME]['path']
    bpath = get_backup_path()
    start = time.time()
    db = request.db.content
    logging.debug('Locking database')
    db.acquire_lock()
    logging.debug('Acquiring global lock')
    with global_lock(always_release=True):
        # Flush pending writes and detach from the database file before it is
        # backed up and removed
        db.commit()
        db.close()
        backup(dbpath, bpath)
        remove_dbfile()
        logging.debug('Removed database')
        # Recreate an empty database file and bring its schema up to date
        db.reconnect()
        run_migrations(db, db_configs[DB_NAME])
        logging.debug('Prepared new database')
        archive = Archive.setup(conf['library.backend'],
                                request.app.supervisor.exts.fsal,
                                db,
                                contentdir=conf['library.contentdir'],
                                meta_filenames=conf['library.metadata'])
        rows = archive.reload_content()
        logging.info('Restored metadata for %s pieces of content', rows)
    # Drop cached content listings now that the database has been rebuilt
    request.app.supervisor.exts.cache.invalidate('content')
    logging.debug('Released global lock')
    end = time.time()
    return end - start
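
# Hedged usage sketch, not part of the original listing: rebuild() returns the
# elapsed time in seconds (end - start), so a caller such as a maintenance
# view could surface that value. The handler name and message below are
# hypothetical.
def rebuild_handler():
    elapsed = rebuild()
    return 'Database rebuilt in %.2f seconds' % elapsed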
Example No. 2
def open_archive(config=None):
    """Return an Archive instance built from the given config mapping,
    falling back to the current application's config."""
    conf = config or request.app.config
    return Archive.setup(conf['library.backend'],
                         request.app.supervisor.exts.fsal,
                         request.db.content,
                         contentdir=conf['library.contentdir'],
                         meta_filenames=conf['library.metadata'])
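
# Hedged usage sketch, not part of the original listing: open_archive() relies
# on being called during request handling, where request.db.content and the
# FSAL extension are available. The function name and path argument below are
# hypothetical; get_single() is the same lookup used by the wrapper further
# down.
def example_lookup(path):
    archive = open_archive()
    return archive.get_single(path)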
        # Inner wrapper of a content-lookup decorator; ``func`` and
        # ``abort_if_not_found`` are closed over from the enclosing scope.
        def wrapper(path, **kwargs):
            # Decode the URL-encoded path and look the item up in the archive
            path = urlunquote(path)
            conf = request.app.config
            archive = Archive.setup(
                conf["library.backend"],
                request.app.supervisor.exts.fsal,
                request.db.content,
                contentdir=conf["library.contentdir"],
                meta_filenames=conf["library.metadata"],
            )
            content = archive.get_single(path)
            if not content:
                # Either respond with 404 or let the view handle the miss
                if abort_if_not_found:
                    abort(404)
                return func(path=path, meta=None, **kwargs)

            # Wrap the raw content record in a Meta object before passing it
            # to the decorated view
            meta = metadata.Meta(request.app.supervisor, content.path, data=content)
            return func(path=path, meta=meta, **kwargs)
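
# Hedged reconstruction, not part of the original listing: the wrapper above
# is indented as the innermost function of a decorator factory that closes
# over ``func`` and ``abort_if_not_found``. The factory and decorator names
# below are assumptions; only the wrapper body itself comes from the snippet.
import functools

def with_content(abort_if_not_found=True):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(path, **kwargs):
            ...  # body as shown above
        return wrapper
    return decorator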