def rebuild():
    """Back up and rebuild the content database from on-disk metadata.

    Sequence: commit and close the live DB, copy the DB file to a backup
    location, delete the original file, reconnect, re-run migrations, and
    re-import content metadata via the Archive.  The content cache is
    invalidated afterwards so stale entries are dropped.

    Returns:
        float: wall-clock duration of the rebuild, in seconds.
    """
    conf = request.app.config
    db_configs = get_database_configs(conf)
    dbpath = db_configs[DB_NAME]['path']
    bpath = get_backup_path()
    start = time.time()
    db = request.db.content
    logging.debug('Locking database')
    # Take the DB-level lock first, then the process/global lock, so no
    # other writer can slip in between the commit below and the file swap.
    db.acquire_lock()
    logging.debug('Acquiring global lock')
    with global_lock(always_release=True):
        # Flush pending writes and release the file handle before copying,
        # otherwise the backup could capture a half-written database.
        db.commit()
        db.close()
        backup(dbpath, bpath)
        remove_dbfile()
        logging.debug('Removed database')
        # Reconnect creates a fresh (empty) DB file; migrations rebuild
        # the schema before content is re-imported.
        db.reconnect()
        run_migrations(db, db_configs[DB_NAME])
        logging.debug('Prepared new database')
        archive = Archive.setup(conf['library.backend'],
                                request.app.supervisor.exts.fsal,
                                db,
                                contentdir=conf['library.contentdir'],
                                meta_filenames=conf['library.metadata'])
        rows = archive.reload_content()
        logging.info('Restored metadata for %s pieces of content', rows)
        # Drop cached content entries that reference the old database.
        request.app.supervisor.exts.cache.invalidate('content')
    # NOTE(review): original formatting was lost; this log and the timing
    # epilogue are placed after the `with` exits, which is when the global
    # lock is actually released — confirm against upstream history.
    logging.debug('Released global lock')
    end = time.time()
    return end - start
def rebuild():
    """Recreate the content database and reload all content metadata.

    The live database is committed, closed, backed up, and deleted; a
    fresh file is then created, migrated, and repopulated from on-disk
    metadata.  Cached content entries are invalidated at the end.

    Returns:
        float: number of seconds the rebuild took.
    """
    config = request.app.config
    configs = get_database_configs(config)
    db_path = configs[DB_NAME]['path']
    backup_path = get_backup_path()
    started_at = time.time()
    content_db = request.db.content
    logging.debug('Locking database')
    content_db.acquire_lock()
    logging.debug('Acquiring global lock')
    with global_lock(always_release=True):
        # Flush and detach before touching the file on disk.
        content_db.commit()
        content_db.close()
        backup(db_path, backup_path)
        remove_dbfile()
        logging.debug('Removed database')
        # A fresh file appears on reconnect; migrations restore the schema.
        content_db.reconnect()
        run_migrations(content_db, configs[DB_NAME])
        logging.debug('Prepared new database')
        archive = Archive.setup(
            config['library.backend'],
            request.app.supervisor.exts.fsal,
            content_db,
            contentdir=config['library.contentdir'],
            meta_filenames=config['library.metadata'])
        row_count = archive.reload_content()
        logging.info('Restored metadata for %s pieces of content', row_count)
        # Evict cached results that point at the replaced database.
        request.app.supervisor.exts.cache.invalidate('content')
    logging.debug('Released global lock')
    finished_at = time.time()
    return finished_at - started_at
def get_dbpath():
    """Return the filesystem path of the content database file."""
    db_configs = get_database_configs(request.app.config)
    return db_configs[DB_NAME]['path']
def get_dbpath():
    """Look up and return the path to the content database file."""
    app_config = request.app.config
    all_configs = get_database_configs(app_config)
    content_config = all_configs[DB_NAME]
    return content_config['path']