else: return errors if __name__ == '__main__': sys.path.insert(0, os.path.realpath('%s/../..' % os.path.dirname(os.path.realpath(__file__)))) import autoconf settings = autoconf.settings logger = autoconf.logger import plugins from plugins import redis_db plugins.install_plugin('db', redis_db, autoconf.settings) from plugins.ucnk_conc_persistence2 import KEY_ALPHABET, PERSIST_LEVEL_KEY parser = argparse.ArgumentParser(description='Archive old records from Synchronize data from mysql db to redis') parser.add_argument('-k', '--key-prefix', type=str, help='Processes just keys with defined prefix') parser.add_argument('-c', '--cron-interval', type=int, help='Non-empty values initializes partial processing with ' 'defined interval between chunks') parser.add_argument('-d', '--dry-run', action='store_true', help='allows running without affecting storage data') parser.add_argument('-l', '--log-file', type=str, help='A file used for logging. If omitted then stdout is used') args = parser.parse_args() autoconf.setup_logger(log_path=args.log_file, logger_name='conc_archive') run(conf=settings, key_prefix=args.key_prefix, cron_interval=args.cron_interval, dry_run=args.dry_run, persist_level_key=PERSIST_LEVEL_KEY, key_alphabet=KEY_ALPHABET)
if __name__ == '__main__':
    import time

    # BUGFIX: the original implicit concatenation 'database and' 'synchronize'
    # produced the help text "...database andsynchronize..." — a space was missing.
    parser = argparse.ArgumentParser(description='Check for changes in UCNK database and '
                                                 'synchronize with KonText')
    parser.add_argument('conf_path', metavar='CONF_PATH', type=str,
                        help='Path to a config file')
    parser.add_argument('-t', '--interval', type=int, default=DEFAULT_CHECK_INTERVAL,
                        help='how often (in minutes) script runs (default=%d)' % DEFAULT_CHECK_INTERVAL)
    parser.add_argument('-d', '--dry-run', action='store_true',
                        help='allows running without affecting storage data')
    args = parser.parse_args()

    # BUGFIX: close the config file deterministically; the original
    # json.load(open(...)) leaked the file handle.
    with open(args.conf_path) as conf_file:
        conf = json.load(conf_file)

    autoconf.setup_logger(log_path=conf['logging']['path'],
                          logger_name='user_db_sync',
                          logging_level=autoconf.LOG_LEVELS[conf['logging']['level']])

    # MySQL connection parameters come from the 'auth' plug-in configuration;
    # force utf8/unicode so user names survive the transfer.
    mysql_params = ucnk_remote_auth2.create_auth_db_params(
        autoconf.settings.get('plugins', 'auth'))
    mysql_params.update(dict(charset='utf8', use_unicode=True))

    redis_params = autoconf.settings.get('plugins', 'db')
    redis_params = dict(host=redis_params['default:host'],
                        port=redis_params['default:port'],
                        db_id=redis_params['default:id'])

    # NOTE: `w` and `t` are consumed by the code that follows this chunk —
    # keep both names.
    w = DbSync(mysql_conn=ucnk_remote_auth2.connect_auth_db(**mysql_params),
               redis_params=redis_params,
               check_interval=args.interval,
               db_name=mysql_params['db'],
               default_user_corpora=conf.get('default_corpora', ('susanne',)))
    t = time.time()
description= 'Archive old records from Synchronize data from mysql db to redis') parser.add_argument('-k', '--key-prefix', type=str, help='Processes just keys with defined prefix') parser.add_argument( '-c', '--cron-interval', type=int, help='Non-empty values initializes partial processing with ' 'defined interval between chunks') parser.add_argument('-d', '--dry-run', action='store_true', help='allows running without affecting storage data') parser.add_argument( '-l', '--log-file', type=str, help='A file used for logging. If omitted then stdout is used') args = parser.parse_args() autoconf.setup_logger(log_path=args.log_file, logger_name='conc_archive') run(conf=settings, key_prefix=args.key_prefix, cron_interval=args.cron_interval, dry_run=args.dry_run, persist_level_key=PERSIST_LEVEL_KEY, key_alphabet=KEY_ALPHABET)
from plugins import default_conc_cache parser = argparse.ArgumentParser(description='A script to control UCNK metadata cache') parser.add_argument('--dry-run', '-d', action='store_true', help='Just analyze, do not modify anything') parser.add_argument('--exclude', '-x', type=str, default=None, help='Do not analyze/clean specified subdirectories') parser.add_argument('--corpus', '-c', type=str, help='A concrete corpus to be processed') parser.add_argument('--ttl', '-t', type=int, default=DEFAULT_TTL, help='How old files (in minutes) will be preserved yet. Default is %s min.' % DEFAULT_TTL) parser.add_argument('--subdir', '-s', type=str, default=None, help='Search will be performed in [default:cache_dir]/[subdir]') parser.add_argument('--log-level', '-l', type=str, default='info', help='Logging level (%s)' % ', '.join(autoconf.LOG_LEVELS.keys())) parser.add_argument('--log-path', '-p', type=str, default=None, help='Where to write the log. If omitted then %s is used' % autoconf.DEFAULT_LOG_OUT) args = parser.parse_args() autoconf.setup_logger(log_path=args.log_path, logger_name='conc_cache_cleanup', logging_level=autoconf.LOG_LEVELS[args.log_level]) root_dir = autoconf.settings.get('plugins', 'conc_cache')['default:cache_dir'] run(root_dir=root_dir, corpus_id=args.corpus, ttl=args.ttl, subdir=args.subdir, dry_run=args.dry_run, cache_map_filename=default_conc_cache.CacheMapping.CACHE_FILENAME, locking_plugin=plugins.get('locking'))
def mk_key(corpus_id): return RedisCacheMapping.KEY_TEMPLATE % corpus_id parser = argparse.ArgumentParser(description='A script to control UCNK metadata cache') parser.add_argument('--dry-run', '-d', action='store_true', help='Just analyze, do not modify anything') parser.add_argument('--exclude', '-x', type=str, default=None, help='Do not analyze/clean specified subdirectories') parser.add_argument('--corpus', '-c', type=str, help='A concrete corpus to be processed') parser.add_argument('--ttl', '-t', type=int, default=cleanup.DEFAULT_TTL, help='How old files (in minutes) will be preserved yet. Default is %s min.' % cleanup.DEFAULT_TTL) parser.add_argument('--subdir', '-s', type=str, default=None, help='Search will be performed in [default:cache_dir]/[subdir]') parser.add_argument('--log-level', '-l', type=str, default='info', help='Logging level (%s)' % ', '.join(autoconf.LOG_LEVELS.keys())) parser.add_argument('--log-path', '-p', type=str, default=None, help='Where to write the log. If omitted then %s is used' % autoconf.DEFAULT_LOG_OUT) args = parser.parse_args() autoconf.setup_logger(log_path=args.log_path, logger_name='conc_cache_cleanup', logging_level=autoconf.LOG_LEVELS[args.log_level]) root_dir = autoconf.settings.get('plugins', 'conc_cache')['default:cache_dir'] cleanup.run(root_dir=root_dir, corpus_id=args.corpus, ttl=args.ttl, subdir=args.subdir, dry_run=args.dry_run, db_plugin=plugins.runtime.DB.instance, entry_key_gen=mk_key)
print('+-------------------------------+') print('| removing cache data |') print('+-------------------------------+') max_len = max([len(x) for x in corplist]) for corp in sorted(corplist): db = live_attrs.db(corp) result = _clear_cache_in_db(db) print('%s%s -> %s' % (corp, ' ' * (max_len - len(corp)), result)) ACTION_MAP = { 'status': print_status, 'clear': clear_cache } if __name__ == '__main__': autoconf.setup_logger(logger_name='conc_archive') parser = argparse.ArgumentParser(description='A script to control UCNK metadata cache') parser.add_argument('action', metavar='ACTION', help='one of {status, clear}') args = parser.parse_args() ctree = corparch.create_instance(autoconf.settings) ctree.setup(lang='en') live_attrs = ucnk_live_attributes.create_instance(ctree, autoconf.settings) corplist = get_corplist(ctree) try: ACTION_MAP[args.action](corplist, live_attrs) except KeyError: print('Unknown action "%s"' % args.action) sys.exit(1)