def main():
    """Run a cleaner that periodically reclaims the docker build cache.

    Registers a single worker that invokes
    ``clean_docker_cache.check_and_clean`` with a threshold of 10 every
    cycle, capped at a 5-minute timeout, then blocks until the cleaner
    finishes.
    """
    common.setup_logging()
    cleaner = Cleaner()
    worker = Worker(
        clean_docker_cache.check_and_clean, 10, timeout=timedelta(minutes=5))
    cleaner.add_worker("clean_docker_cache", worker)
    cleaner.start()
    cleaner.sync()
def main():
    """Entry point: run the cleaner worker selected on the command line.

    Parses the positional ``option`` argument, looks up the matching
    liveness file and worker, then runs the cleaner until it finishes.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "option",
        # Restrict to the supported keys so argparse reports a usage error
        # instead of the lookups below raising a bare KeyError.
        choices=["docker_cache", "deleted_files"],
        help="the functions currently supported: [docker_cache | deleted_files]")
    args = parser.parse_args()
    common.setup_logging()
    cleaner = Cleaner(liveness_files[args.option])
    cleaner.add_worker(args.option, get_worker(args.option))
    cleaner.start()
    cleaner.sync()
def main():
    """Entry point: run the docker cleaner with CLI-supplied settings.

    Command-line flags:
        -t/--threshold: disk usage percent at which cleaning starts.
        -i/--interval:  base interval (seconds, presumably — confirm against
                        DockerCleaner) between disk-usage checks.
    """
    parser = argparse.ArgumentParser()
    # argparse yields strings by default; DockerCleaner clearly expects
    # numbers (a percentage and an interval), so convert at the boundary.
    parser.add_argument(
        "-t", "--threshold", type=float,
        help="the disk usage percent to start cleaner")
    parser.add_argument(
        "-i", "--interval", type=int,
        help="the base interval to check disk usage")
    args = parser.parse_args()
    common.setup_logging()
    cleaner = DockerCleaner(args.threshold, args.interval,
                            timedelta(minutes=10))
    cleaner.run()
def setUp(self):
    """Configure logging before each test case runs."""
    common.setup_logging()
import multiprocessing

from cleaner.utils import common

logger = multiprocessing.get_logger()


def get_cache_size():
    """Return the reclaimable docker cache size reported by the helper script.

    Runs ``scripts/reclaimable_docker_cache.sh`` and parses its first output
    line as a float. Returns 0 when the script produces no output or output
    that cannot be parsed as a number.
    """
    out = common.run_cmd(
        "source ./scripts/reclaimable_docker_cache.sh 2> /dev/null", logger)
    if not out:
        logger.error("cannot retrieve cache size.")
        return 0
    try:
        return float(out[0])
    except ValueError:
        logger.error("cannot convert cache size, reset size to 0")
        return 0


def check_and_clean(threshold):
    """Prune all unused docker data once the cache size exceeds *threshold*.

    ``docker system prune -af`` removes all unused images, containers,
    networks and build cache without prompting.
    """
    if get_cache_size() > threshold:
        common.run_cmd("docker system prune -af", logger)


if __name__ == "__main__":
    common.setup_logging()
    check_and_clean(10)
def setUp(self):
    """Initialise logging before each test case runs."""
    setup_logging()