def get_cache_size():
    out = common.run_cmd("source ./scripts/reclaimable_docker_cache.sh 2> /dev/null", logger)
    size = 0
    if len(out) == 0:
        logger.error("cannot retrieve cache size.")
        return size
    try:
        size = float(out[0])
    except ValueError:
        logger.error("cannot convert cache size, resetting size to 0")
        size = 0
    return size
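For reference, common.run_cmd is assumed here to run a shell command and return its stdout as a list of lines, logging failures through the passed-in logger. The subprocess-based sketch below is illustrative only, based on how the helper is used above; it is not the project's actual implementation.

import subprocess

def run_cmd(cmd, logger):
    # Illustrative sketch: the real common.run_cmd may differ.
    # Runs `cmd` through the shell and returns stdout split into lines;
    # returns an empty list (and logs) when the command fails.
    try:
        result = subprocess.run(
            cmd, shell=True, capture_output=True, text=True, check=True
        )
    except subprocess.CalledProcessError as e:
        logger.error("command %r failed with exit code %d", cmd, e.returncode)
        return []
    return result.stdout.splitlines()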
def testDeletedCmd(self):
    test_file = "/tmp/deleted_test.txt"

    def open_and_loop():
        # Keep the file handle open indefinitely so the file still shows up
        # as open-but-deleted after the parent process removes it.
        with open(test_file, "w"):
            while True:
                pass

    proc = multiprocessing.Process(target=open_and_loop)
    proc.start()
    time.sleep(1)
    os.remove(test_file)
    time.sleep(1)
    mock_logger = mock.Mock()
    cmd_out = run_cmd(check_deleted_files.DELETED_FILES_CMD, mock_logger)
    files = [f.split(" ")[1] for f in cmd_out[1:]]
    self.assertTrue(test_file in files)
    proc.terminate()
    proc.join()
def check_and_clean(threshold):
    if get_cache_size() > threshold:
        common.run_cmd("docker system prune -af", logger)
def check_and_clean(threshold):
    if get_cache_size() > threshold:
        # To avoid a possible race condition, only clean containers, images,
        # and networks created more than 1h ago.
        common.run_cmd("docker system prune -af --filter until=1h", logger)
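A hypothetical entry point tying the pieces together might look like the following; the threshold default, its unit, and the --threshold flag name are assumptions for illustration, not part of the original code.

import argparse

def main():
    # Hypothetical wiring: prune the Docker cache once the reclaimable size
    # reported by get_cache_size() exceeds a configurable threshold
    # (same unit as the helper script's output, assumed to be GB here).
    parser = argparse.ArgumentParser(
        description="Prune Docker cache when it grows too large"
    )
    parser.add_argument(
        "--threshold", type=float, default=50.0,
        help="reclaimable cache size above which to prune (assumed unit: GB)"
    )
    args = parser.parse_args()
    check_and_clean(args.threshold)

if __name__ == "__main__":
    main()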