def machines(expired_for=0):
    """Remove expired machine directories under ``path.MACHINES``.

    Walks the machines tree bottom-up, takes a per-machine inter-process
    lock, and removes each machine that needs a rebuild (or everything,
    when ``expired_for == 'all'``).

    :param expired_for: a time interval (e.g. ``'7d'``) that shifts the
                        expiration cutoff into the past,
                        or ``'all'`` to remove every machine.
    """
    if expired_for != 'all':
        adjusted_time = time.time() - units.parse_time_interval(expired_for)
    # topdown=False so children are visited (and removed) before parents
    for root, dirs, files in os.walk(path.MACHINES, topdown=False):
        for d in (os.path.join(root, x) for x in dirs):
            lock_path = os.path.join(root,
                                     '.' + os.path.basename(d) + '-lock')
            lock = fasteners.process_lock.InterProcessLock(lock_path)
            lock.acquire()
            try:
                if expired_for == 'all':
                    # Unconditional removal: don't consult the machine.
                    # (Previously this path raised a NameError on the
                    # undefined adjusted_time, which was swallowed by the
                    # except below and logged as a spurious warning.)
                    remove = True
                else:
                    try:
                        remove = fingertip.machine.needs_a_rebuild(
                            d, by=adjusted_time)
                    except Exception as ex:
                        # A machine we cannot even inspect is treated as
                        # expired rather than kept around forever.
                        log.warning(f'while processing {d}: {ex}')
                        remove = True
                if remove:
                    # Safety net: never rmtree anything outside MACHINES.
                    assert os.path.realpath(d).startswith(
                        os.path.realpath(path.MACHINES))
                    log.info(f'removing {os.path.realpath(d)}')
                    if not os.path.islink(d):
                        shutil.rmtree(d)
                    else:
                        os.unlink(d)
                else:
                    log.debug(f'keeping {os.path.realpath(d)}')
                os.unlink(lock_path)
            finally:
                # Always release, even if removal raised; previously an
                # exception here leaked the held lock and its lock file.
                lock.release()
def tempfiles(older_than='6h', location=None):
    """Clean up stale autoremovable tempfiles.

    :param older_than: age threshold interval (default six hours).
    :param location: directory to sweep; the system tempdir when unset.
    """
    if not location:
        location = tempfile.gettempdir()
    threshold = time.time() - units.parse_time_interval(older_than)

    def should_keep(fname):
        # Keep anything recent enough, and anything that is not marked
        # with the autoremove prefix.
        return _time(fname) >= threshold or temp.AUTOREMOVE_PREFIX not in fname

    _cleanup_dir(location, should_keep)
def logs(older_than=0):
    """Remove log files under ``path.LOGS`` older than the given interval."""
    threshold = time.time() - units.parse_time_interval(older_than)

    def fresh_enough(fname):
        return _time(fname) >= threshold

    _cleanup_dir(path.LOGS, fresh_enough)
def downloads(older_than=0):
    """Remove downloads under ``path.DOWNLOADS`` older than the interval."""
    threshold = time.time() - units.parse_time_interval(older_than)

    def fresh_enough(fname):
        return _time(fname) >= threshold

    _cleanup_dir(path.DOWNLOADS, fresh_enough)
def cap(self, interval):
    """Tighten the expiration so it happens no later than `interval` from now.

    Never extends an already-earlier expiration time.
    """
    latest_allowed = time.time() + units.parse_time_interval(interval)
    if latest_allowed < self.time:
        self.time = latest_allowed
def __init__(self, expire_in):
    """Schedule expiration `expire_in` from now with empty dependency maps.

    :param expire_in: time interval (parsed by units.parse_time_interval).
    """
    self.time = time.time() + units.parse_time_interval(expire_in)
    # Tracked dependencies, filled in later.
    self._deps_files = {}
    self._deps_dirs = {}