def _archive_logs(self):
    cutoff_date = datetime.now() - STALE_AFTER
    try:
        with GlobalLock("ACTION_LOG_ROTATION"):
            self._perform_archiving(cutoff_date)
    except LockNotAcquiredException:
        return
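Every call site below follows the same acquire-or-skip shape: try to take a named global lock, do the work if it succeeds, and bail out quietly on LockNotAcquiredException so another worker instance can pick the job up on its own schedule. As a minimal sketch of what such a lock could look like over Redis (SimpleGlobalLock, its key naming, and its defaults are illustrative stand-ins, not the codebase's actual implementation):

import uuid

import redis


class LockNotAcquiredException(Exception):
    """Raised when the lock is already held by another process."""


class SimpleGlobalLock:
    """Simplified stand-in for a Redis-backed global lock.

    SET NX PX creates the key only if it is absent and makes it expire
    after lock_ttl seconds, so a crashed holder cannot wedge the lock
    forever. If the key already exists, raise instead of blocking.
    """

    def __init__(self, name, lock_ttl=60, client=None):
        self._client = client if client is not None else redis.Redis()
        self._key = "lock:%s" % name
        self._ttl_ms = int(lock_ttl * 1000)
        self._token = uuid.uuid4().hex

    def __enter__(self):
        if not self._client.set(self._key, self._token, nx=True, px=self._ttl_ms):
            raise LockNotAcquiredException(self._key)
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # Naive release; see the ownership-checked variant further below.
        if self._client.get(self._key) == self._token.encode():
            self._client.delete(self._key)
        return False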
def _garbage_collection_repos(self, skip_lock_for_testing=False):
    """
    Performs garbage collection on repositories.
    """
    with UseThenDisconnect(app.config):
        policy = get_random_gc_policy()
        if policy is None:
            logger.debug("No GC policies found")
            return

        repo_ref = registry_model.find_repository_with_garbage(policy)
        if repo_ref is None:
            logger.debug("No repository with garbage found")
            return

        assert features.GARBAGE_COLLECTION

        try:
            with GlobalLock(
                "REPO_GARBAGE_COLLECTION_%s" % repo_ref.id,
                lock_ttl=REPOSITORY_GC_TIMEOUT + LOCK_TIMEOUT_PADDING,
            ) if not skip_lock_for_testing else empty_context():
                try:
                    repository = Repository.get(id=repo_ref.id)
                except Repository.DoesNotExist:
                    return

                logger.debug("Starting GC of repository #%s (%s)", repository.id, repository.name)
                garbage_collect_repo(repository)
                logger.debug("Finished GC of repository #%s (%s)", repository.id, repository.name)
        except LockNotAcquiredException:
            logger.debug("Could not acquire repo lock for garbage collection")
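The `GlobalLock(...) if not skip_lock_for_testing else empty_context()` expression picks the context manager before `with` enters it, which lets tests bypass distributed locking entirely. `empty_context` could be as small as the sketch below, equivalent to the standard library's `contextlib.nullcontext()`; this is an assumption about its shape, not a quote of the real definition:

from contextlib import contextmanager


@contextmanager
def empty_context():
    # No-op context manager: entering and exiting does nothing, so
    # `with lock if use_lock else empty_context():` can skip locking.
    yield None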
def process_queue_item(self, job_details):
    try:
        with GlobalLock(
            "LARGE_GARBAGE_COLLECTION", lock_ttl=NAMESPACE_GC_TIMEOUT + LOCK_TIMEOUT_PADDING
        ):
            self._perform_gc(job_details)
    except LockNotAcquiredException:
        logger.debug("Could not acquire global lock for garbage collection")
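The TTL is deliberately padded past the worst-case GC duration so the lock cannot expire mid-run and let a second worker start a concurrent collection. A sketch of the arithmetic, with hypothetical values standing in for the real constants defined elsewhere in the codebase:

# Hypothetical values; the real constants live elsewhere.
NAMESPACE_GC_TIMEOUT = 60 * 15   # worst-case GC duration: 15 minutes
LOCK_TIMEOUT_PADDING = 60        # slack for clock skew and slow teardown

# The lock must outlive the work it guards; otherwise another worker
# could acquire it and run GC concurrently with the first.
lock_ttl = NAMESPACE_GC_TIMEOUT + LOCK_TIMEOUT_PADDING
assert lock_ttl > NAMESPACE_GC_TIMEOUT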
def _try_report_stats(self):
    logger.debug("Attempting to report stats")
    try:
        with GlobalLock("GLOBAL_PROM_STATS"):
            self._report_stats()
    except LockNotAcquiredException:
        logger.debug("Could not acquire global lock for global prometheus stats")
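Whichever worker instance wins GLOBAL_PROM_STATS reports the stats; the rest log and move on. A self-contained, in-process demonstration of that contention behavior, using a plain set of held names purely as a stand-in for the distributed lock:

import threading
import time

_held = set()
_guard = threading.Lock()


class LockNotAcquiredException(Exception):
    pass


class InProcessGlobalLock:
    """In-process stand-in for GlobalLock: acquire or raise, never block."""

    def __init__(self, name):
        self._name = name

    def __enter__(self):
        with _guard:
            if self._name in _held:
                raise LockNotAcquiredException(self._name)
            _held.add(self._name)
        return self

    def __exit__(self, *exc):
        with _guard:
            _held.discard(self._name)
        return False


def try_report_stats(worker_id):
    try:
        with InProcessGlobalLock("GLOBAL_PROM_STATS"):
            time.sleep(0.2)  # simulate a slow stats run
            print("worker %s: reported stats" % worker_id)
    except LockNotAcquiredException:
        print("worker %s: lock busy, skipping" % worker_id)


threads = [threading.Thread(target=try_report_stats, args=(i,)) for i in (1, 2)]
for t in threads:
    t.start()
for t in threads:
    t.join()
# Typical output: one worker reports stats, the other skips.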
def _try_cleanup_uploads(self):
    """
    Performs garbage collection on the blobupload table.

    Will also perform garbage collection on the uploads folder in the S3 bucket,
    if applicable.
    """
    try:
        with GlobalLock("BLOB_CLEANUP", lock_ttl=LOCK_TTL):
            self._cleanup_uploads()
            if app.config.get("CLEAN_BLOB_UPLOAD_FOLDER", False):
                self._try_clean_partial_uploads()
    except LockNotAcquiredException:
        logger.debug("Could not acquire global lock for blob upload cleanup worker")
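Because BLOB_CLEANUP runs with an explicit lock_ttl, a slow cleanup could outlive its own lock, after which another worker may legitimately hold the same key. Release therefore has to check ownership before deleting. A common way to make that check-and-delete atomic in Redis is a short Lua script, sketched here as a complement to the stand-in lock shown earlier:

import redis

# Delete the key only if it still holds our token. The script runs
# atomically inside Redis, so an expired-and-reacquired lock is never
# released by its previous holder.
_RELEASE_SCRIPT = """
if redis.call('get', KEYS[1]) == ARGV[1] then
    return redis.call('del', KEYS[1])
else
    return 0
end
"""


def release_lock(client, name, token):
    # EVAL with one key; returns 1 if we released, 0 if ownership was lost.
    return client.eval(_RELEASE_SCRIPT, 1, name, token)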
def _index_recent_manifests_in_scanner(self):
    batch_size = app.config.get("SECURITY_SCANNER_V4_RECENT_MANIFEST_BATCH_SIZE", 1000)

    if not app.config.get("SECURITY_SCANNER_V4_SKIP_RECENT_MANIFEST_BATCH_LOCK", False):
        try:
            with GlobalLock(
                "SECURITYWORKER_INDEX_RECENT_MANIFEST", lock_ttl=300, auto_renewal=True
            ):
                self._model.perform_indexing_recent_manifests(batch_size)
        except LockNotAcquiredException:
            logger.warning("Could not acquire global lock for recent manifest indexing. Skipping")
    else:
        self._model.perform_indexing_recent_manifests(batch_size)
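`auto_renewal=True` keeps the 300-second lock alive for however long recent-manifest indexing actually takes. One plausible mechanism, sketched with redis-py (illustrative only; the library backing GlobalLock may renew differently): a daemon thread re-extends the key's expiry at a fraction of the TTL until the caller signals completion.

import threading

import redis


def start_auto_renewal(client, name, ttl_seconds, stop_event, interval=None):
    """Extend the lock's expiry every `interval` seconds until stopped.

    Renewing at a third of the TTL leaves two missed renewals' worth of
    slack before the lock would actually expire.
    """
    interval = interval or ttl_seconds / 3.0
    ttl_ms = int(ttl_seconds * 1000)

    def _renew():
        # Event.wait returns True once stop_event is set, ending the loop.
        while not stop_event.wait(interval):
            # PEXPIRE resets the key's remaining time-to-live in milliseconds.
            client.pexpire(name, ttl_ms)

    thread = threading.Thread(target=_renew, daemon=True)
    thread.start()
    return thread

The worker would set stop_event in a finally block once indexing returns, so the lock can then be released explicitly or simply left to expire.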