this is useful when utilizing gunicorn's hot reload in local dev. utilizing this
    method will enforce a 1:1 quay worker to gunicorn worker ratio. """
    # Build the GC worker and hand it to gunicorn's lifecycle management so a
    # single gunicorn worker owns a single Quay GC worker (1:1 ratio).
    gc_worker = RepositoryGCWorker(
        repository_gc_queue,
        poll_period_seconds=POLL_PERIOD_SECONDS,
        reservation_seconds=REPOSITORY_GC_TIMEOUT,
    )
    worker = GunicornWorker(__name__, app, gc_worker, features.REPOSITORY_GARBAGE_COLLECTION)
    return worker


if __name__ == "__main__":
    logging.config.fileConfig(logfile_path(debug=False), disable_existing_loggers=False)

    # When the feature flag is off, sleep forever instead of exiting so the
    # process supervisor does not restart this worker in a tight loop.
    if not features.REPOSITORY_GARBAGE_COLLECTION:
        logger.info("Repository garbage collection is disabled; skipping")
        while True:
            time.sleep(100000)

    logger.debug("Starting repository GC worker")
    worker = RepositoryGCWorker(
        repository_gc_queue,
        poll_period_seconds=POLL_PERIOD_SECONDS,
        reservation_seconds=REPOSITORY_GC_TIMEOUT,
    )
    worker.start()
# NOTE: Must be before we import or call anything that may be synchronous.
from gevent import monkey

monkey.patch_all()

import sys
import os

sys.path.append(os.path.join(os.path.dirname(__file__), "../"))

import logging

from util.log import logfile_path
from util.workers import get_worker_count, get_worker_connections_count

# Gunicorn configuration for the registry service (read by gunicorn at startup;
# the module-level names below are gunicorn settings and must keep their names).
logconfig = logfile_path(debug=False)
bind = "unix:/tmp/gunicorn_registry.sock"
pythonpath = "."
preload_app = True

worker_class = "gevent"
workers = get_worker_count("registry", 4, minimum=8, maximum=64)
worker_connections = get_worker_connections_count("registry")


def when_ready(server):
    """Gunicorn server hook: log the effective worker configuration once ready."""
    log = logging.getLogger(__name__)
    log.debug(
        "Starting registry gunicorn with %s workers and %s worker class",
        workers,
        worker_class,
    )
continue
            except TagManifestLabelMap.DoesNotExist:
                # No existing mapping row; fall through and create it below.
                pass
            try:
                manifest_label = ManifestLabel.create(
                    manifest=manifest, label=label, repository=repository
                )
                TagManifestLabelMap.create(
                    manifest_label=manifest_label,
                    tag_manifest_label=tag_manifest_label,
                    label=label,
                    manifest=manifest,
                    tag_manifest=tag_manifest_label.annotated,
                )
            except IntegrityError:
                # Row already exists (raced with another backfill writer); skip it.
                continue


if __name__ == "__main__":
    logging.config.fileConfig(logfile_path(debug=False), disable_existing_loggers=False)

    # When backfill is disabled, sleep forever instead of exiting so the
    # process supervisor does not restart this worker in a tight loop.
    if (not app.config.get('BACKFILL_TAGS', False)
            and app.config.get('V3_UPGRADE_MODE') != 'background'):
        logger.debug('Tag backfill disabled; skipping')
        while True:
            time.sleep(100000)

    worker = TagBackfillWorker(app.config.get('BACKFILL_TAGS_NAMESPACE'))
    worker.start()
import sys
import os

sys.path.append(os.path.join(os.path.dirname(__file__), "../"))

import logging

from util.log import logfile_path
from util.workers import get_worker_count, get_worker_connections_count

# Gunicorn configuration for running Quay locally (read by gunicorn at startup;
# the module-level names below are gunicorn settings and must keep their names).
logconfig = logfile_path(debug=True)
bind = "0.0.0.0:5000"
daemon = False
pythonpath = "."

worker_class = "gevent"
workers = get_worker_count("local", 2, minimum=2, maximum=8)
worker_connections = get_worker_connections_count("local")

# Enable gunicorn's hot reload for local dev when QUAY_HOTRELOAD is set;
# otherwise preload the application in the master process.
if os.getenv("QUAY_HOTRELOAD", "false") == "true":
    reload = True
    reload_engine = "auto"
else:
    preload_app = True


def when_ready(server):
    """Gunicorn server hook: log the effective worker configuration once ready."""
    log = logging.getLogger(__name__)
    log.debug(
        "Starting local gunicorn with %s workers and %s worker class",
        workers,
        worker_class,
    )
# `logging.config` is a submodule: `import logging` alone does not guarantee the
# `config` attribute is bound, so import it explicitly instead of relying on a
# transitive import elsewhere.
import logging.config

# NOTE(review): `config_web` appears to be imported only for its import-time
# side effects — it is never referenced here; confirm before removing.
from config_app import config_web  # noqa: F401
from config_app.c_app import app as application

from util.log import logfile_path

if __name__ == "__main__":
    # Dev entry point for the config app: debug-level logging and Flask's
    # built-in threaded server listening on all interfaces.
    logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)
    application.run(port=5000, debug=True, threaded=True, host="0.0.0.0")
def test_logfile_path_default():
    """With no overrides, logfile_path() resolves to the stock logging.conf."""
    expected = os.path.join(CONF_DIR, "logging.conf")
    assert logfile_path() == expected
def test_logfile_path_env(debug, jsonfmt, expected, monkeypatch):
    """logfile_path() picks its config file from the DEBUGLOG/JSONLOG env vars."""
    overrides = {"DEBUGLOG": debug, "JSONLOG": jsonfmt}
    for name, value in overrides.items():
        monkeypatch.setenv(name, value)
    assert logfile_path() == expected
def test_logfile_path(debug, jsonfmt, expected, monkeypatch):
    """Explicit keyword arguments select the matching logging config file."""
    # NOTE(review): the monkeypatch fixture is unused in this test — confirm
    # whether it can be dropped from the signature.
    actual = logfile_path(jsonfmt=jsonfmt, debug=debug)
    assert actual == expected