import os


def reset_prometheus(getpid=os.getpid):
    """Reset the prometheus file cache.

    MultiProcessValue keeps a files cache which is only accessible via a
    closure, so the only way to clear the cache is to recreate the closure,
    which is what we do here.
    """
    try:
        from prometheus_client import values
        values.ValueClass = values.MultiProcessValue(getpid)
    except ImportError:
        pass  # prometheus is optional
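# A minimal usage sketch (assumed, not from the original module): after a
# fork, the child still holds the value files cached under the parent's pid,
# so the closure is recreated to write metrics under the child's own pid.
import os

pid = os.fork()
if pid == 0:
    reset_prometheus()  # child now opens fresh per-pid value files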
from threading import Lock

from prometheus_client import values


class MultiprocessMutexValue(values.MultiProcessValue()):
    """MultiProcessValue protected by a mutex.

    Rucio is usually deployed using the Apache MPM module, which means it
    uses both multiple subprocesses and multiple threads per subprocess.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._lock = Lock()

    def inc(self, amount):
        with self._lock:
            return super().inc(amount)

    def set(self, value):
        with self._lock:
            return super().set(value)

    def get(self):
        with self._lock:
            return super().get()
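# Hypothetical wiring (not shown in the excerpt): install the class as the
# value implementation, so every metric created afterwards is backed by the
# mutex-protected multiprocess value instead of the default one.
values.ValueClass = MultiprocessMutexValue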
import os

from prometheus_client import values

# Override the pid function if we have a reusable gunicorn worker ID.
if os.environ.get('prometheus_multiproc_dir', None):  # noqa
    try:
        values.ValueClass = values.MultiProcessValue(
            process_identifier=lambda: os.environ.get('APP_WORKER_ID', 1000),
        )
    except TypeError:
        # Older prometheus_client releases used a different keyword argument.
        values.ValueClass = values.MultiProcessValue(
            _pidFunc=lambda: os.environ.get('APP_WORKER_ID', 1000),
        )

# Import all files that define metrics. This has the effect that
# `import django_prometheus` will always instantiate all metric
# objects right away.
import django_prometheus.middleware
import django_prometheus.models

# Import pip_prometheus to export the pip metrics automatically.
try:
    import pip_prometheus
except ImportError:
    # If people don't have pip, don't export anything.
    pass

try:
    # Load the celery exporter if possible.
    import django_prometheus.celery
except ImportError:
    pass

default_app_config = 'django_prometheus.apps.DjangoPrometheusConfig'
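# A sketch of the gunicorn side of this pattern (an assumption, not part of
# django_prometheus itself): set APP_WORKER_ID from gunicorn's post_fork
# server hook so each worker slot reuses a stable identifier instead of its
# raw pid. gunicorn assigns `worker.age` as an ever-increasing counter at
# spawn time; taking it modulo the configured worker count keeps the set of
# IDs bounded even as workers are recycled.

# gunicorn.conf.py (hypothetical)
import os


def post_fork(server, worker):
    os.environ['APP_WORKER_ID'] = str(worker.age % server.cfg.workers)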
import os

import prometheus_client
from prometheus_client import multiprocess, values


def get_label_value(self):
    """
    :return: the label value
    :rtype: str
    """
    return self.name.lower()


REGISTRY = prometheus_client.CollectorRegistry()

if "prometheus_multiproc_dir" in os.environ:
    # For why this is needed see:
    # https://github.com/prometheus/client_python/issues/275#issuecomment-504755024
    import uwsgi
    prometheus_client.values.ValueClass = values.MultiProcessValue(
        uwsgi.worker_id)
    multiprocess.MultiProcessCollector(REGISTRY)

APPLY = ActionMetrics('apply', 'apply a manifest', ['manifest'])
# TODO: Ideally include an action (ChartDeployAction) label, but that's not
# determined until after chart handling starts.
CHART_HANDLE = ChartHandleMetrics(
    'chart_handle',
    'handle a chart (including delete, deploy, test (all as necessary) but '
    'not download)',
    ['manifest', 'chart'])
CHART_DOWNLOAD = ActionMetrics(
    'chart_download',
    'download a chart (will be a noop if previously cached)',
    ['manifest', 'chart'])
CHART_DELETE = ActionMetrics('chart_delete', 'delete a chart',
                             ['manifest', 'chart'])
import os
import unittest.mock
from tempfile import TemporaryDirectory

from prometheus_client import values


@classmethod
def setUpClass(cls):
    cls.temp_dir = TemporaryDirectory()
    os.environ["prometheus_multiproc_dir"] = cls.temp_dir.name
    cls.patch_value_class = unittest.mock.patch.object(
        values, "ValueClass", values.MultiProcessValue(cls.current_test))
    cls.patch_value_class.start()
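# A matching teardown sketch (assumed, not part of the excerpt): stop the
# ValueClass patch and discard the per-test multiprocess directory so tests
# don't leak state into one another.
@classmethod
def tearDownClass(cls):
    cls.patch_value_class.stop()
    del os.environ["prometheus_multiproc_dir"]
    cls.temp_dir.cleanup()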
from glob import glob
from os import getenv, getpid, remove

from flask import Response
from prometheus_client import (
    CONTENT_TYPE_LATEST,
    CollectorRegistry,
    generate_latest,
    multiprocess,
    values,
)

# Remove all *container*.db files since we're restarting the process.
files = glob(
    f"/{getenv('PROMETHEUS_MULTIPROC_DIR')}/*{audius_prometheus_container}*")
for f in files:
    logger.info(f"Removing prometheus file: {f}")
    remove(f)


# Since the server and worker containers share ${PROMETHEUS_MULTIPROC_DIR}/,
# ensure each container uses its own prefix to avoid pid collisions between
# the two containers when using prometheus-client in multi-process mode.
def process_identifier():
    return f"{audius_prometheus_container}_{getpid()}"


values.ValueClass = values.MultiProcessValue(
    process_identifier=process_identifier)


@bp.route("/prometheus_metrics", methods=["GET"])
def prometheus_metrics_exporter():
    PrometheusMetric.populate_collectors()
    registry = CollectorRegistry()
    multiprocess.MultiProcessCollector(registry)
    data = generate_latest(registry)
    headers = {
        "Content-type": CONTENT_TYPE_LATEST,
        "Content-Length": str(len(data)),
    }
    return Response(data, headers=headers, mimetype=CONTENT_TYPE_LATEST)