def child_exit(server, worker):
    if os.environ.get("prometheus_multiproc_dir", False):
        from prometheus_flask_exporter.multiprocess import (
            GunicornInternalPrometheusMetrics,
        )
        GunicornInternalPrometheusMetrics.mark_process_dead_on_child_exit(
            worker.pid)

def initialise_prometheus(app, log=None): if os.environ.get("PROMETHEUS_MULTIPROC_DIR", False): metrics = GunicornInternalPrometheusMetrics(app) if log: log.info("Prometheus metrics enabled") return metrics return None
def enable_prometheus():
    # Enable deployment-specific code for Prometheus metrics
    if os.environ.get("prometheus_multiproc_dir", False):
        from prometheus_flask_exporter.multiprocess import (
            GunicornInternalPrometheusMetrics,
        )
        metrics = GunicornInternalPrometheusMetrics(app)
        _LOG.info(f"Prometheus metrics enabled: {metrics}")

def initialise_prometheus(app, log=None):
    # Prometheus
    if os.environ.get("prometheus_multiproc_dir", False):
        metrics = GunicornInternalPrometheusMetrics(app)
        if log:
            log.info("Prometheus metrics enabled")
        return metrics
    return None

def activate_prometheus_exporter(app):
    metrics = GunicornInternalPrometheusMetrics(app, export_defaults=False)

    endpoint = app.view_functions
    histogram = metrics.histogram(
        'mlflow_requests_by_status_and_path',
        'Request latencies and count by status and path',
        labels={
            'status': lambda r: r.status_code,
            'path': lambda: change_path_for_metric(request.path)
        })
    for func_name, func in endpoint.items():
        if func_name in [
            "_search_runs", "_log_metric", "_log_param", "_set_tag",
            "_create_run"
        ]:
            app.view_functions[func_name] = histogram(func)
    return app

def app() -> Flask:
    app = create_app('myapp.config.TestConfig')
    prometheus_client.REGISTRY = prometheus_client.CollectorRegistry(
        auto_describe=True)
    myapp_extensions.metrics = GunicornInternalPrometheusMetrics.for_app_factory(
        group_by="endpoint")

    ctx = app.app_context()
    ctx.push()

    yield app

    ctx.pop()

def activate_prometheus_exporter(app):
    def mlflow_version(_: request):
        return VERSION

    metrics = GunicornInternalPrometheusMetrics(
        app,
        export_defaults=True,
        defaults_prefix="mlflow",
        excluded_paths=["/health"],
        group_by=mlflow_version,
    )
    return metrics

def init_prometheus_flask_exporter(self, app):
    enable_exporter_flask = app.config.get(
        "PROMETHEUS_ENABLE_EXPORTER_FLASK", False)
    if not enable_exporter_flask:
        LOGGER.debug(
            f"Prometheus Flask exporter is not enabled for {app.name}.")
        return

    prefix = app.name
    metrics_flask = GunicornInternalPrometheusMetrics.for_app_factory(
        defaults_prefix=prefix, group_by=url_rule)
    metrics_flask.init_app(app)
    LOGGER.debug(
        f"Prometheus Flask exporter is initialized with prefix {prefix}.")

def init_metrics(app): """ Initialize the Prometheus Flask Exporter. :return: a Prometheus Flash Metrics object :rtype: PrometheusMetrics """ registry = CollectorRegistry() multiproc_temp_dir = app.config["PROMETHEUS_METRICS_TEMP_DIR"] hostname = socket.gethostname() if not os.path.isdir(multiproc_temp_dir): os.makedirs(multiproc_temp_dir) multiprocess.MultiProcessCollector(registry, path=multiproc_temp_dir) metrics = GunicornInternalPrometheusMetrics.for_app_factory( default_labels={"host": hostname}, group_by="endpoint", defaults_prefix="cachito_flask" ) metrics.init_app(app)
import os

from flask import request
from mlflow.pyfunc import scoring_server, load_model
from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics

app = scoring_server.init(load_model(os.getenv('MODEL_PATH')))
metrics = GunicornInternalPrometheusMetrics(app, defaults_prefix=os.getenv('STATSD_PREFIX'))

metrics.register_default(
    metrics.counter(
        'by_path_counter', 'Request count by request paths',
        labels={'path': lambda: request.path}
    )
)

def child_exit(server, worker):
    # Gunicorn server hook: runs in the master process when a worker exits.
    GunicornInternalPrometheusMetrics.mark_process_dead_on_child_exit(worker.pid)

def when_ready(server):
    # Gunicorn server hook: runs in the master process once the server is ready;
    # serves the aggregated multiprocess metrics on port 8080.
    GunicornInternalPrometheusMetrics.start_http_server_when_ready(8080)

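# Not taken from any single project above: a minimal gunicorn.conf.py sketch
# combining the two hooks shown in the preceding snippets. The bind address,
# worker count, and metrics port are assumptions; PROMETHEUS_MULTIPROC_DIR must
# point at a writable directory before the workers start.
from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics

bind = "0.0.0.0:8000"
workers = 4


def when_ready(server):
    # Expose the aggregated metrics of all workers on a dedicated port.
    GunicornInternalPrometheusMetrics.start_http_server_when_ready(9090)


def child_exit(server, worker):
    # Drop the dead worker's live metric files from the multiprocess directory.
    GunicornInternalPrometheusMetrics.mark_process_dead_on_child_exit(worker.pid)
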
def create_app(): """Create Connexion/Flask application.""" root = os.path.dirname(rhub.__path__[0]) connexion_app = connexion.App(__name__) flask_app = connexion_app.app flask_app.url_map.strict_slashes = False if os.getenv('PROMETHEUS_MULTIPROC_DIR'): GunicornInternalPrometheusMetrics(flask_app) from . import _config flask_app.config.from_object(_config) parser = prance.ResolvingParser( os.path.join(root, 'openapi', 'openapi.yml')) connexion_app.add_api( parser.specification, validate_responses=True, strict_validation=True, pythonic_params=True, ) # Enable CORS (Cross-Origin Resource Sharing) # https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS CORS(flask_app) flask_app.cli.add_command(init_command) db.init_app(flask_app) migrate.init_app(flask_app, db) try: import coloredlogs coloredlogs.install(level=flask_app.config['LOG_LEVEL']) except ImportError: logging.basicConfig(level=flask_app.config['LOG_LEVEL']) RHUB_RETURN_INITIAL_FLASK_APP = os.getenv('RHUB_RETURN_INITIAL_FLASK_APP', 'False') if str(RHUB_RETURN_INITIAL_FLASK_APP).lower() == 'true': return flask_app FlaskInjector( app=flask_app, injector=di, modules=[ KeycloakModule(flask_app), VaultModule(flask_app), SchedulerModule(flask_app), ], ) # Try to retrieve Tower notification webhook creds from vault try: with flask_app.app_context(): vault = di.get(Vault) webhookCreds = vault.read(flask_app.config['WEBHOOK_VAULT_PATH']) if webhookCreds: flask_app.config['WEBHOOK_USER'] = webhookCreds['username'] flask_app.config['WEBHOOK_PASS'] = webhookCreds['password'] else: raise Exception( 'Missing tower webhook notification credentials; ' f'{vault} {flask_app.config["WEBHOOK_VAULT_PATH"]}') except Exception as e: logger.error( f'Failed to load {flask_app.config["WEBHOOK_VAULT_PATH"]} tower' f' webhook notification credentials {e!s}.') return flask_app
def __init__(self, app, config):
    self.config = config

    if os.getenv('prometheus_multiproc_dir'):
        self.gunicorn_metrics = GunicornInternalPrometheusMetrics(
            app, path="/metrics")

def init_metrics(app):
    myapp = GunicornInternalPrometheusMetrics(app, registry=CollectorRegistry())

def child_exit(server, worker):  # pylint: disable=unused-argument
    """Clean up prometheus metrics when a worker dies."""
    GunicornInternalPrometheusMetrics.mark_process_dead_on_child_exit(worker.pid)

    docs.register(date)

    @app.route("/api/predict/coreness", methods=["POST"])
    @use_kwargs({
        'title': fields.Str(required=True),
        'abstract': fields.Str(required=True)
    })
    @marshal_with(serializers.ClassifierOutputSerializer)
    def core_classifier(**kwargs):
        """Endpoint for the CORE classifier."""
        return predict_coreness(kwargs['title'], kwargs['abstract'])

    docs.register(core_classifier)

    return app


@app.errorhandler(404)
def page_not_found(e):
    return {"errors": [str(e)]}, 404


app = create_app()

if app.config.get('PROMETHEUS_ENABLE_EXPORTER_FLASK'):
    logging.info("Starting prometheus metrics exporter")
    metrics = GunicornInternalPrometheusMetrics.for_app_factory()
    metrics.init_app(app)

if __name__ == '__main__':
    app.run(host='0.0.0.0')

from flask import Flask
from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics

application = Flask(__name__)
metrics = GunicornInternalPrometheusMetrics(application)

# static information as metric
metrics.info('app_info', 'Application info', version='1.0.3')


@application.route('/test')
def main():
    # requests tracked by default
    raise Exception("Crashing")


if __name__ == '__main__':
    application.run(debug=False, port=5000)

from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics

metrics = GunicornInternalPrometheusMetrics(app=None, group_by="endpoint")


def setup_extensions(app):
    metrics.init_app(app)
    return app

app = Flask(__name__.split('.')[0])
RequestID(app)

# Parse config file
if not os.environ.get("DEFER_CFG_PARSE"):
    get_config()

# If invoked using Gunicorn, link our root logger to the gunicorn logger.
# Root logs are then captured and managed by the gunicorn logger, so the
# gunicorn log directories and levels apply to logs produced by this application.
_LOG.setLevel(logging.getLogger('gunicorn.error').getEffectiveLevel())

if os.environ.get("prometheus_multiproc_dir", False):
    metrics = GunicornInternalPrometheusMetrics(app)
    _LOG.info("Prometheus metrics enabled")

if os.environ.get("AWS_DEFAULT_REGION"):
    unsigned = bool(os.environ.get("AWS_NO_SIGN_REQUEST", "yes"))
    set_default_rio_config(aws=dict(aws_unsigned=unsigned, region_name="auto"),
                           cloud_defaults=True)
else:
    set_default_rio_config()
    _LOG.warning("Environment variable $AWS_DEFAULT_REGION not set. "
                 "(This warning can be ignored if all data is stored locally.)")

# Suppress annoying rasterio warning message every time we write to a
# non-georeferenced image format
warnings.simplefilter("ignore", category=NotGeoreferencedWarning)


class SupportedSvcVersion(object):

from flask import Flask
from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics

app = Flask(__name__)
metrics = GunicornInternalPrometheusMetrics(app)


@app.route('/test')
def index():
    return 'Hello world'


@app.route('/error')
def error():
    raise Exception('Fail')


if __name__ == '__main__':
    app.run(debug=False, port=5000)

from flask import Flask, render_template
from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics

application = Flask(__name__, static_folder="./public", template_folder="./templates")
metrics = GunicornInternalPrometheusMetrics(application)


@application.route("/")
def index():
    return render_template("index.html")


@application.route("/click-button", methods=["POST"])
@metrics.counter("demo_app_button_clicks", "Number of button presses by user")
def web_button():
    return {}

from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics

metrics = GunicornInternalPrometheusMetrics.for_app_factory(group_by="endpoint")


def setup_extensions(app):
    metrics.init_app(app)
    return app

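# Not from the snippet above: a minimal sketch of the consuming side of this
# app-factory pattern. The module name "extensions" is a hypothetical home for
# the setup_extensions helper shown above.
from flask import Flask

from extensions import setup_extensions


def create_app():
    app = Flask(__name__)
    # Bind the module-level metrics object (built with for_app_factory or
    # app=None) to this specific application instance.
    setup_extensions(app)
    return app
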
    app = Flask(__name__)
    metrics.init_app(app)

    # Add prometheus wsgi middleware to route /metrics requests
    app.wsgi_app = DispatcherMiddleware(
        app.wsgi_app, {'/metrics': make_wsgi_app(registry=REGISTRY)})

    init()

    @app.route('/test')
    def main():
        return 'Ok'

    return app


REGISTRY.register(CustomCollector())

metrics = GunicornInternalPrometheusMetrics.for_app_factory(
    path='/metrics',
    static_labels={
        'node': 'xxx',
        'pod': 'xx',
        'version': 'xx'
    },
    registry=REGISTRY)

if __name__ == '__main__':
    options = {'bind': ['0.0.0.0:9200'], 'workers': 4, 'loglevel': 'debug'}
    std_app = StandaloneApplication(create_app(), options)
    std_app.run()