def create_app(config_name):
    """Application factory: build and configure the Flask app.

    Args:
        config_name: key into the module-level ``config`` mapping selecting
            the configuration object to load.

    Returns:
        The fully initialised Flask application.
    """
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)

    # Upload worker processes must cap the SQLAlchemy connection pool size
    # per process, so allow an override via the environment.
    # (Fix: read the variable once instead of three separate lookups.)
    pool_size = os.environ.get('SQLALCHEMY_POOL_SIZE')
    if pool_size:
        print('根据环境变量设置数据库连接池大小为:', pool_size)
        app.config['SQLALCHEMY_POOL_SIZE'] = int(pool_size)

    db.init_app(app)
    print('connect to {}'.format(app.config['SQLALCHEMY_DATABASE_URI']))

    try:
        aios_redis.init_app(app)
        print('connect to {}'.format(app.config['REDIS_URL']))
        # The cache is cleared on every restart; if a Redis-backed message
        # queue is in use, its state must be persisted first so messages
        # are not lost.
        init_pubsub(aios_redis)
        # aios_redis.flushdb()
    except Exception as err:
        # Redis is treated as optional: warn and keep the app running.
        print('warning: Redis服务出现异常!!!', err)

    try:
        from prometheus_flask_exporter import PrometheusMetrics
        metrics = PrometheusMetrics(app)
        # static information as metric
        metrics.info('ifaios_metrics', 'ifaios_cs', version="v2")
    except Exception as err:
        # Metrics exporting is optional; missing exporter is not fatal.
        pass

    # Register blueprints.
    from app.api import api_blueprint
    app.register_blueprint(api_blueprint)
    from app.static import static_blueprint
    app.register_blueprint(static_blueprint)
    from app.common import common_blueprint
    app.register_blueprint(common_blueprint)
    return app
def create_app(test_config=None):
    """Application factory for the anomaly-detector demo.

    Args:
        test_config: accepted for the Flask factory convention; currently
            unused by this implementation.

    Returns:
        The configured Flask application.
    """
    # create and configure the app
    app = Flask(__name__, static_folder="build/static", template_folder="build")

    dir_name = "logs"
    try:
        # Create log Directory
        os.mkdir(dir_name)
        print("Directory ", dir_name, " Created ")
    except FileExistsError:
        print("Directory ", dir_name, " already exists")

    # app.logger.removeHandler(default_handler)
    handler = RotatingFileHandler("logs/orders.log", maxBytes=10000, backupCount=1)
    handler.setLevel(logging.DEBUG)
    app.logger.addHandler(handler)

    # If an Elasticsearch URL is set, mirror log records there as well.
    # (Fix: reuse the value already read instead of a second os.getenv call.)
    es_host_url = os.getenv("ES_HOST_URL")
    if es_host_url:
        es_handler = ElasticsearchLogHandler(es_host_url)
        app.logger.addHandler(es_handler)

    app.logger.setLevel(logging.DEBUG)
    app.config.from_object("config")

    app.register_blueprint(mock)
    app.register_blueprint(index_blueprint)

    # set up prometheus metrics exporting
    metrics = PrometheusMetrics(app)
    # static information as metric
    metrics.info("AnomalyDetectorDemo", "Demo application for PAD/LAD", version="0.1")
    return app
def init_flask_metrics(flask_app, export_defaults=True, **kwargs):
    """Initialise Prometheus metrics for *flask_app* when enabled in config."""
    global flask_metrics, enabled

    try:
        service_config = anchore_engine.configuration.localconfig.get_config()
        metrics_section = service_config.get('metrics', {})
        # The flag has historically been spelled both 'enable' and 'enabled'.
        enabled = bool(metrics_section.get('enable', False)
                       or metrics_section.get('enabled', False))
    except Exception as err:
        logger.warn(
            "unable to determine if metrics are enabled - exception: " + str(err))
        enabled = False

    if not enabled:
        # Install the no-op stand-in so callers can use the same interface.
        flask_metrics = disabled_flask_metrics()
        return True

    if not flask_metrics:
        flask_metrics = PrometheusMetrics(
            flask_app, export_defaults=export_defaults)
        flask_metrics.info('anchore_service_info',
                           "Anchore Service Static Information",
                           version=version,
                           **kwargs)

    return True
def init_flask_metrics(flask_app, export_defaults=True, **kwargs):
    """Initialise Prometheus metrics behind the auth-wrapped metrics blueprint."""
    global flask_metrics, enabled

    try:
        service_config = anchore_engine.configuration.localconfig.get_config()
        metrics_section = service_config.get('metrics', {})
        # Accept both historical spellings of the enable flag.
        enabled = bool(metrics_section.get('enable', False)
                       or metrics_section.get('enabled', False))
    except Exception as err:
        logger.warn(
            "unable to determine if metrics are enabled - exception: " + str(err))
        enabled = False

    if not enabled:
        flask_metrics = disabled_flask_metrics()
        return True

    if not flask_metrics:
        # Build a blueprint for metrics, wrapped in auth
        flask_metrics = PrometheusMetrics(metrics_blueprint,
                                          export_defaults=export_defaults)
        # Note: this must be after the addition of PrometheusMetrics to the
        # blueprint in order to ensure proper ordering of before_request and
        # after_request handling by prometheus counters
        metrics_blueprint.before_request(auth_function_factory())
        flask_app.register_blueprint(metrics_blueprint)
        flask_metrics.info('anchore_service_info',
                           "Anchore Service Static Information",
                           version=version,
                           **kwargs)

    return True
class Server:
    """Flask server exposing the computer-vision API, metrics, and API docs."""

    def __init__(self, log_level=logging.DEBUG):
        self.app = Flask(__name__)
        self.app.logger.setLevel(log_level)

        # Prometheus instrumentation plus a static version-info metric.
        self.metrics = PrometheusMetrics(self.app)
        self.metrics.info('app_info', 'Version info', version=__version__)

        # Mount the computer-vision endpoints under /v1/.
        self.cv = ComputerVision()
        self.app.register_blueprint(self.cv.blueprint, url_prefix='/v1/')

        swagger_config = {
            'title': 'Corona Medical Monitors Camera Monitoring API',
            'uiversion': 3,
            'openapi': '3.0.2',
            'version': __version__,
        }
        self.app.config['SWAGGER'] = swagger_config
        self.swagger = Swagger(self.app)

        @self.app.route('/ping/')
        def ping() -> str:
            """
            ping
            ---
            description: get a pong
            """
            return 'pong'
def integrate_prometheus_metrics(app: Flask, restful_api: Api):
    """Attach Prometheus metrics to *app* and return the exporter object."""
    # metrics = RESTfulPrometheusMetrics(app, restful_api)
    metrics = PrometheusMetrics(app)
    metrics.info('app_info', 'Application info', version='1.0.3')

    # Label every request sample with its path, verb, status and timestamp.
    request_labels = {
        'path': lambda: request.path,
        'method': lambda: request.method,
        'status': lambda r: r.status_code,
        'time_stamp': lambda: time.time(),
    }
    by_path_summary = metrics.summary(
        'by_path_method_time_stamp_summary',
        'Request summary by request paths, method, timestamp',
        labels=request_labels)
    metrics.register_default(by_path_summary)
    return metrics
def _setup_metrics(self):
    """Register Prometheus metrics and return the action-invocation summary."""
    exporter = PrometheusMetrics(self.app)

    # Static build metadata exposed as info metrics.
    git_commit = os.environ.get('GIT_COMMIT') or 'unknown'
    exporter.info('flask_app_info', 'Application info', version=git_commit)

    built_at = float(os.environ.get('BUILD_TIMESTAMP') or '0')
    exporter.info('flask_app_built_at', 'Application build timestamp').set(built_at)

    # Summary the caller uses to time individual webhook actions.
    return Summary(
        'webhook_proxy_actions',
        'Action invocation metrics',
        labelnames=('http_route', 'http_method', 'action_type', 'action_index'),
    )
def create_app():
    """Build and return the backend Flask application."""
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_object(Config)
    app.url_map.converters['oid'] = ObjectIdConverter

    # Wire up extensions in their original order.
    for extension in (db, auth, views):
        extension.init_app(app)

    metrics = PrometheusMetrics(app)
    metrics.info('backend_info', 'Backend Information', version='1.0.0')

    @app.route('/ping')
    @metrics.do_not_track()
    def ping():
        """Liveness probe, excluded from request metrics."""
        return 'pong'

    return app
def _setup_metrics(self):
    """Set up Prometheus exporting and build the webhook action summary."""
    metrics = PrometheusMetrics(self.app)

    # Static build metadata exposed as info metrics.
    metrics.info(
        "flask_app_info",
        "Application info",
        version=os.environ.get("GIT_COMMIT") or "unknown",
    )
    build_timestamp = os.environ.get("BUILD_TIMESTAMP") or "0"
    metrics.info(
        "flask_app_built_at", "Application build timestamp"
    ).set(float(build_timestamp))

    # Returned to the caller, which labels each action invocation.
    action_summary = Summary(
        "webhook_proxy_actions",
        "Action invocation metrics",
        labelnames=("http_route", "http_method", "action_type", "action_index"),
    )
    return action_summary
def create_app():
    """Create Flask App."""
    app = Flask(__name__, static_folder="static")

    # Register blueprints
    for blueprint in (index_blueprint, api):
        app.register_blueprint(blueprint)

    # Database URI defaults to an in-memory SQLite store.
    app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv("SQL_CONNECT", "sqlite://")
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

    # Setup Prometheus Metrics
    metrics = PrometheusMetrics(app)
    metrics.info('app_info', 'Log Anomaly Detector', version='v0.1.0.beta1')

    # Initialize db and tables
    db.init_app(app)
    with app.app_context():
        db.create_all()

    return app
def init_flask_metrics(flask_app, export_defaults=True, **kwargs):
    """Enable Prometheus metrics on *flask_app*, honouring service config."""
    global flask_metrics, enabled

    auth_enabled = True
    try:
        service_config = anchore_engine.configuration.localconfig.get_config()
        metrics_section = service_config.get("metrics", {})
        # Handle typo in config. enabled == enable
        enabled = bool(metrics_section.get("enable", False)
                       or metrics_section.get("enabled", False))
        auth_enabled = not bool(metrics_section.get("auth_disabled", False))
    except Exception as err:
        logger.warn(
            "unable to determine if metrics are enabled - exception: " + str(err))
        enabled = False

    if not enabled:
        # Install the no-op stand-in so callers keep a uniform interface.
        flask_metrics = disabled_flask_metrics()
        return True

    if not flask_metrics:
        flask_metrics = PrometheusMetrics(flask_app,
                                          export_defaults=export_defaults,
                                          group_by_endpoint=True)
        if auth_enabled:
            # Protect the metrics endpoint with the service auth hook.
            flask_app.before_request(metrics_auth(flask_metrics.path))
        flask_metrics.info("anchore_service_info",
                           "Anchore Service Static Information",
                           version=version,
                           **kwargs)

    return True
import datetime
import json
import logging
import os
import re
import requests
import sys
import time

app = Flask(__name__)

# Setup Prometheus Metrics for Flask app
metrics = PrometheusMetrics(app, defaults_prefix="magtape")

# Static information as metric
metrics.info("app_info", "Application info", version="0.6")

# Set logging config
# Silence werkzeug's per-request access log; the app logger is used instead.
log = logging.getLogger("werkzeug")
log.disabled = True
magtape_log_level = os.environ["MAGTAPE_LOG_LEVEL"]
app.logger.setLevel(magtape_log_level)

# Set Global variables
# NOTE(review): os.environ[...] reads fail fast with KeyError when a variable
# is missing — presumably intentional for required configuration; confirm.
cluster = os.environ["MAGTAPE_CLUSTER_NAME"]
magtape_namespace_name = os.environ["MAGTAPE_NAMESPACE_NAME"]
magtape_pod_name = os.environ["MAGTAPE_POD_NAME"]

# Set Slack related variables
slack_enabled = os.environ["MAGTAPE_SLACK_ENABLED"]
slack_passive = os.environ["MAGTAPE_SLACK_PASSIVE"]
import logging
import boto3
import simplejson as json
import urllib.parse
import uuid
import os

from boto3.dynamodb.conditions import Key, Attr
from flask import request
from flask import Response
from flask import Blueprint
from prometheus_flask_exporter import PrometheusMetrics

# NOTE(review): Flask itself is not imported in this chunk — presumably it is
# imported earlier in the file; confirm.
app = Flask(__name__)
metrics = PrometheusMetrics(app)
metrics.info('app_info', 'Database process')
bp = Blueprint('app', __name__)

# with open('config.json') as file:
#     data = json.load(file)

# default to us-east-1 if no region is specified
# (us-east-1 is the default/only supported region for a starter account)
region = os.getenv('AWS_REGION', 'us-east-1')

# these must be present; if they are missing, we should probably bail now
access_key = os.getenv('AWS_ACCESS_KEY_ID')
secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')

# this is only needed for starter accounts
session_token = os.getenv('AWS_SESSION_TOKEN')
class Flacon(object):
    """Wrapper around Flask.

    Bundles common service plumbing: a status/health blueprint, embedded
    logging, Prometheus metrics, optional Sentry, and three serving backends
    (werkzeug, twisted, gunicorn).
    """

    LOG_FORMAT = (
        "[%(asctime)s] %(levelname)s %(module)s "
        "[%(filename)s:%(funcName)s:%(lineno)d] (%(thread)d): %(message)s"
    )

    def __init__(self, app_or_name, registry=None):
        """Build a new Flacon.

        Args:
            Either a flask.Flask or the name of the calling module.
        """
        if isinstance(app_or_name, flask.Flask):
            self.app = app_or_name
        else:
            # Convenience constructor.
            self.app = flask.Flask(app_or_name)

        # Most small applications will work behind a reverse proxy and will
        # need this. If you don't want it, create the app yourself.
        self.app.wsgi_app = ProxyFix(self.app.wsgi_app)

        # The blueprints with our views.
        self.blueprint = flask.Blueprint(
            "flacon", __name__, template_folder="templates"
        )

        self.host = "0.0.0.0"
        self.port = 8080
        self.debug = False
        self.log_level = None
        self.twisted = False
        self.gunicorn = False
        self.threads = None
        self.metrics = None
        self.is_setup = False

        self.setup_blueprint()
        self.setup_prometheus(registry)
        self.setup_sentry(sentry_dsn=None)

    def setup_blueprint(self):
        """Initialize the blueprint."""
        # Register endpoints.
        self.blueprint.add_url_rule("/", "status", self.status)
        self.blueprint.add_url_rule("/healthy", "health", self.healthy)
        self.blueprint.add_url_rule("/ready", "ready", self.ready)
        self.blueprint.add_url_rule("/threads", "threads", self.threads_bt)

    def _add_routes(self):
        """Add some nice default routes."""
        if self.app.has_static_folder:
            self.add_url_rule("/favicon.ico", "favicon", self.favicon)
        self.add_url_rule("/", "__default_redirect_to_status", self.redirect_to_status)

    def setup(self, args=None):
        """Parse arguments and finish wiring the application (idempotent)."""
        if self.is_setup:
            return

        # Args.
        if args is None:
            parser = self.get_argparser()
            args = parser.parse_args()
        self.host = args.host
        self.port = args.port
        self.debug = args.debug
        self.log_level = args.log_level
        self.twisted = args.twisted
        self.gunicorn = args.gunicorn
        self.threads = args.threads
        if not args.disable_embedded_logging:
            self.setup_logging(self.log_level)

        # Flask things
        self._add_routes()
        self.app.register_blueprint(self.blueprint, url_prefix="/-")

        def _context():
            return {"flacon": self}

        # Add 'flacon' to the context.
        self.app.context_processor(_context)
        self.is_setup = True

    @staticmethod
    def get_argparser(parser=None):
        """Customize a parser to get the correct options."""
        parser = parser or argparse.ArgumentParser()
        parser.add_argument("--host", default="0.0.0.0", help="Host listen address")
        parser.add_argument("--port", "-p", default=9050, help="Listen port", type=int)
        parser.add_argument(
            "--debug",
            "-d",
            default=False,
            action="store_true",
            help="Enable debug mode",
        )
        parser.add_argument(
            "--log-level",
            "-l",
            default="INFO",
            help="Log Level, empty string to disable.",
        )
        parser.add_argument(
            "--twisted",
            default=False,
            action="store_true",
            help="Use twisted to server requests.",
        )
        parser.add_argument(
            "--gunicorn",
            default=False,
            action="store_true",
            help="Use gunicorn to server requests.",
        )
        parser.add_argument(
            "--threads", default=None, help="Number of threads to use.", type=int
        )
        parser.add_argument(
            "--disable-embedded-logging",
            default=False,
            action="store_true",
            help="Disable embedded logging configuration",
        )
        return parser

    def setup_logging(self, log_level):
        """Setup logging."""
        if not log_level:
            return

        # Remove existing logger.
        self.app.config["LOGGER_HANDLER_POLICY"] = "never"
        self.app.logger.propagate = True
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter(self.LOG_FORMAT))
        self.app.logger.addHandler(handler)
        self.app.logger.setLevel(logging.getLevelName(log_level))
        self.app.logger.info("Logging initialized.")

    def setup_prometheus(self, registry=None):
        """Setup Prometheus."""
        kwargs = {}
        if registry:
            kwargs["registry"] = registry
        self.metrics = PrometheusMetrics(self.app, **kwargs)
        try:
            version = pkg_resources.require(self.app.name)[0].version
        except pkg_resources.DistributionNotFound:
            version = "unknown"
        self.metrics.info(
            "app_info", "Application info", version=version, appname=self.app.name
        )
        self.app.logger.info("Prometheus is enabled.")

    def setup_sentry(self, sentry_dsn):
        """Enable Sentry error reporting when a DSN is available."""
        sentry_dsn = sentry_dsn or os.getenv("SENTRY_DSN", None)
        if not Sentry or not sentry_dsn:
            return
        sentry = Sentry(dsn=sentry_dsn)
        sentry.init_app(self.app)
        self.app.logger.info("Sentry is enabled.")

    def add_url_rule(self, route, endpoint, handler):
        """Add a new url route.

        Args:
            See flask.Flask.add_url_route().
        """
        self.app.add_url_rule(route, endpoint, handler)

    @property
    def name(self):
        """Return the name of the current application."""
        return self.app.import_name

    def redirect_to_status(self):
        """Redirect to the flacon index."""
        return flask.redirect(flask.url_for("flacon.status"))

    def status(self):
        """Render the flacon status page."""
        return flask.render_template("flacon/status.html")

    def is_healthy(self):
        return True

    def healthy(self):
        """Return 200 is healthy, else 500.

        Override is_healthy() to change the health check.
        """
        try:
            if self.is_healthy():
                return "OK", 200
            else:
                return "FAIL", 500
        except Exception as e:
            self.app.logger.exception(e)
            return str(e), 500

    def is_ready(self):
        return True

    def ready(self):
        """Return 200 is ready, else 500.

        Override is_ready() to change the readiness check.
        """
        try:
            if self.is_ready():
                return "OK", 200
            else:
                return "FAIL", 500
        except Exception as e:
            # BUG FIX: logger.exception() was called without the exception
            # argument, raising a TypeError inside the handler; pass `e`
            # exactly as healthy() does.
            self.app.logger.exception(e)
            return str(e), 500

    def threads_bt(self):
        """Display thread backtraces."""
        import threading
        import traceback

        threads = {}
        for thread in threading.enumerate():
            frames = sys._current_frames().get(thread.ident)
            if frames:
                stack = traceback.format_stack(frames)
            else:
                stack = []
            threads[thread] = "".join(stack)
        return flask.render_template("flacon/threads.html", threads=threads)

    def favicon(self):
        """Serve the favicon from the app's static folder."""
        return flask.send_from_directory(
            self.app.static_folder, "favicon.ico", mimetype="image/vnd.microsoft.icon"
        )

    def run(self, **options):
        """Run the application."""
        if not self.is_setup:
            self.setup()
        if self.twisted:
            self.run_with_twisted(**options)
        elif self.gunicorn:
            self.run_with_gunicorn(**options)
        else:
            self.run_with_werkzeug(**options)

    def run_with_werkzeug(self, **options):
        """Run with werkzeug simple wsgi container."""
        threaded = self.threads is not None and (self.threads > 0)
        self.app.run(
            host=self.host,
            port=self.port,
            debug=self.debug,
            threaded=threaded,
            **options
        )

    def run_with_twisted(self, **options):
        """Run with twisted."""
        from twisted.internet import reactor
        from twisted.python import log
        import flask_twisted

        twisted = flask_twisted.Twisted(self.app)
        if self.threads:
            reactor.suggestThreadPoolSize(self.threads)
        if self.log_level:
            log.startLogging(sys.stderr)
        twisted.run(host=self.host, port=self.port, debug=self.debug, **options)

    def run_with_gunicorn(self, **options):
        """Run with gunicorn."""
        import gunicorn.app.base
        from gunicorn.six import iteritems
        import multiprocessing

        class FlaconApplication(gunicorn.app.base.BaseApplication):
            def __init__(self, app, options=None):
                self.options = options or {}
                self.application = app
                super(FlaconApplication, self).__init__()

            def load_config(self):
                # Push recognised, non-None options into gunicorn's config.
                config = dict(
                    [
                        (key, value)
                        for key, value in iteritems(self.options)
                        if key in self.cfg.settings and value is not None
                    ]
                )
                for key, value in iteritems(config):
                    self.cfg.set(key.lower(), value)

            def load(self):
                return self.application

        options = {
            "bind": "%s:%s" % (self.host, self.port),
            "workers": self.threads or ((multiprocessing.cpu_count() * 2) + 1),
            "debug": self.debug,
            **options,
        }
        FlaconApplication(self.app, options).run()
from flask import Response
import jwt
from prometheus_flask_exporter import PrometheusMetrics
import requests
import simplejson as json

# The application
app = Flask(__name__)
metrics = PrometheusMetrics(app)
metrics.info('app_info', 'User process')
bp = Blueprint('app', __name__)

# Address of the datastore service and the verbs it supports.
# NOTE(review): 172.17.0.1 is presumably the Docker bridge gateway — confirm.
db = {
    "name": "http://172.17.0.1:30004/api/v1/datastore",
    "endpoint": ["read", "write", "delete", "update"]
}

@bp.route('/', methods=['GET'])
@metrics.do_not_track()
def hello_world():
    """Plain-text landing page; excluded from request metrics."""
    return ("If you are reading this in a browser, your service is "
            "operational. Switch to curl/Postman/etc to interact using the "
            "other HTTP verbs.")
import datetime
import json
import logging
import os
import re
import requests
import sys
import time

app = Flask(__name__)

# Setup Prometheus Metrics for Flask app
metrics = PrometheusMetrics(app, defaults_prefix="magtape")

# Static information as metric
metrics.info("app_info", "Application info", version="v2.3.0-prerelease")

# Set logging config
# Disable werkzeug's access log; the app logger is used instead.
log = logging.getLogger("werkzeug")
log.disabled = True
magtape_log_level = os.environ["MAGTAPE_LOG_LEVEL"]
app.logger.setLevel(magtape_log_level)

# Set Global variables
# NOTE(review): these reads raise KeyError if unset — presumably intentional
# fail-fast behaviour for required configuration; confirm.
cluster = os.environ["MAGTAPE_CLUSTER_NAME"]
magtape_namespace_name = os.environ["MAGTAPE_NAMESPACE_NAME"]
magtape_pod_name = os.environ["MAGTAPE_POD_NAME"]

# Set Slack related variables
slack_enabled = os.environ["MAGTAPE_SLACK_ENABLED"]
slack_passive = os.environ["MAGTAPE_SLACK_PASSIVE"]
from flask import Flask, g, request, redirect, render_template, flash
from flask_httpauth import HTTPBasicAuth
from prometheus_flask_exporter import PrometheusMetrics

from docker_helper import read_configuration
from config import configure, add_rule, delete_rule

app = Flask(__name__)
# Secret key comes from a mounted secrets file, with an insecure fallback.
app.config['SECRET_KEY'] = read_configuration('SECRET_KEY', '/var/secrets/flask', 'InSecure')

auth = HTTPBasicAuth()

metrics = PrometheusMetrics(app)
# Build metadata exposed as static info metrics.
metrics.info('flask_app_info', 'Application info',
             version=os.environ.get('GIT_COMMIT') or 'unknown')
metrics.info('flask_app_built_at', 'Application build timestamp').set(
    float(os.environ.get('BUILD_TIMESTAMP') or '0'))

logging.basicConfig(
    format='%(asctime)s [%(levelname)s] %(module)s.%(funcName)s - %(message)s')
logger = logging.getLogger('redirect-service')
logger.setLevel(logging.INFO)

# Redirect rule registry: exact-match rules, regex rules, and the admin rule.
_rules = {'simple': {}, 'regex': [], 'admin': None}

# NOTE(review): the decorated handler body continues beyond this chunk.
@app.route('/', methods=['GET'], defaults={'_': None})
@app.route('/<path:_>', methods=['GET', 'POST'])
import datetime
import json
import logging
import os
import re
import requests
import sys
import time

app = Flask(__name__)

# Setup Prometheus Metrics for Flask app
metrics = PrometheusMetrics(app, defaults_prefix="magtape")

# Static information as metric
metrics.info("app_info", "Application info", version="v2.3.3")

# Set logging config
# Disable werkzeug's access log; the app logger is used instead.
log = logging.getLogger("werkzeug")
log.disabled = True
magtape_log_level = os.environ["MAGTAPE_LOG_LEVEL"]
app.logger.setLevel(magtape_log_level)

# Set Global variables
# NOTE(review): these reads raise KeyError if unset — presumably intentional
# fail-fast behaviour for required configuration; confirm.
cluster = os.environ["MAGTAPE_CLUSTER_NAME"]
magtape_namespace_name = os.environ["MAGTAPE_NAMESPACE_NAME"]
magtape_pod_name = os.environ["MAGTAPE_POD_NAME"]

# Set Slack related variables
slack_enabled = os.environ["MAGTAPE_SLACK_ENABLED"]
slack_passive = os.environ["MAGTAPE_SLACK_PASSIVE"]
import simplejson as json # Local libraries import tracing # The application # Integer value 0 <= v < 100, denoting proportion of # calls to `get_song` to return 500 from PERCENT_ERROR = 50 app = Flask(__name__) metrics = PrometheusMetrics(app) metrics.info('app_info', 'Music process') tracer = tracing.SimpleTracer("s2") db = { "name": "http://*****:*****@bp.route('/health') @metrics.do_not_track() def health(): return Response("", status=200, mimetype="application/json")
import cv2
import werkzeug
from PIL import Image
from numpy import asarray

logging.basicConfig(level=logging.INFO)
logging.info("Setting LOGLEVEL to INFO")

# Declare a flask app
app = Flask(__name__)
api = Api(app)

metrics = PrometheusMetrics(app)
metrics.info("app_info", "App Info, this can be anything you want", version="1.0.0")

# NOTE(review): the config key is MONGO_URI but the env var is MONGO_URL —
# looks intentional but worth confirming.
app.config['MONGO_URI'] = os.environ.get('MONGO_URL')
mongo = PyMongo(app)

# Model saved with Keras model.save()
MODEL_PATH = './models/datty.h5'

# Load your own trained model
model = load_model(MODEL_PATH)
# model._make_predict_function()  # Necessary
print('Model loaded. Check http://127.0.0.1:5000/...')
import oauthlib.oauth2
import requests_oauthlib
from prometheus_flask_exporter import PrometheusMetrics

from edc_ogc import VERSION
from edc_ogc.ogc.client import OGCClient, OGCRequest
from edc_ogc.configapi import ConfigAPIDefaultLayers, ConfigAPI
from edc_ogc.mdi import Mdi, MdiError

# -------------- App setup --------------

app = Flask(__name__, static_url_path='/static')
app.url_map.strict_slashes = False
metrics = PrometheusMetrics(app)
metrics.info('app_info', 'Application info', version=VERSION)

# -------------- Logging setup --------------

logger = logging.getLogger(__name__)
# NOTE(review): the dictConfig mapping continues beyond this chunk.
logging.config.dictConfig({
    'version': 1,
    'formatters': {
        'simple': {
            'format': '%(levelname)s: %(message)s',
        },
        'verbose': {
            'format': '[%(asctime)s][%(module)s] %(levelname)s: %(message)s',
        }
    },
from anomaly_detector.fact_store.api import FactStore
import os
from prometheus_client import Counter
from prometheus_flask_exporter import PrometheusMetrics

# Counter of human feedback submissions, labelled by customer and verdict.
HUMAN_FEEDBACK_COUNT = Counter(
    "aiops_human_feedback",
    "count of number of human feedback provided by customer",
    ['customer_id', 'anomaly_status'])
# Counter of feedback entries that failed to persist, labelled by error text.
HUMAN_FEEDBACK_ERROR_COUNT = Counter(
    "aiops_human_feedback_error",
    "count of human feedback not able to write to db",
    ['err_msg'])

app = Flask(__name__, static_folder="static")
metrics = PrometheusMetrics(app)
metrics.info('app_info', 'Log Anomaly Detector', version='v0.1.0.beta1')

@app.route("/")
def index():
    """Render main html page for fact_store."""
    _id = request.args.get("lad_id")
    _msg = request.args.get("message")
    _is_anomaly = request.args.get("is_anomaly")
    # Without a lad_id the page renders empty; otherwise pre-fill the form.
    if _id is None:
        return render_template("index.html")
    return render_template("index.html", id=_id, msg=_msg, is_anomaly=_is_anomaly)
from flask import Flask, request, jsonify
from prometheus_flask_exporter import PrometheusMetrics
import mysql.connector
import json

app = Flask(__name__)
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = True

metrics = PrometheusMetrics(app)
metrics.info('app_info', 'Application info', version='1.0.3')

# Module-level connection and cursor shared by the request handlers.
connection = None
cursor = None

def connect_database():
    """Open the MySQL connection/cursor into the module-level globals."""
    global connection
    global cursor
    # NOTE(review): the user name was redacted ('******') at extraction time.
    connection = mysql.connector.connect(host='database',
                                         port='3306',
                                         user='******',
                                         passwd='1234',
                                         database='mydb')
    cursor = connection.cursor()

@app.route('/')
def hello_world():
    """Trivial root endpoint."""
    return "hello world"
import uuid
from prometheus_flask_exporter import PrometheusMetrics

# Column indices into a trip record tuple.
ID = 0
SOURCE = 1
DESTINATION = 2
DEPARTURE_HOUR = 3
DEPARTURE_DAY = 4
DURATION = 5
CAPACITY = 6

app = Flask(__name__)
metrics = PrometheusMetrics(app)

# Module-level DB connection, initialised by initialiaze_db_connection().
connection = None

metrics.info('backend', 'Backend prometheus metrics', version='1.0.3')

def initialiaze_db_connection():
    """Open the MySQL connection described by config.json (autocommit on)."""
    global connection
    with open('config.json', 'r') as f:
        config = json.load(f)
    connection = mysql.connector.connect(**config)
    connection.autocommit = True

# NOTE(review): the handler body continues beyond this chunk.
@app.route('/hero-stats/', methods=['POST'])
def hero_stats() -> str:
    global connection
)
# NOTE(review): the ')' above closes a call that begins before this chunk.
application = app.app

# create tracer and put it in the application configuration
Configuration.tracer = init_jaeger_tracer("stub_api")

# create metrics and manager
metrics = PrometheusMetrics(application)
manager = Manager(application)

# Needed for session.
application.secret_key = Configuration.APP_SECRET_KEY

# static information as metric
metrics.info("stub_api_info", "Stub API info", version=__version__)

@app.route("/")
@metrics.do_not_track()
def base_url():
    """Redirect to UI by default."""
    return redirect("api/v1/ui")

# NOTE(review): this handler's body continues beyond this chunk.
@app.route("/api/v1")
@metrics.do_not_track()
def api_v1():
    """Provide a listing of all available endpoints."""
    paths = []
# Where to send users for Grafana dashboards (defaults to the tutorial README).
_GRAFANA_REDIRECT_URL = os.getenv(
    "THOTH_AIDEVSECOPS_GRAFANA_REDIRECT_URL",
    "https://github.com/thoth-station/elyra-aidevsecops-tutorial/blob/master/README.md",
)

application = Flask("aidevsecops-tutorial")

# Add Cross Origin Request Policy to all
CORS(application)

prometheus_metrics = PrometheusMetrics(application, group_by="endpoint")
# static information as metric
prometheus_metrics.info("aidevsecops_tutorial_app_info", "App version deployed", version=__version__)

model = Model()

# custom metric to expose model version
model_version_metric = prometheus_metrics.info(
    "aidevsecops_tutorial_model_info",
    "Model version deployed",
    version=model.model_version,  # label
)

# NOTE(review): this callback's body continues beyond this chunk.
@application.before_first_request
def before_first_request_callback():
    """Register callback, runs before first request to this service."""
import datetime

from flask import Flask, flash, request, redirect, url_for, render_template
from prometheus_flask_exporter import PrometheusMetrics
from werkzeug.utils import secure_filename
from flask import send_from_directory
from influxdb import InfluxDBClient

# Upload destination and the archive types we accept.
UPLOAD_FOLDER = None
ALLOWED_EXTENSIONS = set(['zip', 'rar', 'tar'])
CLIENT = None
DB_NAME = "users"
LOG_FILE = '/var/log/webserver'

app = Flask(__name__)
metrics = PrometheusMetrics(app)
metrics.info('webserver_info', 'Application info', version='1.0.3')

# Ensure that only allowed files will be uploaded
def allowed_file(filename):
    # True when the filename has an extension from ALLOWED_EXTENSIONS.
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

@app.route('/', methods=['GET', 'POST'])
def welcome():
    """
    This function is used to render welcome page.
    We don't accept POST/GET methods here because we use redirect.
    """
    return render_template("index.html")
_LOGGER.setLevel( logging.DEBUG if bool(int(os.getenv("AMUN_DEBUG", 0))) else logging.INFO) # Expose for uWSGI. app = connexion.App(__name__) application = app.app app.add_api(Configuration.SWAGGER_YAML_PATH) metrics = PrometheusMetrics(application) manager = Manager(application) # Needed for session. application.secret_key = Configuration.APP_SECRET_KEY # static information as metric metrics.info("amun_api_info", "Amun API info", version=__version__) @app.route("/") @metrics.do_not_track() def base_url(): """Redirect to UI by default.""" return redirect("api/v1/ui") @app.route("/api/v1") @metrics.do_not_track() def api_v1(): """Provide a listing of all available endpoints.""" paths = []
# create metrics and manager metrics = PrometheusMetrics(application, group_by="endpoint", excluded_paths=[ "/liveness", "/readiness", "/api/v1/ui", "/api/v1/openapi", ]) manager = Manager(application) # Needed for session. application.secret_key = Configuration.APP_SECRET_KEY # static information as metric metrics.info("user_api_info", "User API info", version=__service_version__) _API_GAUGE_METRIC = metrics.info("user_api_schema_up2date", "User API schema up2date") class _GraphDatabaseWrapper: """A wrapper for lazy graph database adapter handling.""" _graph = GraphDatabase() def __getattr__(self, item): """Connect to the database lazily on first call.""" if not self._graph.is_connected(): self._graph.connect() return getattr(self._graph, item)
from dataclasses import dataclass, asdict
from enum import Enum
from typing import Dict

from flask import Flask, request, jsonify
from flask_cors import CORS, cross_origin
from prometheus_flask_exporter import PrometheusMetrics
from jaeger_client import Config
from opentracing.propagation import Format

app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'

metrics = PrometheusMetrics(app)
metrics.info('workout_gateway', 'Workout Gateway', version='1.0.3')

# Jaeger tracer: the const sampler with param 1 traces every request.
config = Config(
    config={
        'sampler': {
            'type': 'const',
            'param': 1,
        },
        'logging': True,
    },
    service_name='workout-gateway',
    validate=True,
)
tracer = config.initialize_tracer()