def serve(port=8000, profile=False, site=None, sites_path='.'):
    global application, _site, _sites_path
    _site = site
    _sites_path = sites_path

    from werkzeug.serving import run_simple

    if profile:
        application = ProfilerMiddleware(application, sort_by=('cumtime', 'calls'))

    if not os.environ.get('NO_STATICS'):
        application = SharedDataMiddleware(application, {
            '/assets': os.path.join(sites_path, 'assets'),
        })

        application = StaticDataMiddleware(application, {
            '/files': os.path.abspath(sites_path)
        })

    application.debug = True
    application.config = {
        'SERVER_NAME': 'localhost:8000'
    }

    in_test_env = os.environ.get('CI')
    if in_test_env:
        log = logging.getLogger('werkzeug')
        log.setLevel(logging.ERROR)

    run_simple('0.0.0.0', int(port), application,
               use_reloader=not in_test_env,
               use_debugger=not in_test_env,
               use_evalex=not in_test_env,
               threaded=True)
def serve(port=8000, profile=False, site=None, sites_path='.'):
    global application, _site, _sites_path
    _site = site
    _sites_path = sites_path

    from werkzeug.serving import run_simple

    if profile:
        application = ProfilerMiddleware(application, sort_by=('cumtime', 'calls'))

    if not os.environ.get('NO_STATICS'):
        application = SharedDataMiddleware(application, {
            b'/assets': os.path.join(sites_path, 'assets').encode("utf-8"),
        })

        application = StaticDataMiddleware(application, {
            b'/files': os.path.abspath(sites_path).encode("utf-8")
        })

    application.debug = True
    application.config = {
        'SERVER_NAME': 'localhost:8000'
    }

    run_simple('0.0.0.0', int(port), application,
               use_reloader=True,
               use_debugger=True,
               use_evalex=True,
               threaded=True)
def serve(port=8000, profile=False, site=None, sites_path="."):
    global application, _site, _sites_path
    _site = site
    _sites_path = sites_path

    from werkzeug.serving import run_simple

    if profile:
        application = ProfilerMiddleware(application, sort_by=("tottime", "calls"))

    if not os.environ.get("NO_STATICS"):
        application = SharedDataMiddleware(
            application, {b"/assets": os.path.join(sites_path, "assets").encode("utf-8")}
        )

        application = StaticDataMiddleware(
            application, {b"/files": os.path.abspath(sites_path).encode("utf-8")}
        )

    application.debug = True
    application.config = {"SERVER_NAME": "localhost:8000"}

    run_simple("0.0.0.0", int(port), application,
               use_reloader=True,
               use_debugger=True,
               use_evalex=True,
               threaded=True)
from .config2 import *
import boto3
import sys  # Here!

from werkzeug.contrib.cache import MemcachedCache
from werkzeug.contrib.profiler import ProfilerMiddleware
from werkzeug.contrib.profiler import MergeStream

app = Flask(__name__)
app.config.from_object('instance.ProductionConfig')
app.secret_key = 'adfasjfh23437fhufhskjfd'  # Here!

app.wsgi_app = ProfilerMiddleware(app.wsgi_app, open('profiler.log', 'w'),
                                  restrictions=[30])

jsonrpc = JSONRPC(app, '/api/')

file = open('../centrifugo/config.json', 'r')
config = json.load(file)
app.centrifugo_secret = config['secret']
file.close()

oauth = OAuth()  # Here!

cache = MemcachedCache(['127.0.0.1'])
cache.clear()
from flaskapp.app import create_app
from werkzeug.debug import DebuggedApplication
from werkzeug.contrib.profiler import ProfilerMiddleware
import os

application = create_app()

DEBUG = os.environ.get('DEBUG', False)
if DEBUG:
    application.debug = True
    application.wsgi_app = DebuggedApplication(application.wsgi_app, True)
    application.config['PROFILE'] = True
    application.wsgi_app = ProfilerMiddleware(application.wsgi_app, restrictions=[30])
def run(argv):
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-p', '--port',
                        action='store', type=int, default=5000,
                        help="Port to listen on.")
    parser.add_argument('-H', '--host',
                        action='store', type=str, default='localhost',
                        help="Host for the webserver.")
    parser.add_argument('-s', '--settings',
                        action='store', type=str,
                        help="Configuration-file for beancount-web.")
    parser.add_argument('-d', '--debug',
                        action='store_true',
                        help="Turn on debugging. This uses the built-in Flask "
                             "webserver, and live-reloading of beancount-files "
                             "is disabled.")
    parser.add_argument('--profile',
                        action='store_true',
                        help="Turn on profiling. Implies --debug. Profiling "
                             "information for each request will be printed to "
                             "the log, unless --pstats-output is also specified.")
    parser.add_argument('--pstats-output', type=str,
                        help="Output directory for profiling pstats data. "
                             "Implies --profile. If this is specified, profiling "
                             "information will be saved to the specified "
                             "directory and will not be printed to the log.")
    parser.add_argument('--profile-restriction', type=int, default=30,
                        help="Maximum number of functions to show in profile "
                             "printed to the log.")
    parser.add_argument('filename', type=str, help="Beancount input file.")
    args = parser.parse_args(argv)

    if args.pstats_output is not None:
        args.profile = True
    if args.profile:
        args.debug = True

    app.beancount_file = args.filename
    app.api = BeancountReportAPI(app.beancount_file)

    app.user_config = configparser.ConfigParser()
    user_config_defaults_file = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'default-settings.conf')
    app.user_config.readfp(open(user_config_defaults_file))
    app.user_config['beancount-web']['file_defaults'] = user_config_defaults_file
    app.user_config['beancount-web']['file_user'] = ''

    if args.settings:
        app.user_config['beancount-web']['file_user'] = os.path.realpath(args.settings)
        app.user_config.read(app.user_config['beancount-web']['file_user'])

    if args.debug:
        if args.profile:
            from werkzeug.contrib.profiler import ProfilerMiddleware
            app.config['PROFILE'] = True
            kwargs = {}
            if args.pstats_output is not None:
                kwargs['profile_dir'] = args.pstats_output
            app.wsgi_app = ProfilerMiddleware(
                app.wsgi_app,
                restrictions=[args.profile_restriction],
                **kwargs)

        app.config['ASSETS_CACHE'] = True
        app.config['ASSETS_DEBUG'] = True

        app.run(args.host, args.port, args.debug)
    else:
        server = Server(app.wsgi_app)
        reload_source_files(server)
        server.serve(port=args.port, host=args.host, debug=args.debug)
def init_app(cls, app):
    from werkzeug.contrib.profiler import ProfilerMiddleware
    app.wsgi_app = ProfilerMiddleware(
        app.wsgi_app,
        restrictions=[cls.PROFILER_NUM_FUNCTION_RESTRICTIONS],
        profile_dir=cls.PROFILER_DIR)
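A hook like init_app() above typically hangs off a config class and is invoked from the app factory. A minimal sketch of that wiring, under the assumption that the class name, default values, and factory call shown here are illustrative rather than taken from the source:

class ProfilingConfig:
    PROFILER_NUM_FUNCTION_RESTRICTIONS = 30  # hypothetical default
    PROFILER_DIR = './profiles'              # hypothetical default

    @classmethod
    def init_app(cls, app):
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(
            app.wsgi_app,
            restrictions=[cls.PROFILER_NUM_FUNCTION_RESTRICTIONS],
            profile_dir=cls.PROFILER_DIR)


# An app factory would then give the active config class a chance to adjust
# the app, e.g.: config[config_name].init_app(app)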
def profile(length=25, profile_dir=None):
    """Start the application under the code profiler."""
    from werkzeug.contrib.profiler import ProfilerMiddleware
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app,
                                      restrictions=[length],
                                      profile_dir=profile_dir)
def profile(length=30, profile_dir=None):
    from werkzeug.contrib.profiler import ProfilerMiddleware
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app,
                                      restrictions=[length],
                                      profile_dir=profile_dir)
    app.run()
# @Date: 2016-06-19 18:36:54
# @Last modified by: Brian Cherinka
# @Last Modified time: 2016-06-19 18:39:21

from __future__ import print_function, division, absolute_import
from flask import Flask
from werkzeug.contrib.profiler import ProfilerMiddleware
from myapp import create_app
import argparse

# --------------------------
# Parse command line options
# --------------------------
parser = argparse.ArgumentParser(description='Script to start the SDSS API.')
parser.add_argument('-p', '--port', help='Port to use in debug mode.',
                    default=5000, type=int, required=False)
args = parser.parse_args()

# Start the Profiler app and run in DEBUG mode
app = create_app(debug=True)
app.config['PROFILE'] = True
app.wsgi_app = ProfilerMiddleware(app.wsgi_app)
app.run(debug=True, port=args.port)
def profile(length=25, profile_dir=None):
    """Start the profiler with the `python manage.py profile` command."""
    from werkzeug.contrib.profiler import ProfilerMiddleware
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app,
                                      restrictions=[length],
                                      profile_dir=profile_dir)
    app.run()
def create_app(config_name):
    app = Flask(__name__, static_url_path='')
    app.config.from_object(config[config_name])
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    # log_level = logging.INFO
    # if app.config['DEBUG']:
    #     log_level = logging.DEBUG

    # Flask has a default logger which works well and pushes to stderr.
    # If you want to add different handlers (to file, or logstash, or whatever)
    # you can use code similar to the one below and set the error level accordingly.
    # logHandler = logging.StreamHandler()
    # formatter = jsonlogger.JsonFormatter()
    # logHandler.setFormatter(formatter)
    # logHandler.setLevel(logging.INFO)
    # app.logger.addHandler(logHandler)
    # or for LOGSTASH
    # app.logger.addHandler(logstash.LogstashHandler(app.config['LOGSTASH_HOST'], app.config['LOGSTASH_PORT'], version=1))

    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])
    print('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0)     # served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1)  # cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)     # user info

    '''setup cache'''
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'], str(api_version_minor))
    ip2org = IP2Org(icache)

    es = Elasticsearch(
        app.config['ELASTICSEARCH_URL'],
        # # sniff before doing anything
        # sniff_on_start=True,
        # # refresh nodes after a node fails to respond
        # sniff_on_connection_fail=True,
        # # and also every 60 seconds
        # sniffer_timeout=60
        timeout=60 * 20,
        maxsize=100,
    )

    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache)

    app.extensions['es_access_store'] = esStore(
        es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
    )

    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
        )

    app.extensions['proxy'] = ProxyHandler(
        allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
        allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
        allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])

    basepath = app.config['PUBLIC_API_BASE_PATH'] + api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    '''define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache',
                                threshold=100000,
                                default_timeout=60 * 60,
                                mode=777)

    '''Set usage limiter'''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)  # use redis to store limits

    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../' + rate_limit_file
    if os.path.exists(rate_limit_file):
        with open(rate_limit_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                auth_key = AuthKey(**row)
                app.extensions['redis-user'].hmset(auth_key.get_key(), auth_key.__dict__)
        print('INFO - successfully loaded rate limit file')
    else:
        print('ERROR - cannot find rate limit file')
        app.logger.error('cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!'
                         % rate_limit_file)

    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. '
                           'All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver

    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)

    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    app.register_blueprint(latest_blueprint, url_prefix='/api/latest')
    app.register_blueprint(current_version_blueprint, url_prefix='/api/' + str(api_version))
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/api/' + str(api_version_minor))

    @app.route('/api-docs/%s' % str(api_version_minor))
    def docs_current_minor_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs/%s' % str(api_version))
    def docs_current_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs')
    def docs():
        return redirect('/api/swagger/index.html')

    def serve_swagger():
        return app.send_static_file('docs/swagger/swagger.yaml')

    @app.route('/api/docs/swagger.yaml')
    def send_swagger():
        return serve_swagger()

    @app.route('/api/latest/docs/swagger.yaml')
    def send_swagger_latest():
        return serve_swagger()

    @app.route('/api/' + str(api_version) + '/docs/swagger.yaml')
    def send_swagger_current_version():
        return serve_swagger()

    @app.before_request
    def before_request():
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        rate_limiter = RateLimiter()
        now = datetime.now()
        took = (now - g.request_start).total_seconds() * 1000
        if took > 500:
            # set cache to last one hour for each second spent in the request
            cache_time = str(int(3600 * took))
            resp.headers.add('X-Accel-Expires', cache_time)
        took = int(round(took))
        LogApiCallWeight(took)
        # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
        #     took = RateLimiter.DEFAULT_CALL_WEIGHT
        current_values = increment_call_rate(took, rate_limiter)
        now = datetime.now()
        ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
        ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
        usage_left_10s = rate_limiter.short_window_rate - current_values['short']
        usage_left_1h = rate_limiter.long_window_rate - current_values['long']
        min_ceil = ceil10s
        if usage_left_1h < 0:
            min_ceil = ceil1h
        if (usage_left_10s < 0) or (usage_left_1h < 0):
            resp.headers.add('Retry-After', min_ceil)
        resp.headers.add('X-API-Took', took)
        resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
        resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
        resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
        resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
        # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
        # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
        resp.headers.add('Access-Control-Allow-Origin', '*')
        resp.headers.add('Access-Control-Allow-Headers', 'Content-Type,Auth-Token')
        if do_not_cache(request):
            # do not cache in the browser
            resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
        else:
            resp.headers.add('Cache-Control',
                             "no-transform, public, max-age=%i, s-maxage=%i"
                             % (took * 1800 / 1000, took * 9000 / 1000))
        return resp

    return app
def configure_profiler(app):
    if app.config.get('PROFILE'):
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
import sys

from werkzeug.contrib.profiler import ProfilerMiddleware, MergeStream

from eudat_http_api import app

app.config['PROFILE'] = True
f = open('profiler.log', 'w')
stream = MergeStream(sys.stdout, f)
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, stream)
app.run(debug=True)
# -*- coding: UTF-8 -*-
from gevent import monkey
monkey.patch_all(thread=False)

from werkzeug.contrib.fixers import ProxyFix
from werkzeug.contrib.profiler import ProfilerMiddleware

from .app import create_app

__all__ = ['app']

#: WSGI endpoint
app = create_app()
app.wsgi_app = ProxyFix(app.wsgi_app)

if app.config.get('PROFILING', False):
    app.wsgi_app = ProfilerMiddleware(
        app.wsgi_app, profile_dir=app.config['PROFILING_DIR'])
from werkzeug.contrib.profiler import ProfilerMiddleware
from application import application  # This is your Flask app

application.wsgi_app = ProfilerMiddleware(application.wsgi_app)
application.run(debug=True)  # Standard run call
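All of these examples import from werkzeug.contrib, which was removed in Werkzeug 1.0; on current Werkzeug the same middleware lives at werkzeug.middleware.profiler. A minimal sketch of the equivalent setup on a newer version, mirroring the example above (the `application` import is a placeholder for your own Flask app):

from werkzeug.middleware.profiler import ProfilerMiddleware  # Werkzeug >= 1.0
from application import application  # placeholder: your Flask app

application.config['PROFILE'] = True
# restrictions and profile_dir work the same way as in the older import path
application.wsgi_app = ProfilerMiddleware(application.wsgi_app,
                                          restrictions=[30],
                                          profile_dir='./profiles')
application.run(debug=True)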
#!flask/bin/python
from werkzeug.contrib.profiler import ProfilerMiddleware
from barrabrasil import app

app.config['PROFILE'] = True
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])  # show the 30 slowest calls

import webbrowser
webbrowser.open("http://127.0.0.1:5000/", new=1)

app.run(host='0.0.0.0', debug=True)
def profile(length, profile_dir):
    """Start the application under the code profiler."""
    from werkzeug.contrib.profiler import ProfilerMiddleware
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app,
                                      restrictions=[length],
                                      profile_dir=profile_dir)
    app.run(debug=False)
def run():
    fcrepo = create_app(config['application'])
    fcrepo.wsgi_app = ProfilerMiddleware(fcrepo.wsgi_app, **options)
    fcrepo.config['PROFILE'] = True
    fcrepo.run(debug=True)
def profile():
    """Start the application under the code profiler."""
    from werkzeug.contrib.profiler import ProfilerMiddleware
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app)
    app.run()
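The profile() commands in these examples are usually registered as Flask-Script manager commands so they can be run as `python manage.py profile`. A minimal sketch of that registration, assuming a hypothetical `create_app` factory and config name (both are placeholders, not from the source):

from flask_script import Manager
from myapp import create_app  # placeholder app factory

app = create_app('default')   # placeholder config name
manager = Manager(app)

@manager.command
def profile(length=25, profile_dir=None):
    """Start the application under the code profiler."""
    from werkzeug.contrib.profiler import ProfilerMiddleware
    app.wsgi_app = ProfilerMiddleware(app.wsgi_app,
                                      restrictions=[length],
                                      profile_dir=profile_dir)
    app.run()

if __name__ == '__main__':
    manager.run()  # invoked as: python manage.py profile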
def create_app(**config_overrides):
    """
    Creates a flask application with the desired configuration settings
    and connects it to the database.
    """
    app = Flask(__name__)

    # Initialize logging
    _configure_logging()

    # Initialize configuration
    app.config.from_object("teamserver.config")
    app.config["MODE"] = MODE
    app.config["MONGODB_SETTINGS"] = {
        "db": DB_NAME,
        "host": DB_HOST,
        "port": DB_PORT
    }
    if DB_USER and DB_PASS:
        app.config["MONGODB_SETTINGS"]["username"] = DB_USER
        app.config["MONGODB_SETTINGS"]["password"] = DB_PASS
    app.config["CELERY_BROKER_URL"] = CELERY_BROKER_URL
    app.config["CELERY_RESULT_BACKEND"] = CELERY_RESULT_BACKEND

    # Override configuration options
    app.config.update(config_overrides)

    # Initialize DEBUG
    if MODE.upper() == "DEBUG":
        # Enable debug logging
        app.logger.setLevel(logging.DEBUG)

        # Enable profiling
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.config["PROFILE"] = True
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app,
                                          restrictions=[50],
                                          profile_dir=PROFILE_DIR)

        # Enable mongodb debug toolbar
        from flask_debugtoolbar import DebugToolbarExtension
        app.config["DEBUG_TB_PANELS"] = [
            "flask_mongoengine.panels.MongoDebugPanel"
        ]
        app.debug_toolbar = DebugToolbarExtension(app)
    else:
        app.logger.setLevel(logging.WARNING)

    # Initialize the database
    try:
        DB.init_app(app)
    except MongoEngineConnectionError as conn_err:
        print(conn_err)
        sys.exit("Could not connect to database.")

    # Import endpoints
    from teamserver.router import API
    app.register_blueprint(API)

    app.logger.info(f"Initialized Arsenal Teamserver [{MODE}]")
    return app
def __init__(self, args=None, default_port=8080, desc=''):
    """
    :param args: CLI arguments
    :param int default_port: The default port that the application will use
    :param str desc: The description for the application to be started
    """
    parser = ArgumentParser(description=desc)
    parser.add_argument('-p', '--port', type=int, default=default_port,
                        help='Port to listen on (default %s)' % default_port)
    parser.add_argument('-b', '--bind', default='0.0.0.0',
                        help='Address to listen on (default 0.0.0.0)')
    parser.add_argument('-t', '--threads', type=int, default=4,
                        help='Number of threads to use (default 4)')
    parser.add_argument('--debug', action='store_true',
                        help='Enable debug mode')
    parser.add_argument('--profile', action='store_true',
                        help='Enable profile mode')
    parser.add_argument('--live', action='store_true',
                        help='Add live-web handler at /live')
    parser.add_argument('--record', action='store_true',
                        help='Enable recording from the live web')
    parser.add_argument('--proxy',
                        help='Enable HTTP/S proxy on specified collection')
    parser.add_argument('-pt', '--proxy-default-timestamp',
                        help='Default timestamp / ISO date to use for proxy requests')
    parser.add_argument('--proxy-record', action='store_true',
                        help='Enable proxy recording into specified collection')
    parser.add_argument('--proxy-enable-wombat', action='store_true',
                        help='Enable partial wombat JS overrides support in proxy mode')
    parser.add_argument('--enable-auto-fetch', action='store_true',
                        help='Enable auto-fetch worker to capture resources from stylesheets, '
                             '<img srcset> when running in live/recording mode')

    self.desc = desc
    self.extra_config = {}

    self._extend_parser(parser)

    self.r = parser.parse_args(args)

    logging.basicConfig(format='%(asctime)s: [%(levelname)s]: %(message)s',
                        level=logging.DEBUG if self.r.debug else logging.INFO)

    if self.r.proxy:
        self.extra_config['proxy'] = {
            'coll': self.r.proxy,
            'recording': self.r.proxy_record,
            'enable_wombat': self.r.proxy_enable_wombat,
            'default_timestamp': self.r.proxy_default_timestamp,
        }
        self.r.live = True

    self.extra_config['enable_auto_fetch'] = self.r.enable_auto_fetch

    self.application = self.load()

    if self.r.profile:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        self.application = ProfilerMiddleware(self.application)
config.add_request_method(mw.pg_connection_request_property, name='pg_connection', reify=True)
config.add_request_method(mw.userid_request_property, name='userid', reify=True)
config.add_request_method(mw.log_exc_request_method, name='log_exc')
config.add_request_method(mw.web_input_request_method, name='web_input')
config.add_request_method(mw.set_cookie_on_response)
config.add_request_method(mw.delete_cookie_on_response)

wsgi_app = config.make_wsgi_app()
wsgi_app = mw.InputWrapMiddleware(wsgi_app)
wsgi_app = mw.URLSchemeFixingMiddleware(wsgi_app)
if d.config_read_bool('profile_responses', section='backend'):
    from werkzeug.contrib.profiler import ProfilerMiddleware
    wsgi_app = ProfilerMiddleware(wsgi_app, profile_dir=m.MACRO_STORAGE_ROOT + 'profile-stats')
if d.config_obj.has_option('sentry', 'dsn'):
    wsgi_app = mw.SentryEnvironmentMiddleware(wsgi_app, d.config_obj.get('sentry', 'dsn'))

configure_libweasyl(
    dbsession=d.sessionmaker,
    not_found_exception=HTTPNotFound,
    base_file_path=m.MACRO_STORAGE_ROOT,
    staff_config_dict=staff_config.load(),
    media_link_formatter_callback=format_media_link,
)
def flaskrun(app, default_host="127.0.0.1", default_port="5000"):
    """
    Takes a flask.Flask instance and runs it. Parses
    command-line flags to configure the app.
    """
    # Set up the command-line options
    parser = optparse.OptionParser()
    parser.add_option("-H", "--host",
                      help="Hostname of the Flask app [default %s]" % default_host,
                      default=default_host)
    parser.add_option("-P", "--port",
                      help="Port for the Flask app [default %s]" % default_port,
                      default=default_port)

    # Two options useful for debugging purposes, but
    # a bit dangerous so not exposed in the help message.
    parser.add_option("-d", "--debug",
                      action="store_true", dest="debug",
                      help=optparse.SUPPRESS_HELP)
    parser.add_option("-p", "--profile",
                      action="store_true", dest="profile",
                      help=optparse.SUPPRESS_HELP)
    parser.add_option("-u", "--userid",
                      action="store", dest="userid",
                      help="User id for FitBit bracelet")
    parser.add_option("-f", "--first",
                      action="store_true", dest="first",
                      help="Use this flag for first-time configuration: the user has to "
                           "register with the system before the room station can be loaded")

    options, _ = parser.parse_args()

    # If the user selects the profiling option, then we need
    # to do a little extra setup
    if options.profile:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.config['PROFILE'] = True
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
        options.debug = True

    # This server must start with a userid
    if options.userid:
        app.config['USERID'] = options.userid

    # Check whether this is the first time the room station is started
    if options.first:
        app.config['FIRST'] = True
    else:
        app.config['FIRST'] = False

    app.run(
        debug=options.debug,
        host=options.host,
        port=int(options.port)
    )
def create_app(_read_config=True, profiler_directory=None, **config):
    app = flask.Flask(__name__,
                      static_folder=None,
                      template_folder=os.path.join(PROJECT_ROOT, 'templates'))

    if profiler_directory:
        app.config['PROFILE'] = True
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, profile_dir=profiler_directory)

    app.wsgi_app = ProxyFix(app.wsgi_app)

    # This key is insecure and you should override it on the server
    app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'

    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///changes'
    app.config['SQLALCHEMY_POOL_SIZE'] = 60
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
    # required for flask-debugtoolbar and the db perf metrics we record
    app.config['SQLALCHEMY_RECORD_QUERIES'] = True

    app.config['REDIS_URL'] = 'redis://localhost/0'
    app.config['DEBUG'] = True
    app.config['HTTP_PORT'] = 5000
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    app.config['API_TRACEBACKS'] = True

    # Expiration delay between when a snapshot image becomes superseded and when
    # it becomes truly expired (and thus no longer included in the sync information
    # for any cluster that runs that particular image's plan)
    app.config['CACHED_SNAPSHOT_EXPIRATION_DELTA'] = timedelta(hours=1)

    # default snapshot ID to use when no project-specific active image available
    app.config['DEFAULT_SNAPSHOT'] = None
    app.config['SNAPSHOT_S3_BUCKET'] = None
    app.config['LXC_PRE_LAUNCH'] = None
    app.config['LXC_POST_LAUNCH'] = None

    # Location of artifacts server that is passed to changes-client
    # (include http:// or https://)
    app.config['ARTIFACTS_SERVER'] = None

    app.config['CHANGES_CLIENT_DEFAULT_BUILD_TYPE'] = 'legacy'

    # This is a hash from each build type (string identifiers used in
    # build step configuration) to a "build spec", a definition of
    # how to use changes-client to build. To use changes-client, the key
    # 'uses_client' must be set to True.
    #
    # Required build spec keys for client:
    #   adapter -> basic or lxc
    #   jenkins-command -> command to run from jenkins directly ($JENKINS_COMMAND)
    #   commands -> array of hash from script -> string that represents a script
    #
    # Optional keys (lxc-only)
    #   pre-launch -> lxc pre-launch script
    #   post-launch -> lxc post-launch script
    #   release -> lxc release
    app.config['CHANGES_CLIENT_BUILD_TYPES'] = {
        'legacy': {'uses_client': False},
    }

    app.config['CELERY_ACCEPT_CONTENT'] = ['changes_json']
    app.config['CELERY_ACKS_LATE'] = True
    app.config['CELERY_BROKER_URL'] = 'redis://localhost/0'
    app.config['CELERY_DEFAULT_QUEUE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
    app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
    app.config['CELERY_DISABLE_RATE_LIMITS'] = True
    app.config['CELERY_IGNORE_RESULT'] = True
    app.config['CELERY_RESULT_BACKEND'] = None
    app.config['CELERY_RESULT_SERIALIZER'] = 'changes_json'
    app.config['CELERY_SEND_EVENTS'] = False
    app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
    app.config['CELERY_TASK_SERIALIZER'] = 'changes_json'
    app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
    app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000

    # By default, Celery logs writes to stdout/stderr as WARNING, which
    # is a bit harsh considering that some of the code is code we don't
    # own calling 'print'. This flips the default back to INFO, which seems
    # more appropriate. Can be overridden by the Changes config.
    app.config['CELERY_REDIRECT_STDOUTS_LEVEL'] = 'INFO'

    app.config['CELERY_QUEUES'] = (
        Queue('job.sync', routing_key='job.sync'),
        Queue('job.create', routing_key='job.create'),
        Queue('celery', routing_key='celery'),
        Queue('events', routing_key='events'),
        Queue('default', routing_key='default'),
        Queue('repo.sync', Exchange('fanout', 'fanout'), routing_key='repo.sync'),
    )

    app.config['CELERY_ROUTES'] = {
        'create_job': {
            'queue': 'job.create',
            'routing_key': 'job.create',
        },
        'sync_job': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_job_step': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_build': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'check_repos': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'sync_repo': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'run_event_listener': {
            'queue': 'events',
            'routing_key': 'events',
        },
        'fire_signal': {
            'queue': 'events',
            'routing_key': 'events',
        },
    }

    app.config['EVENT_LISTENERS'] = (
        ('changes.listeners.mail.build_finished_handler', 'build.finished'),
        ('changes.listeners.green_build.build_finished_handler', 'build.finished'),
        ('changes.listeners.build_revision.revision_created_handler', 'revision.created'),
        ('changes.listeners.build_finished_notifier.build_finished_handler', 'build.finished'),
        ('changes.listeners.phabricator_listener.build_finished_handler', 'build.finished'),
        ('changes.listeners.analytics_notifier.build_finished_handler', 'build.finished'),
        ('changes.listeners.analytics_notifier.job_finished_handler', 'job.finished'),
        ('changes.listeners.snapshot_build.build_finished_handler', 'build.finished'),
    )

    # restrict outbound notifications to the given domains
    app.config['MAIL_DOMAIN_WHITELIST'] = ()

    app.config['DEBUG_TB_ENABLED'] = True

    # celerybeat must be running for our cleanup tasks to execute
    # e.g. celery worker -B
    app.config['CELERYBEAT_SCHEDULE'] = {
        'cleanup-tasks': {
            'task': 'cleanup_tasks',
            'schedule': timedelta(minutes=1),
        },
        'check-repos': {
            'task': 'check_repos',
            'schedule': timedelta(minutes=2),
        },
        'aggregate-flaky-tests': {
            'task': 'aggregate_flaky_tests',
            # Hour 7 GMT is midnight PST, hopefully a time of low load
            'schedule': crontab(hour=7, minute=0),
        },
    }
    app.config['CELERY_TIMEZONE'] = 'UTC'

    app.config['SENTRY_DSN'] = None
    app.config['SENTRY_INCLUDE_PATHS'] = [
        'changes',
    ]

    app.config['JENKINS_AUTH'] = None
    app.config['JENKINS_URL'] = None
    app.config['JENKINS_TOKEN'] = None
    app.config['JENKINS_CLUSTERS'] = {}

    app.config['KOALITY_URL'] = None
    app.config['KOALITY_API_KEY'] = None

    app.config['GOOGLE_CLIENT_ID'] = None
    app.config['GOOGLE_CLIENT_SECRET'] = None
    app.config['GOOGLE_DOMAIN'] = None

    app.config['REPO_ROOT'] = None

    app.config['DEFAULT_FILE_STORAGE'] = 'changes.storage.s3.S3FileStorage'
    app.config['S3_ACCESS_KEY'] = None
    app.config['S3_SECRET_KEY'] = None
    app.config['S3_BUCKET'] = None

    app.config['PHABRICATOR_HOST'] = None
    app.config['PHABRICATOR_USERNAME'] = None
    app.config['PHABRICATOR_CERT'] = None

    app.config['MAIL_DEFAULT_SENDER'] = 'changes@localhost'
    app.config['BASE_URI'] = 'http://localhost:5000'

    # if set to a string, most (all?) of the frontend js will make API calls
    # to the host this string is set to (e.g. http://changes.bigcompany.com)
    # THIS IS JUST FOR EASIER TESTING IN DEVELOPMENT. Although it won't even
    # work in prod: you'll have to start chrome with --disable-web-security to
    # make this work. Override this in your changes.conf.py file
    app.config['WEBAPP_USE_ANOTHER_HOST'] = None

    # points to a file with custom changes content unique to your deployment.
    # Link to internal tools, provide inline contextual help on your development
    # process, etc.
    # e.g. /mycompany/config/changes_content.js
    app.config['WEBAPP_CUSTOMIZED_CONTENT_FILE'] = None

    # In minutes, the timeout applied to jobs without a timeout specified at build time.
    # A timeout should nearly always be specified; this is just a safeguard so that
    # unspecified timeout doesn't mean "is allowed to run indefinitely".
    app.config['DEFAULT_JOB_TIMEOUT_MIN'] = 60

    # Number of milliseconds a transaction can run before triggering a warning.
    app.config['TRANSACTION_MS_WARNING_THRESHOLD'] = 2500

    app.config.update(config)

    if _read_config:
        if os.environ.get('CHANGES_CONF'):
            # CHANGES_CONF=/etc/changes.conf.py
            app.config.from_envvar('CHANGES_CONF')
        else:
            # Look for ~/.changes/changes.conf.py
            path = os.path.normpath(os.path.expanduser('~/.changes/changes.conf.py'))
            app.config.from_pyfile(path, silent=True)

    # default the DSN for changes-client to the server's DSN
    app.config.setdefault('CLIENT_SENTRY_DSN', app.config['SENTRY_DSN'])

    if not app.config['BASE_URI']:
        raise ValueError('You must set ``BASE_URI`` in your configuration.')

    parsed_url = urlparse(app.config['BASE_URI'])
    app.config.setdefault('SERVER_NAME', parsed_url.netloc)
    app.config.setdefault('PREFERRED_URL_SCHEME', parsed_url.scheme)

    if app.debug:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    else:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 30

    app.url_map.converters['uuid'] = UUIDConverter

    # init sentry first
    sentry.init_app(app)

    @app.before_request
    def capture_user(*args, **kwargs):
        if 'uid' in session:
            sentry.client.user_context({
                'id': session['uid'],
                'email': session['email'],
            })

    api.init_app(app)
    db.init_app(app)
    mail.init_app(app)
    queue.init_app(app)
    redis.init_app(app)
    statsreporter.init_app(app)

    configure_debug_toolbar(app)

    from raven.contrib.celery import register_signal, register_logger_signal
    register_signal(sentry.client)
    register_logger_signal(sentry.client, loglevel=logging.WARNING)

    # configure debug routes first
    if app.debug:
        configure_debug_routes(app)

    configure_templates(app)

    # TODO: these can be moved to wsgi app entrypoints
    configure_api_routes(app)
    app_static_root = configure_web_routes(app)

    # blueprint for our new v2 webapp
    blueprint = create_v2_blueprint(app, app_static_root)
    app.register_blueprint(blueprint, url_prefix='/v2')

    configure_jobs(app)
    configure_transaction_logging(app)

    rules_file = app.config.get('CATEGORIZE_RULES_FILE')
    if rules_file:
        # Fail at startup if we have a bad rules file.
        categorize.load_rules(rules_file)

    return app
# If modules are configured, then load and run them
if 'MODULES' in rest_api.app.config:
    rest_api.module_loader = ModulesLoader(rest_api)
    for prefix, module_info in rest_api.app.config['MODULES'].items():
        module_file = importlib.import_module(module_info['import_path'])
        module = getattr(module_file, module_info['class_name'])
        rest_api.module_loader.load(module(rest_api, prefix))
    rest_api.module_loader.run()
else:
    rest_api.app.logger.warning(
        "MODULES isn't defined in config. No module will be loaded, so no route "
        "will be defined.")

if rest_api.app.config.get('ACTIVATE_PROFILING'):
    rest_api.app.logger.warning('=======================================================')
    rest_api.app.logger.warning('Profiling is enabled; all queries will be slow!')
    rest_api.app.logger.warning('=======================================================')
    from werkzeug.contrib.profiler import ProfilerMiddleware
    rest_api.app.config['PROFILE'] = True
    f = open('/tmp/profiler.log', 'a')
    rest_api.app.wsgi_app = ProfilerMiddleware(rest_api.app.wsgi_app, f,
                                               restrictions=[80],
                                               profile_dir='/tmp/profile')

index(rest_api)
def weasyl_404():
    userid = d.get_userid()
    return web.notfound(
        d.errorpage(userid, "**404!** The page you requested could not be found."))


app.notfound = weasyl_404

wsgi_app = app.wsgifunc()
wsgi_app = mw.InputWrapMiddleware(wsgi_app)
wsgi_app = mw.URLSchemeFixingMiddleware(wsgi_app)
if d.config_read_bool('profile_responses', section='backend'):
    from werkzeug.contrib.profiler import ProfilerMiddleware
    wsgi_app = ProfilerMiddleware(wsgi_app, profile_dir=m.MACRO_SYS_BASE_PATH + 'profile-stats')
if d.config_obj.has_option('sentry', 'dsn'):
    wsgi_app = mw.SentryEnvironmentMiddleware(wsgi_app, d.config_obj.get('sentry', 'dsn'))

configure_libweasyl(
    dbsession=d.sessionmaker,
    not_found_exception=web.notfound,
    base_file_path=m.MACRO_SYS_BASE_PATH,
    staff_config_path=m.MACRO_SYS_STAFF_CONFIG_PATH,
    media_link_formatter_callback=format_media_link,
)
def create_app():
    global app_created
    if not app_created:
        BlueprintsManager.register(app)
    Migrate(app, db)

    app.config.from_object(env('APP_CONFIG', default='config.ProductionConfig'))
    db.init_app(app)
    _manager = Manager(app)
    _manager.add_command('db', MigrateCommand)

    if app.config['CACHING']:
        cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    else:
        cache.init_app(app, config={'CACHE_TYPE': 'null'})

    stripe.api_key = 'SomeStripeKey'
    app.secret_key = 'super secret key'
    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    app.config['FILE_SYSTEM_STORAGE_FILE_VIEW'] = 'static'

    app.logger.addHandler(logging.StreamHandler(sys.stdout))
    app.logger.setLevel(logging.ERROR)

    # set up jwt
    app.config['JWT_AUTH_USERNAME_KEY'] = 'email'
    app.config['JWT_EXPIRATION_DELTA'] = timedelta(seconds=24 * 60 * 60)
    app.config['JWT_AUTH_URL_RULE'] = '/auth/session'
    _jwt = JWT(app, jwt_authenticate, jwt_identity)

    # setup celery
    app.config['CELERY_BROKER_URL'] = app.config['REDIS_URL']
    app.config['CELERY_RESULT_BACKEND'] = app.config['CELERY_BROKER_URL']

    CORS(app, resources={r"/*": {"origins": "*"}})
    AuthManager.init_login(app)

    if app.config['TESTING'] and app.config['PROFILE']:
        # Profiling
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    # development api
    with app.app_context():
        from app.api.admin_statistics_api.events import event_statistics
        from app.api.auth import auth_routes
        from app.api.attendees import attendee_misc_routes
        from app.api.bootstrap import api_v1
        from app.api.celery_tasks import celery_routes
        from app.api.event_copy import event_copy
        from app.api.exports import export_routes
        from app.api.imports import import_routes
        from app.api.uploads import upload_routes
        from app.api.users import user_misc_routes
        from app.api.orders import order_misc_routes
        from app.api.role_invites import role_invites_misc_routes
        from app.api.auth import ticket_blueprint
        from app.api.admin_translations import admin_blueprint

        app.register_blueprint(api_v1)
        app.register_blueprint(event_copy)
        app.register_blueprint(upload_routes)
        app.register_blueprint(export_routes)
        app.register_blueprint(import_routes)
        app.register_blueprint(celery_routes)
        app.register_blueprint(auth_routes)
        app.register_blueprint(event_statistics)
        app.register_blueprint(user_misc_routes)
        app.register_blueprint(attendee_misc_routes)
        app.register_blueprint(order_misc_routes)
        app.register_blueprint(role_invites_misc_routes)
        app.register_blueprint(ticket_blueprint)
        app.register_blueprint(admin_blueprint)

    sa.orm.configure_mappers()

    if app.config['SERVE_STATIC']:
        app.add_url_rule('/static/<path:filename>',
                         endpoint='static',
                         view_func=app.send_static_file)

    # sentry
    if not app_created and 'SENTRY_DSN' in app.config:
        sentry.init_app(app, dsn=app.config['SENTRY_DSN'])

    # redis
    redis_store.init_app(app)

    # elasticsearch
    if app.config['ENABLE_ELASTICSEARCH']:
        client.init_app(app)
        connections.add_connection('default', client.elasticsearch)
        with app.app_context():
            try:
                cron_rebuild_events_elasticsearch.delay()
            except Exception:
                pass

    app_created = True
    return app, _manager, db, _jwt
from werkzeug.contrib.profiler import ProfilerMiddleware
from pistis import app

app.config['PROFILE'] = True
app.config['SCHEDULER_API_ENABLED'] = False
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, profile_dir='.')
app.run(host='0.0.0.0')
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
    profile
    ~~~~~~~

    Implements the profile performance for cobra

    :author:    Feei <wufeifei#wufeifei.com>
    :homepage:  https://github.com/wufeifei/cobra
    :license:   MIT, see LICENSE for more details
    :copyright: Copyright (c) 2016 Feei. All rights reserved
"""
from werkzeug.contrib.profiler import ProfilerMiddleware
from app import web

__author__ = "lightless"
__email__ = "*****@*****.**"

web.config['PROFILE'] = True
web.wsgi_app = ProfilerMiddleware(web.wsgi_app, restrictions=[30])
web.run(debug=True)
# Initialise unicode:
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

# Load the app:
from streetsign_server import app

# And start the correct server
if __name__ == '__main__':
    if len(sys.argv) == 2:
        if sys.argv[1] == 'waitress':
            print("'Production' Server with Waitress.")
            print("Press <Ctrl-C> to stop")
            from waitress import serve
            serve(app, host=__HOST__, port=__PORT__, threads=__THREADS__)
        elif sys.argv[1] == 'profiler':
            print("Loading dev server with profiling on.")
            print("Press <Ctrl-C> to stop")
            from werkzeug.contrib.profiler import ProfilerMiddleware
            app.config['PROFILE'] = True
            app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[20])
            app.run(debug=True)
    else:
        print("Starting Development Server...")
        print("Press <Ctrl-C> to stop")
        app.run(host=__HOST__, port=__PORT__, debug=True)
robots_path = os.path.join(os.path.dirname(__file__), 'files/robots.txt')
if os.path.isfile(robots_path):
    from werkzeug.wsgi import SharedDataMiddleware
    application = SharedDataMiddleware(application, {'/robots.txt': robots_path})

if settings.is_debug_mode():
    from werkzeug.debug import DebuggedApplication
    application = DebuggedApplication(application)

# profiling
if settings.debug_level() == settings.DEBUG_AND_PROFILE:
    from werkzeug.contrib.profiler import ProfilerMiddleware, MergeStream
    stream = MergeStream(sys.stdout,
                         open(settings.get('global', 'profile_log_path'), 'w'))
    application = ProfilerMiddleware(application, stream)

if __name__ == '__main__':
    from werkzeug.serving import run_simple
    from werkzeug.wsgi import SharedDataMiddleware
    import argparse

    DEFAULT_PORT = 5000
    DEFAULT_ADDR = '127.0.0.1'

    parser = argparse.ArgumentParser(description='Starts a local development server')
    parser.add_argument('--port', dest='port_num', action=None, default=DEFAULT_PORT,
from werkzeug.serving import run_simple
from werkzeug.wsgi import DispatcherMiddleware
from werkzeug.contrib.profiler import ProfilerMiddleware
from bts_tools import core, frontend, init
import logging
log = logging.getLogger(__name__)

init()

DEBUG = core.config['wsgi_debug']

frontend_app = frontend.create_app()
frontend_app.debug = DEBUG

application = DispatcherMiddleware(frontend_app)

if core.config.get('wsgi_profile', False):
    application = ProfilerMiddleware(application, profile_dir='/tmp')


def main():
    print('-' * 100)
    print('Registered frontend routes:')
    print(frontend_app.url_map)

    run_simple('0.0.0.0', 5000, application,
               use_reloader=DEBUG,
               use_debugger=DEBUG)


if __name__ == '__main__':