def startCompressed(self):
    """Start the Tornado-backed HTTP server with gzip compression enabled.

    This is a version of the server optimized for remote ends: plaintext
    responses are compressed before they are sent out for better transfer
    speeds.  If the preferred port cannot be bound, the next port is tried,
    up to port 9999.
    """
    print("Starting Yuras Tornado HTTP Server")
    print("----------------------------------")
    # BUG FIX: this line (and the `print e` below) used the Python-2 print
    # statement while the rest of the method used print() calls; normalized
    # to the function form so the module is consistent and Python-3 ready.
    print("Using config %s" % self.CONFIG)
    self.checkWorkingDirectory()
    self.storeWebAppDirectories()
    print("Starting on port %s..." % self.PORT)
    # Compress all plaintext communications
    Compress(app)
    app.config['COMPRESS_MIMETYPES'] = [
        'text/html',
        'text/css',
        'text/xml',
        'application/json',
        'application/javascript',
        'image/svg+xml'
    ]
    app.config['COMPRESS_DEBUG'] = True
    serverStarted = False
    while not serverStarted and self.PORT < 10000:
        # Try adding 1 to the port every time we can't listen on the
        # preferred port.
        try:
            http_server = HTTPServer(WSGIContainer(app))
            http_server.bind(self.PORT)
            http_server.start(0)
            # NOTE: IOLoop.instance().start() blocks until the loop stops,
            # so the success flag is only set after the server shuts down.
            IOLoop.instance().start()
            serverStarted = True
        except Exception as e:
            print(e)
            self.PORT += 1
def create_app():
    """Build and configure the Flask application.

    Installs the regex URL converter, wires up the SQLite database,
    registers the v1 API blueprint with CORS and gzip compression, and
    adds a catch-all root route that redirects to the index page.

    :return: flask app
    """
    app = flask.Flask(__name__, static_folder=STATIC_PATH)
    app.url_map.converters['regex'] = RegexConverter

    # Database settings and schema creation.
    app.config.update(
        SQLALCHEMY_DATABASE_URI='sqlite:///' + SQLITE_PATH,
        SQLALCHEMY_TRACK_MODIFICATIONS=True,
    )
    db.init_app(app)
    with app.test_request_context():
        db.create_all()

    # API blueprint plus cross-origin and compression middleware.
    app.register_blueprint(bp, url_prefix='/v1')
    CORS(app)
    Compress(app)

    @app.route('/<regex("$"):url>')
    def index(url=None):
        """Redirect the bare root URL to the index page."""
        return flask.redirect(INDEX_PAGE)

    return app
def setup():
    """Create the nipap XML-RPC Flask app with compressed responses."""
    app = Flask('nipap.xmlrpc')
    Compress(app)

    rpc = XMLRPCHandler('XMLRPC')
    # Expose the handler on both endpoint paths.
    for endpoint in ('/RPC2', '/XMLRPC'):
        rpc.connect(app, endpoint)
    rpc.register_instance(NipapXMLRPC())

    return app
def maybe_enable_dev_mode(app):
    """Enable debug conveniences when DEVELOPMENT is set in the environment.

    Returns True when dev mode was enabled, False otherwise; the app is
    only mutated in the True case.
    """
    # Idiomatic membership test (was `if not 'DEVELOPMENT' in os.environ`).
    if 'DEVELOPMENT' not in os.environ:
        return False
    app.debug = True
    app.logger.debug("Debug mode enabled")
    Compress(app) # Enable gzip
    #app.config['ASSETS_DEBUG'] = True # Don't pack assets (css/js)
    #app.wsgi_app = ProfilerMiddleware(app.wsgi_app) # Enable profiler
    return True
def register_extensions(app):
    """Connect the user database and attach the response-optimizing extensions."""
    # Map each connection kwarg onto the app-config option it is read from.
    option_names = {
        'host': 'USER_DB_HOST',
        'name': 'USER_DB_NAME',
        'user': 'USER_DB_USER',
        'password': 'USER_DB_PASSWORD',
    }
    db_config = {key: app.config.get(option, '')
                 for key, option in option_names.items()}
    Database.connect(**db_config)

    cache_bust.init_cache_busting(app)
    Compress(app)
    HTMLMIN(app)
def create_app(priority_settings=None):
    """Application factory: configuration, extensions, auth and blueprints."""
    app = Flask(__name__, static_url_path='')

    # Base settings first, then optional per-deployment overrides.
    app.config.from_object(settings)
    app.config.from_object(priority_settings)

    # Database, Heroku deployment helper, and gzipped responses.
    heroku_ext = Heroku()
    compressor = Compress()
    db.init_app(app)
    heroku_ext.init_app(app)
    compressor.init_app(app)

    # Flask-Login session handling.
    login_manager = LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = 'accounts.login'

    @login_manager.user_loader
    def load_user(id):
        return User.query.get(int(id))

    # Blueprints keep the app modular.
    for module_blueprint in (index_blueprint, todo_blueprint,
                             user_blueprint, api_todo_blueprint):
        app.register_blueprint(module_blueprint)

    with app.app_context():
        db.create_all()

    return app
def create_app(config_name):
    """An "application factory" used to initialise the app object.

    Configs and extensions are loaded here.  See the Flask docs for
    additional information:
    http://flask.pocoo.org/docs/0.10/patterns/appfactories/

    Parameters
    ----------
    config_name : str
        The configuration to run.  Currently should be one of
        "development" or "default".

    Returns
    -------
    app : flask.app.Flask
        Flask application object.
    """
    app = Flask(__name__, static_url_path="/static")
    app.config.from_object(config[config_name])

    # Bind each extension to the freshly created app.
    for extension in (Bootstrap(), Compress()):
        extension.init_app(app)
    mongo.init_app(app)

    # Blueprints: site pages at the root, database API under /api.
    from app.main import main as main_blueprint
    app.register_blueprint(main_blueprint)

    from app.dbapi import dbapi as dbapi_blueprint
    app.register_blueprint(dbapi_blueprint, url_prefix="/api")

    return app
def create_app(package_name, config_filename, **kwargs):
    """Create a Flask app for *package_name*, configured from *config_filename*.

    Extra keyword arguments are forwarded to the ``Flask`` constructor.
    """
    app = Flask(package_name, **kwargs)
    app.config.from_pyfile(config_filename)
    # Plain secure-cookie sessions; key derivation is disabled so the secret
    # key is used directly for signing.
    app.session_interface = SecureCookieSessionInterface()
    app.session_interface.key_derivation = None
    # Attach the error-logging handler module.
    add_logger_handler(app)
    # Compress the JSON returned by the backend to reduce transfer size.
    Compress(app)
    return app
def create_app(config_mode=None, config_file=None):
    """ Creates the Flask application

    Kwargs:
        config_mode (str): The configuration mode. Must be a `class` in
            `config.py`. One of ('Production', 'Development', 'Test')

        config_file (str): The configuration file.

    Returns:
        (obj): Flask application

    Examples:
        >>> create_app('Test')
        <Flask 'app'>
    """
    app = Flask(__name__)
    mgr = APIManager(app, flask_sqlalchemy_db=db)

    # Configuration precedence: explicit mode > explicit file > APP_SETTINGS.
    if config_mode:
        app.config.from_object(getattr(config, config_mode))
    elif config_file:
        app.config.from_pyfile(config_file)
    else:
        app.config.from_envvar('APP_SETTINGS', silent=True)

    db.init_app(app)
    CORS(app)
    Compress(app)

    @app.route('%s/' % app.config['API_URL_PREFIX'])
    def home():
        return 'Welcome to the HDX File Proxy API!'

    kwargs = {
        'methods': app.config['API_METHODS'],
        'validation_exceptions': API_EXCEPTIONS,
        'max_results_per_page': app.config['API_MAX_RESULTS_PER_PAGE'],
        'url_prefix': app.config['API_URL_PREFIX']}

    # Create API endpoints from `models.py`. Each model is available at
    # the endpoint `/<tablename>`.
    create_api = partial(mgr.create_api, **kwargs)

    with app.app_context():
        # BUG FIX: `map()` returns a lazy iterator on Python 3, so the
        # endpoints were never actually created; iterate explicitly.
        for table in _get_tables():
            create_api(table)

    return app
def setUp(self):
    """Create a test app with gzip compression and two template routes.

    Records each template's on-disk size minus one byte so tests can
    compare against response lengths.
    """
    self.app = Flask(__name__)
    self.app.testing = True

    template_dir = os.path.join(os.getcwd(), 'tests', 'templates')
    self.small_size = os.path.getsize(
        os.path.join(template_dir, 'small.html')) - 1
    self.large_size = os.path.getsize(
        os.path.join(template_dir, 'large.html')) - 1

    Compress(self.app)

    @self.app.route('/small/')
    def small():
        return render_template('small.html')

    @self.app.route('/large/')
    def large():
        return render_template('large.html')
def create_app(config_name):
    """Application factory for the Open Targets REST API.

    Wires up configuration, three redis databases, Elasticsearch handlers,
    gzip compression, API-key rate limiting, IP-to-organisation resolution,
    versioned API blueprints, swagger/docs endpoints and per-request
    rate-limit bookkeeping, then returns the configured Flask app.
    """
    app = Flask(__name__, static_url_path='')
    # This first loads the configuration from eg. config['development'] which
    # corresponds to the DevelopmentConfig class in the config.py
    app.config.from_object(config[config_name])
    # Then you can override the values with the contents of the file the
    # OPENTARGETS_API_LOCAL_SETTINGS environment variable points to.
    # For eg:
    # $ export OPENTARGETS_API_LOCAL_SETTINGS=/path/to/settings.cfg
    #
    # where settings.cfg looks like:
    #
    # DEBUG = False
    # SECRET_KEY = 'foo'
    #
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    app.logger.info('looking for elasticsearch at: %s' % app.config['ELASTICSEARCH_URL'])

    # Three redis databases with distinct roles.
    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'], db=0)  #served data
    app.extensions['redis-service'] = Redis(app.config['REDIS_SERVER_PATH'], db=1)  #cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'], db=2)  # user info
    '''setup cache'''
    # Disable persistence on the cache database; its contents are disposable.
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'], str(api_version_minor))
    ip2org = IP2Org(icache)

    if app.config['ELASTICSEARCH_URL']:
        es = Elasticsearch(app.config['ELASTICSEARCH_URL'],
                           # # sniff before doing anything
                           # sniff_on_start=True,
                           # # refresh nodes after a node fails to respond
                           # sniff_on_connection_fail=True,
                           # # and also every 60 seconds
                           # sniffer_timeout=60
                           timeout=60 * 20,
                           maxsize=32,
                           )
    else:
        es = None

    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(es,
                                        DataTypes(app),
                                        DataSourceScoring(app),
                                        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
                                        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
                                        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
                                        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
                                        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
                                        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
                                        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
                                        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
                                        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
                                        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
                                        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
                                        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
                                        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
                                        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
                                        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
                                        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
                                        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
                                        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
                                        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
                                        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
                                        log_level=app.logger.getEffectiveLevel(),
                                        cache=icache
                                        )
    app.extensions['es_access_store'] = esStore(es,
                                                eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
                                                ip2org=ip2org,
                                                )
    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN, consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(mp,
                                                          ip2org=ip2org,
                                                          )
    # NOTE(review): the original indentation of the proxy handler relative to
    # the mixpanel block could not be recovered from the flattened source --
    # confirm it is meant to be unconditional.
    app.extensions['proxy'] = ProxyHandler(allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
                                           allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
                                           allowed_request_domains=app.config['PROXY_SETTINGS']['allowed_request_domains'])
    # basepath = app.config['PUBLIC_API_BASE_PATH']+api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    # NOTE(review): mode=777 is decimal, not the octal 0o777 it resembles --
    # confirm the intended permissions.
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000, default_timeout=60*60, mode=777)

    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits

    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        rate_limit_file = '../'+rate_limit_file
    csvfile = None
    if Config.GITHUB_AUTH_TOKEN:
        # Prefer the canonical rate-limit file from the private github repo.
        r = requests.get('https://api.github.com/repos/opentargets/rest_api_auth/contents/rate_limit.csv',
                         headers = {'Authorization': 'token %s'%Config.GITHUB_AUTH_TOKEN,
                                    'Accept': 'application/vnd.github.v3.raw'})
        if r.ok:
            csvfile = r.text.split('\n')
            app.logger.info('Retrieved rate limit file from github remote')
        else:
            app.logger.warning('Cannot retrieve rate limit file from remote, SKIPPED!')
    elif os.path.exists(rate_limit_file):
        csvfile = open(rate_limit_file)
        app.logger.info('Using dummy rate limit file')
    if csvfile is None:
        app.logger.error('cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!'%rate_limit_file)
    else:
        # Each row becomes an AuthKey hash in the user redis database.
        reader = csv.DictReader(csvfile)
        for row in reader:
            auth_key = AuthKey(**row)
            app.extensions['redis-user'].hmset(auth_key.get_key(), auth_key.__dict__)
        try:
            csvfile.close()
        except:
            # csvfile may be a list of lines (github branch) with no close().
            pass
        app.logger.info('succesfully loaded rate limit file')

    '''load ip name resolution'''
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning('cannot find IP list for IP resolver. All traffic will be logged as PUBLIC')
    app.config['IP_RESOLVER'] = ip_resolver

    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    # One blueprint per exposed API version.
    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor), __name__)

    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    '''set the right prefixes'''
    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    # app.register_blueprint(latest_blueprint, url_prefix='/latest/platform')
    app.register_blueprint(current_version_blueprint, url_prefix='/v'+str(api_version) + '/platform')
    app.register_blueprint(current_minor_version_blueprint, url_prefix='/v'+str(api_version_minor) + '/platform')

    '''serve the static docs'''
    # NOTE(review): file() is the Python-2 builtin, and yaml.load() without a
    # Loader is unsafe on untrusted input -- confirm the yaml files are
    # trusted and this module targets Python 2.
    try:
        ''' NOTE: this file gets created only at deployment time '''
        openapi_def = yaml.load(file('app/static/openapi.yaml', 'r'))
        app.logger.info('parsing swagger from static/openapi.yaml')
    except IOError:
        '''if we are not deployed, then simply use the template'''
        openapi_def = yaml.load(file('openapi.template.yaml', 'r'))
        app.logger.error('parsing swagger from openapi.template.yaml')
    with open("api-description.md", "r") as f:
        desc = f.read()
    openapi_def['info']['description'] = desc
    openapi_def['basePath'] = '/v%s' % str(api_version)

    @app.route('/v%s/platform/swagger' % str(api_version))
    def serve_swagger(apiversion=api_version):
        return jsonify(openapi_def)

    @app.route('/v%s/platform/docs' % str(api_version))
    def render_redoc(apiversion=api_version):
        return render_template('docs.html', api_version=apiversion)

    '''pre and post-request'''
    @app.before_request
    def before_request():
        # Stamp the start time so the after-request hook can measure latency.
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        # Rate-limit accounting plus cache/CORS headers for every response.
        try:
            rate_limiter = RateLimiter()
            now = datetime.now()
            took = (now - g.request_start).total_seconds()*1000
            if took > 500:
                cache_time = str(int(3600*took))  # set cache to last one hour for each second spent in the request
                resp.headers.add('X-Accel-Expires', cache_time)
            took = int(round(took))
            LogApiCallWeight(took)
            # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
            #     took = RateLimiter.DEFAULT_CALL_WEIGHT
            current_values = increment_call_rate(took, rate_limiter)
            now = datetime.now()
            ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
            ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
            usage_left_10s = rate_limiter.short_window_rate-current_values['short']
            usage_left_1h = rate_limiter.long_window_rate - current_values['long']
            min_ceil = ceil10s
            if usage_left_1h < 0:
                min_ceil = ceil1h
            if (usage_left_10s < 0) or (usage_left_1h < 0):
                resp.headers.add('Retry-After', min_ceil)
            resp.headers.add('X-API-Took', took)
            resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
            resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
            resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
            resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
            # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
            # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
            resp.headers.add('Access-Control-Allow-Origin', '*')
            resp.headers.add('Access-Control-Allow-Headers', 'Content-Type,Auth-Token')
            resp.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
            if do_not_cache(request):  # do not cache in the browser
                resp.headers.add('Cache-Control', "no-cache, must-revalidate, max-age=0")
            else:
                resp.headers.add('Cache-Control', "no-transform, public, max-age=%i, s-maxage=%i"%(took*1800/1000, took*9000/1000))
            return resp
        except Exception as e:
            # Never let header bookkeeping break the response itself.
            # NOTE(review): logger.exception takes %-style args; the extra
            # str(e) positional does not match a placeholder -- verify intent.
            app.logger.exception('failed request teardown function', str(e))
            return resp

    # Override the HTTP exception handler.
    app.handle_http_exception = get_http_exception_handler(app)

    return app
#Initializing Flask app = Flask(__name__) app.config.from_object(server_config) app.wsgi_app = StreamConsumingMiddleware(app.wsgi_app) app.logger.handlers = [] app.logger.addHandler(create_logging_handler(app.config)) app.logger.addHandler(create_console_logger_handler()) app.userstorage = UserStorage() load_users(app.userstorage) #Enables GZIP compression compressor = Compress() compressor.init_app(app) #Expose markdown trough application object app.markdown = Markdown() app.storage = RecipeStorage(directory=app.config['RECIPE_DIRECTORY'], backup=True, logger=app.logger.info) #Jinja Context Processor @app.context_processor def inject_template_variables(): return dict(base_path=app.config['BASE_PATH'], upload_directory=app.config['UPLOAD_DIRECTORY']) @app.context_processor
bipbalance_client = Flask(__name__) bipbalance_client.secret_key = config.SECRET_KEY class RegexConverter(BaseConverter): def __init__(self, url_map, *items): super(RegexConverter, self).__init__(url_map) self.regex = items[0] # Use the RegexConverter function as a converter # method for mapped urls bipbalance_client.url_map.converters['regex'] = RegexConverter bipbalance_client.register_blueprint(home_blueprint.page) Compress(bipbalance_client) cache.init_app(bipbalance_client) @bipbalance_client.errorhandler(404) def page_not_found(e): g.config = config return render_template('404.html'), 404 @bipbalance_client.errorhandler(500) def page_not_found(e): g.config = config return render_template('500.html'), 500 def gevent(): from gevent.wsgi import WSGIServer
def make_app():
    """Build the CommunityShare Flask app.

    Configures CORS, gzip compression, webpack asset handling, optional SSL
    enforcement, the JSON response class, error handlers and all route
    groups, then returns the app.
    """
    cors = CORS(origins=[
        'https://app.communityshare.us:443',  # production app
        'http://communityshare.localhost:5000',  # local dev angular app
        'http://communityshare.localhost:8000',  # local dev elm app
        'https://dmsnell.github.io/cs-elm/',  # live elm app
    ])
    compress = Compress()
    webpack = Webpack()

    app = Flask(__name__, template_folder='../static/')
    # dict return values from views are serialised to JSON responses.
    app.response_class = JsonifyDictResponse

    app.config['SQLALCHEMY_DATABASE_URI'] = config.DB_CONNECTION
    app.config['WEBPACK_ASSETS_URL'] = config.WEBPACK_ASSETS_URL
    app.config['WEBPACK_MANIFEST_PATH'] = config.WEBPACK_MANIFEST_PATH

    cors.init_app(app)
    compress.init_app(app)
    webpack.init_app(app)

    if config.SSL != 'NO_SSL':
        # Redirect plain-HTTP requests to HTTPS.
        flask_sslify.SSLify(app)
    # Honour X-Forwarded-* headers set by the reverse proxy.
    app.wsgi_app = ReverseProxied(app.wsgi_app)

    # Route groups, one registrar per feature area.
    register_user_routes(app)
    register_search_routes(app)
    register_conversation_routes(app)
    register_share_routes(app)
    register_survey_routes(app)
    register_email_routes(app)
    register_statistics_routes(app)
    community_share.api.register_routes(app)

    @app.teardown_appcontext
    def close_db_connection(exception):
        # Return the session to the pool at the end of each request.
        store.session.remove()

    @app.errorhandler(BadRequest)
    def handle_bad_request(error):
        return str(error), HTTPStatus.BAD_REQUEST

    # Remaining HTTP errors share one JSON-producing handler factory.
    app.errorhandler(Unauthorized)(jsonify_with_code(HTTPStatus.UNAUTHORIZED))
    app.errorhandler(Forbidden)(jsonify_with_code(HTTPStatus.FORBIDDEN))
    app.errorhandler(NotFound)(jsonify_with_code(HTTPStatus.NOT_FOUND))
    app.errorhandler(InternalServerError)(jsonify_with_code(HTTPStatus.INTERNAL_SERVER_ERROR))

    @app.route('/static/build/<path:filename>')
    def build_static(filename):
        # Built assets get long-lived caching.
        return send_from_directory(
            app.root_path + '/../static/build/',
            filename,
            cache_timeout=YEAR_IN_SECONDS,
        )

    @app.route('/static/js/<path:filename>')
    def js_static(filename):
        return send_from_directory(app.root_path + '/../static/js/', filename)

    @app.route('/static/fonts/<path:filename>')
    def fonts_static(filename):
        return send_from_directory(app.root_path + '/../static/fonts/', filename)

    @app.route('/static/css/<path:filename>')
    def css_static(filename):
        return send_from_directory(app.root_path + '/../static/css/', filename)

    @app.route('/static/templates/footer.html')
    def footer_template():
        return render_template('templates/footer.html', config=config)

    @app.route('/static/templates/<path:filename>')
    def templates_static(filename):
        return send_from_directory(app.root_path + '/../static/templates/', filename)

    @app.route('/')
    def index():
        logger.debug('rendering index')
        return render_template('index.html', config=config)

    return app
def test_delayed_init(self):
    """Compress can be constructed first and bound to the app afterwards."""
    extension = Compress()
    extension.init_app(self.app)
def serve():
    """Create the Flask front-end, register the JSON endpoints, and run it.

    Results are memoised in redis so repeated queries for the same
    (source, emotion) pair are served from cache.
    """
    compress = Compress()
    app = Flask(__name__, static_folder='static')
    compress.init_app(app)

    @app.route('/top_identities.json', methods=['GET', 'POST'])
    @json_api
    def top_identities():
        source = request.args.get('source', 'celebrity')
        emotion = request.args.get('emotion', 'happy')
        key = '%s x %s' % (source, emotion)
        text = redis().hget('top_identities.json', key)
        if not text:
            text = json.dumps(back.top_identities(source, emotion))
            redis().hset('top_identities.json', key, text)
        return text

    @app.route('/top_articles.json', methods=['GET', 'POST'])
    def top_articles():
        source = request.args.get('source', 'celebrity')
        emotion = request.args.get('emotion', 'happy')
        key = '%s x %s' % (source, emotion)
        text = redis().hget('top_articles.json', key)
        if not text:
            text = json.dumps(back.top_articles(source, emotion), indent=4)
            redis().hset('top_articles.json', key, text)
        return text

    # BUG FIX: @app.route must be the topmost decorator.  Previously it was
    # applied first (innermost), so Flask registered the *undecorated*
    # function and @report_error/@json_api never ran for this view.
    @app.route('/all_articles.json', methods=['GET', 'POST'])
    @report_error
    @json_api
    def all_articles():
        source = request.args.get(
            'source', '20140629_government_politician_nyt')
        model = request.args.get('model', 'vectors_50d')
        text = redis().get('all_articles.json')
        if not text:
            text = redis_call(
                'all_articles.json',
                {'source': 'hdfs:///%s' % source, 'model': model})
            redis().set('all_articles.json', text)
        return text

    #@report_error
    #@json_api
    @app.route('/all_identities.json', methods=['GET', 'POST'])
    def all_identities():
        source = request.args.get(
            'source', '20140629_government_politician_nyt')
        model = request.args.get('model', 'vectors_50d')
        text = redis_call(
            'all_identities.json',
            {'source': 'hdfs:///%s' % source, 'model': model})
        response = Response(text, mimetype='application/json')
        response.headers['Access-Control-Allow-Origin'] = '*'
        return response

    # BUG FIX: same decorator-order problem as all_articles above.
    @app.route('/confusion.json', methods=['GET', 'POST'])
    @json_api
    @report_error
    def confusion():
        key = 'json'
        text = redis().hget('confusion.json', key)
        if not text:
            text = redis_call(
                'confusion.json',
                {'source': 'semeval', 'model': 'twitter_vectors_20d'})
            redis().hset('confusion.json', key, text)
        return text

    # running the api
    # NOTE(review): debug=True on 0.0.0.0 exposes the Werkzeug debugger to
    # the network -- confirm this is intentional before deploying.
    app.run(host='0.0.0.0', port=int(
        os.getenv('PORT', '80')), debug=True, threaded=True)
def make_app():
    """Build the CommunityShare Flask app.

    Configures CORS, gzip compression, webpack asset handling, optional SSL
    enforcement, error handling and all route groups, then returns the app.
    """
    cors = CORS(origins=[
        'https://app.communityshare.us:443',  # production app
        'http://communityshare.localhost:5000',  # local dev angular app
        'http://communityshare.localhost:8000',  # local dev elm app
        'https://dmsnell.github.io/cs-elm/',  # live elm app
    ])
    compress = Compress()
    webpack = Webpack()

    app = Flask(__name__, template_folder='../static/')

    app.config['SQLALCHEMY_DATABASE_URI'] = config.DB_CONNECTION
    app.config['WEBPACK_ASSETS_URL'] = config.WEBPACK_ASSETS_URL
    app.config['WEBPACK_MANIFEST_PATH'] = config.WEBPACK_MANIFEST_PATH

    cors.init_app(app)
    compress.init_app(app)
    webpack.init_app(app)

    if config.SSL != 'NO_SSL':
        # Redirect plain-HTTP requests to HTTPS.
        flask_sslify.SSLify(app)
    # Honour X-Forwarded-* headers set by the reverse proxy.
    app.wsgi_app = ReverseProxied(app.wsgi_app)

    # Route groups, one registrar per feature area.
    register_user_routes(app)
    register_search_routes(app)
    register_conversation_routes(app)
    register_share_routes(app)
    register_survey_routes(app)
    register_email_routes(app)
    register_statistics_routes(app)
    community_share.api.register_routes(app)

    @app.teardown_appcontext
    def close_db_connection(exception):
        # Return the session to the pool at the end of each request.
        store.session.remove()

    @app.errorhandler(BadRequest)
    def handle_bad_request(error):
        return str(error), HTTPStatus.BAD_REQUEST

    @app.route('/static/build/<path:filename>')
    def build_static(filename):
        # Built assets get long-lived caching.
        return send_from_directory(
            app.root_path + '/../static/build/',
            filename,
            cache_timeout=YEAR_IN_SECONDS,
        )

    @app.route('/static/js/<path:filename>')
    def js_static(filename):
        return send_from_directory(app.root_path + '/../static/js/', filename)

    @app.route('/static/fonts/<path:filename>')
    def fonts_static(filename):
        return send_from_directory(app.root_path + '/../static/fonts/', filename)

    @app.route('/static/css/<path:filename>')
    def css_static(filename):
        return send_from_directory(app.root_path + '/../static/css/', filename)

    @app.route('/static/templates/footer.html')
    def footer_template():
        return render_template('templates/footer.html', config=config)

    @app.route('/static/templates/<path:filename>')
    def templates_static(filename):
        return send_from_directory(app.root_path + '/../static/templates/', filename)

    @app.route('/')
    def index():
        logger.debug('rendering index')
        return render_template('index.html', config=config)

    return app
return self.render('admin/email_render.html', template_names=template_names) csrf = CsrfProtect() db = SQLAlchemy() babel = Babel() migrate = Migrate() mail = Mail() admin = Admin(template_mode='bootstrap3', index_view=MyHomeView()) admin.add_view(AnalyticsView(name="Analytics", endpoint='analytics')) admin.add_view(EmailRenderView(name="Email Renders", endpoint='email-renders')) security = Security() api_manager = APIManager() compress = Compress() gravatar = Gravatar(size=42, rating='g', default='mm', force_default=False, use_ssl=True, base_url=None) user_images = UploadSet('userimages', IMAGES) review_images = UploadSet('reviewimages', IMAGES) shop_images = UploadSet('shopimages', IMAGES) resize = Resize() assets = Environment() js_assets = Bundle('js/main.js', filters='rjsmin', output='js/main.min.js') css_assets = Bundle('css/global.css',
def create_app(config_path):
    """Jazzband application factory.

    Builds the Flask app, registers error handlers, the database, admin,
    middleware, GitHub integration, sessions, compression, content pages and
    account/member/project blueprints.  Extensions are imported inside the
    factory rather than at module level.
    """
    # setup flask
    app = Flask('jazzband')

    @app.errorhandler(404)
    def page_not_found(e):
        return render_template('error.html'), 404

    @app.errorhandler(403)
    def forbidden(error):
        return render_template('forbidden.html'), 403

    @app.errorhandler(500)
    def error(error):
        return render_template('error.html'), 500

    @app.route('/favicon.ico')
    def favicon():
        filename = 'favicon.ico'
        cache_timeout = app.get_send_file_max_age(filename)
        return send_from_directory(os.path.join(app.static_folder, 'favicons'),
                                   filename,
                                   mimetype='image/vnd.microsoft.icon',
                                   cache_timeout=cache_timeout)

    # load decoupled config variables
    app.config.from_object(config_path)

    from .models import db, User, Project, EmailAddress
    db.init_app(app)

    from flask_migrate import Migrate
    Migrate(app, db)

    from .admin import admin, JazzbandModelView
    admin.init_app(app)
    admin.add_view(JazzbandModelView(User, db.session))
    admin.add_view(JazzbandModelView(Project, db.session))
    admin.add_view(JazzbandModelView(EmailAddress, db.session))

    # Opbeat error tracking, only when a token is configured.
    if 'OPBEAT_SECRET_TOKEN' in os.environ:
        from opbeat.contrib.flask import Opbeat
        Opbeat(app, logging=True)

    if not app.debug:
        # Trust the X-Forwarded-* headers set by the proxy in production.
        from werkzeug.contrib.fixers import ProxyFix
        app.wsgi_app = ProxyFix(app.wsgi_app)

    # Serve static files straight from the WSGI layer.
    from whitenoise import WhiteNoise
    app.wsgi_app = WhiteNoise(
        app.wsgi_app,
        root=app.static_folder,
        prefix=app.static_url_path,
    )

    # setup github-flask
    from .github import github
    github.init_app(app)

    from .hooks import hooks
    hooks.init_app(app)

    # setup webassets
    from .assets import assets
    assets.init_app(app)

    # setup session store
    # NOTE(review): the flask.ext.* import style is the deprecated pre-1.0
    # namespace -- confirm the pinned Flask version still supports it.
    from flask.ext.session import Session
    Session(app)

    from flask.ext.compress import Compress
    Compress(app)

    from .content import about_pages, news_pages, content
    about_pages.init_app(app)
    news_pages.init_app(app)
    app.register_blueprint(content)

    from .account import account, login_manager
    app.register_blueprint(account)
    login_manager.init_app(app)

    from .members import members
    app.register_blueprint(members)

    from .projects import projects
    app.register_blueprint(projects)

    @app.context_processor
    def app_context_processor():
        # Names available in every template.
        return {
            'about': about_pages,
            'news': news_pages,
            'User': User,
            'Project': Project,
        }

    @app.after_request
    def add_vary_header(response):
        response.vary.add('Cookie')
        response.headers['Jazzband'] = "We're all part of the band"
        return response

    return app
def create_app(config_mode=None, config_file=None):
    """ Creates the Flask application

    Kwargs:
        config_mode (str): The configuration mode. Must be a `class` in
            `config.py`. One of ('Production', 'Development', 'Test',
            'Docker')

        config_file (str): The configuration file.

    Returns:
        (obj): Flask application

    Examples:
        >>> create_app('Test')
        <Flask 'app'>
    """
    app = Flask(__name__)
    app.register_blueprint(blueprint)
    mgr = APIManager(app, flask_sqlalchemy_db=db)
    cache_config = {}

    # Configuration precedence: explicit mode > explicit file > APP_SETTINGS.
    if config_mode:
        app.config.from_object(getattr(config, config_mode))
    elif config_file:
        app.config.from_pyfile(config_file)
    else:
        app.config.from_envvar('APP_SETTINGS', silent=True)

    memcached_servers = getenv('MEMCACHIER_SERVERS', getenv('MEMCACHE_SERVERS'))

    # Cache backend: authenticated memcached in prod, plain memcached when
    # requested, and an in-process cache as the fallback.
    if app.config['PROD'] and app.config['MEMCACHE']:
        cache_config['CACHE_TYPE'] = 'spreadsaslmemcachedcache'
        cache_config['CACHE_MEMCACHED_SERVERS'] = [memcached_servers]
        cache_config['CACHE_MEMCACHED_USERNAME'] = getenv(
            'MEMCACHIER_USERNAME')
        cache_config['CACHE_MEMCACHED_PASSWORD'] = getenv(
            'MEMCACHIER_PASSWORD')
    elif app.config['MEMCACHE']:
        cache_config['CACHE_TYPE'] = 'memcached'
        cache_config['CACHE_MEMCACHED_SERVERS'] = [memcached_servers]
    else:
        cache_config['CACHE_TYPE'] = 'simple'

    cache.init_app(app, config=cache_config)
    db.init_app(app)
    CORS(app)
    Compress(app)

    @app.route('/')
    def home():
        return 'Welcome to the HDX Age API!'

    kwargs = {
        'methods': app.config['API_METHODS'],
        'validation_exceptions': API_EXCEPTIONS,
        'allow_functions': app.config['API_ALLOW_FUNCTIONS'],
        'allow_patch_many': app.config['API_ALLOW_PATCH_MANY'],
        'max_results_per_page': app.config['API_MAX_RESULTS_PER_PAGE'],
        'url_prefix': app.config['API_URL_PREFIX']}

    # Create API endpoints from `models.py`. Each model is available at
    # the endpoint `/<tablename>`.
    create_api = partial(mgr.create_api, **kwargs)

    with app.app_context():
        # BUG FIX: `map()` returns a lazy iterator on Python 3, so the
        # endpoints were never actually created; iterate explicitly.
        for table in _get_tables():
            create_api(table)

    return app
# Flask-Security setup with customised registration and login forms.
security = Security(app, user_datastore,
                    register_form=ExtendedRegisterForm,
                    login_form=ExtendedLoginForm)

"""Auto add role and creation time to new users"""
@user_registered.connect_via(app)
def user_registered_sighandler(app, user, confirm_token):
    # NOTE: the `app` parameter is the signal sender and shadows the
    # module-level `app` inside this handler.
    default_role = user_datastore.find_role('user')
    user_datastore.add_role_to_user(user, default_role)
    user.created_at = datetime.utcnow()
    db.session.commit()

"""Add various extensions"""
mail = Mail(app)
assets = Environment(app)
compress = Compress(app)

"""If we are in prod, don't autobuild"""
# Asset bundles are read from a file manifest instead of being rebuilt.
assets.auto_build = app.config['ASSETS_AUTO_BUILD']
assets.manifest = 'file'

"""Configure blueprints in views."""
for blueprint in DEFAULT_BLUEPRINTS:
    app.register_blueprint(blueprint)


def configure_template_filters(app):
    """Register custom Jinja template filters on *app*."""
    @app.template_filter()
    def time_ago(value):
        # Render a timestamp as a human-friendly relative time.
        return pretty_date(value)
def setUp(self):
    """Build a fresh Flask app in testing mode with compression enabled."""
    application = Flask(__name__)
    application.testing = True
    Compress(application)
    self.app = application
def get_app():
    """Return a new Flask application with gzip compression wired in."""
    application = Flask(__name__)
    Compress(application)
    return application
def create_app(config_name):
    """Application factory for the Open Targets REST API.

    Loads the config class named *config_name*, wires up Redis,
    Elasticsearch, Mixpanel and proxy handlers, loads API-key quotas and
    the IP→organisation resolver, registers the versioned API blueprints
    and swagger/doc routes, and installs per-request rate-limiting and
    caching hooks.

    Args:
        config_name: key into the module-level ``config`` mapping; may be
            overridden by the OPENTARGETS_API_LOCAL_SETTINGS env-var file.

    Returns:
        The fully configured Flask application.
    """
    app = Flask(__name__, static_url_path='')
    app.config.from_object(config[config_name])
    # A local settings file (if present) overrides the chosen config class.
    app.config.from_envvar("OPENTARGETS_API_LOCAL_SETTINGS", silent=True)
    config[config_name].init_app(app)
    api_version = app.config['API_VERSION']
    api_version_minor = app.config['API_VERSION_MINOR']

    # log_level = logging.INFO
    # if app.config['DEBUG']:
    #     log_level = logging.DEBUG
    # Flask has a default logger which works well and pushes to stderr
    # if you want to add different handlers (to file, or logstash, or whatever)
    # you can use code similar to the one below and set the error level accordingly.
    # logHandler = logging.StreamHandler()
    # formatter = jsonlogger.JsonFormatter()
    # logHandler.setFormatter(formatter)
    # loghandler.setLevel(logging.INFO)
    # app.logger.addHandler(logHandler)
    # or for LOGSTASH
    # app.logger.addHandler(logstash.LogstashHandler(app.config['LOGSTASH_HOST'], app.config['LOGSTASH_PORT'], version=1))

    app.logger.info('looking for elasticsearch at: %s' %
                    app.config['ELASTICSEARCH_URL'])
    print('looking for elasticsearch at: %s' %
          app.config['ELASTICSEARCH_URL'])

    # Three logical Redis databases on the same server, one per concern:
    app.extensions['redis-core'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=0)  # served data
    app.extensions['redis-service'] = Redis(
        app.config['REDIS_SERVER_PATH'],
        db=1)  # cache, rate limit and internal things
    app.extensions['redis-user'] = Redis(app.config['REDIS_SERVER_PATH'],
                                         db=2)  # user info

    '''setup cache'''
    # Disable persistence on the service DB: it only holds transient state.
    app.extensions['redis-service'].config_set('save', '')
    app.extensions['redis-service'].config_set('appendonly', 'no')
    icache = InternalCache(app.extensions['redis-service'],
                           str(api_version_minor))
    ip2org = IP2Org(icache)

    es = Elasticsearch(
        app.config['ELASTICSEARCH_URL'],
        # # sniff before doing anything
        # sniff_on_start=True,
        # # refresh nodes after a node fails to respond
        # sniff_on_connection_fail=True,
        # # and also every 60 seconds
        # sniffer_timeout=60
        timeout=60 * 20,
        maxsize=100,
    )

    '''elasticsearch handlers'''
    app.extensions['esquery'] = esQuery(
        es,
        DataTypes(app),
        DataSourceScoring(app),
        index_data=app.config['ELASTICSEARCH_DATA_INDEX_NAME'],
        index_efo=app.config['ELASTICSEARCH_EFO_LABEL_INDEX_NAME'],
        index_eco=app.config['ELASTICSEARCH_ECO_INDEX_NAME'],
        index_genename=app.config['ELASTICSEARCH_GENE_NAME_INDEX_NAME'],
        index_expression=app.config['ELASTICSEARCH_EXPRESSION_INDEX_NAME'],
        index_reactome=app.config['ELASTICSEARCH_REACTOME_INDEX_NAME'],
        index_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_INDEX_NAME'],
        index_search=app.config['ELASTICSEARCH_DATA_SEARCH_INDEX_NAME'],
        index_relation=app.config['ELASTICSEARCH_DATA_RELATION_INDEX_NAME'],
        docname_data=app.config['ELASTICSEARCH_DATA_DOC_NAME'],
        docname_efo=app.config['ELASTICSEARCH_EFO_LABEL_DOC_NAME'],
        docname_eco=app.config['ELASTICSEARCH_ECO_DOC_NAME'],
        docname_genename=app.config['ELASTICSEARCH_GENE_NAME_DOC_NAME'],
        docname_expression=app.config['ELASTICSEARCH_EXPRESSION_DOC_NAME'],
        docname_reactome=app.config['ELASTICSEARCH_REACTOME_REACTION_DOC_NAME'],
        docname_association=app.config['ELASTICSEARCH_DATA_ASSOCIATION_DOC_NAME'],
        docname_search=app.config['ELASTICSEARCH_DATA_SEARCH_DOC_NAME'],
        # docname_search_target=app.config['ELASTICSEARCH_DATA_SEARCH_TARGET_DOC_NAME'],
        # docname_search_disease=app.config['ELASTICSEARCH_DATA_SEARCH_DISEASE_DOC_NAME'],
        docname_relation=app.config['ELASTICSEARCH_DATA_RELATION_DOC_NAME'],
        log_level=app.logger.getEffectiveLevel(),
        cache=icache)

    # Access-log sink: request events are written to a dedicated ES index.
    app.extensions['es_access_store'] = esStore(
        es,
        eventlog_index=app.config['ELASTICSEARCH_LOG_EVENT_INDEX_NAME'],
        ip2org=ip2org,
    )

    '''mixpanel handlers'''
    if Config.MIXPANEL_TOKEN:
        mp = Mixpanel(Config.MIXPANEL_TOKEN,
                      consumer=AsyncBufferedConsumer())
        app.extensions['mixpanel'] = mp
        app.extensions['mp_access_store'] = MixPanelStore(
            mp,
            ip2org=ip2org,
        )

    app.extensions['proxy'] = ProxyHandler(
        allowed_targets=app.config['PROXY_SETTINGS']['allowed_targets'],
        allowed_domains=app.config['PROXY_SETTINGS']['allowed_domains'],
        allowed_request_domains=app.config['PROXY_SETTINGS']
        ['allowed_request_domains'])

    # NOTE(review): `basepath` appears unused in the rest of this function.
    basepath = app.config['PUBLIC_API_BASE_PATH'] + api_version
    # cors = CORS(app, resources=r'/api/*', allow_headers='Content-Type,Auth-Token')

    ''' define cache'''
    # cache = Cache(config={'CACHE_TYPE': 'simple'})
    # cache.init_app(latest_blueprint)
    # latest_blueprint.cache = cache
    # latest_blueprint.extensions['cache'] = cache
    # app.cache = SimpleCache()
    # NOTE(review): `mode=777` is a *decimal* literal (== 0o1411); the octal
    # permission 0o777 was almost certainly intended — confirm before changing.
    app.cache = FileSystemCache('/tmp/cttv-rest-api-cache', threshold=100000,
                                default_timeout=60 * 60, mode=777)

    '''Set usage limiter '''
    # limiter = Limiter(global_limits=["2000 per hour", "20 per second"])
    # limiter.init_app(app)# use redis to store limits

    '''Load api keys in redis'''
    rate_limit_file = app.config['USAGE_LIMIT_PATH']
    if not os.path.exists(rate_limit_file):
        # Fall back to the parent directory (e.g. when run from a subdir).
        rate_limit_file = '../' + rate_limit_file
    if os.path.exists(rate_limit_file):
        with open(rate_limit_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                # One AuthKey per CSV row, stored as a Redis hash.
                auth_key = AuthKey(**row)
                app.extensions['redis-user'].hmset(auth_key.get_key(),
                                                   auth_key.__dict__)
        print('INFO - succesfully loaded rate limit file')
    else:
        print('ERROR - cannot find rate limit file')
        app.logger.error(
            'cannot find rate limit file: %s. RATE LIMIT QUOTA LOAD SKIPPED!'
            % rate_limit_file)

    '''load ip name resolution'''
    # Unknown source IPs resolve to the sentinel org "PUBLIC".
    ip_resolver = defaultdict(lambda: "PUBLIC")
    ip_list_file = app.config['IP_RESOLVER_LIST_PATH']
    if not os.path.exists(ip_list_file):
        ip_list_file = '../' + ip_list_file
    if os.path.exists(ip_list_file):
        with open(ip_list_file) as csvfile:
            reader = csv.DictReader(csvfile)
            for row in reader:
                net = IPNetwork(row['ip'])
                ip_resolver[net] = row['org']
    else:
        app.logger.warning(
            'cannot find IP list for IP resolver. All traffic will be logged as PUBLIC'
        )
    app.config['IP_RESOLVER'] = ip_resolver

    '''compress http response'''
    compress = Compress()
    compress.init_app(app)

    # One blueprint per exposed API version, plus a floating "latest" alias.
    latest_blueprint = Blueprint('latest', __name__)
    current_version_blueprint = Blueprint(str(api_version), __name__)
    current_minor_version_blueprint = Blueprint(str(api_version_minor),
                                                __name__)

    specpath = '/cttv'

    if app.config['PROFILE'] == True:
        # Per-request profiling middleware (top 30 entries), dev-only.
        from werkzeug.contrib.profiler import ProfilerMiddleware
        app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])

    create_api(latest_blueprint, api_version, specpath)
    create_api(current_version_blueprint, api_version, specpath)
    create_api(current_minor_version_blueprint, api_version_minor, specpath)

    app.register_blueprint(latest_blueprint, url_prefix='/api/latest')
    app.register_blueprint(current_version_blueprint,
                           url_prefix='/api/' + str(api_version))
    app.register_blueprint(current_minor_version_blueprint,
                           url_prefix='/api/' + str(api_version_minor))

    # All /api-docs* routes redirect to the bundled swagger UI.
    @app.route('/api-docs/%s' % str(api_version_minor))
    def docs_current_minor_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs/%s' % str(api_version))
    def docs_current_version():
        return redirect('/api/swagger/index.html')

    @app.route('/api-docs')
    def docs():
        return redirect('/api/swagger/index.html')

    def serve_swagger():
        # The swagger spec itself is shipped as a static file.
        return app.send_static_file('docs/swagger/swagger.yaml')

    @app.route('/api/docs/swagger.yaml')
    def send_swagger():
        return serve_swagger()

    @app.route('/api/latest/docs/swagger.yaml')
    def send_swagger_latest():
        return serve_swagger()

    # NOTE(review): "cersion" is a typo for "version" (internal endpoint
    # name only; the URL is unaffected).
    @app.route('/api/' + str(api_version) + '/docs/swagger.yaml')
    def send_swagger_current_cersion():
        return serve_swagger()

    @app.before_request
    def before_request():
        # Stamp request start time for the latency accounting in after().
        g.request_start = datetime.now()

    @app.after_request
    def after(resp):
        # Per-request accounting: measure latency, update rate-limit
        # counters, and expose usage/limit headers to the client.
        rate_limiter = RateLimiter()
        now = datetime.now()
        took = (now - g.request_start).total_seconds() * 1000  # milliseconds
        if took > 500:
            # Slow response: ask the nginx proxy to cache it.
            # NOTE(review): `took` is in milliseconds here, so this value is
            # 3600 * ms — far larger than the "one hour per second spent"
            # the original comment below describes. Confirm intended units.
            cache_time = str(
                int(3600 * took)
            )  # set cache to last one our for each second spent in the request
            resp.headers.add('X-Accel-Expires', cache_time)
        took = int(round(took))
        LogApiCallWeight(took)
        # if took < RateLimiter.DEFAULT_CALL_WEIGHT:
        #     took = RateLimiter.DEFAULT_CALL_WEIGHT
        current_values = increment_call_rate(took, rate_limiter)
        now = datetime.now()
        ceil10s = round(ceil_dt_to_future_time(now, 10), 2)
        ceil1h = round(ceil_dt_to_future_time(now, 3600), 2)
        usage_left_10s = rate_limiter.short_window_rate - current_values[
            'short']
        usage_left_1h = rate_limiter.long_window_rate - current_values['long']
        # Retry-After points at the hourly window's reset when that window
        # is the exhausted one; otherwise the 10-second window's reset.
        min_ceil = ceil10s
        if usage_left_1h < 0:
            min_ceil = ceil1h
        if (usage_left_10s < 0) or (usage_left_1h < 0):
            resp.headers.add('Retry-After', min_ceil)
        resp.headers.add('X-API-Took', took)
        resp.headers.add('X-Usage-Limit-10s', rate_limiter.short_window_rate)
        resp.headers.add('X-Usage-Limit-1h', rate_limiter.long_window_rate)
        resp.headers.add('X-Usage-Remaining-10s', usage_left_10s)
        resp.headers.add('X-Usage-Remaining-1h', usage_left_1h)
        # resp.headers.add('X-Usage-Limit-Reset-10s', ceil10s)
        # resp.headers.add('X-Usage-Limit-Reset-1h', ceil1h)
        resp.headers.add('Access-Control-Allow-Origin', '*')
        resp.headers.add('Access-Control-Allow-Headers',
                         'Content-Type,Auth-Token')
        if do_not_cache(request):  # do not cache in the browser
            resp.headers.add('Cache-Control',
                             "no-cache, must-revalidate, max-age=0")
        else:
            resp.headers.add(
                'Cache-Control',
                "no-transform, public, max-age=%i, s-maxage=%i" %
                (took * 1800 / 1000, took * 9000 / 1000))
        return resp

    return app
#!/usr/bin/python # -*- coding: UTF-8 -*- from flask import Flask, render_template, redirect, request, session from flask.ext.sqlalchemy import SQLAlchemy from flask_socketio import SocketIO from flask.ext.compress import Compress app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///dayu.db' app.secret_key = 'secret' db = SQLAlchemy(app) compress = Compress() compress.init_app(app) socketio = SocketIO(app) @app.errorhandler(500) def all_exception_handler(error): return redirect('/user/logout') @app.errorhandler(401) def no_authorization_handler(error): return redirect(session['last_url']) @app.after_request def after_any_request(response): path = request.path if request.method == 'GET' and '/static/' not in path: if request.query_string is not None:
# NOTE(review): fragment — the enclosing validator method's `def` line and
# the start of its `schema` literal are outside this chunk; the tail of the
# schema and the lat/long checks are reproduced as-is.
                'type': 'float'
            }
        }
    }
    self._validate_schema(schema, field, value, None)
    # GeoJSON-style point: coordinates are [longitude, latitude].
    if not -180.0 <= value['coordinates'][0] <= 180.0:
        self._error(field, "Longitude must be in the range -180.0, 180.0")
    if not -90.0 <= value['coordinates'][1] <= 90.0:
        self._error(field, "Latitude must be in the range -90.0, 90.0")

# Eve application wired to the settings.py that sits next to this module.
settingsfile = path.join(path.abspath(path.dirname(__file__)), 'settings.py')
api = Eve(API_NAME, validator=KeySchemaValidator, settings=settingsfile)
Bootstrap(api)
Compress(api)
api.register_blueprint(eve_docs, url_prefix='/docs')

# Build the full URL for a resource under the configured URL prefix.
resource_url = lambda resource: '/' + api.config['URL_PREFIX'] + '/' + resource

def get_schema(resource):
    "Get the schema for a given resource."
    return api.config['DOMAIN'][resource]['schema']

def add_document(resource, document):
    "Add a new document to the given resource."
    # Uses Eve's internal POST inside a fake request context so callers
    # don't need an HTTP round-trip; validation is deliberately skipped.
    with api.test_request_context(resource_url(resource)):
        return post_internal(resource, payl=document, skip_validation=True)
# Flat Flask application bootstrap: MySQL, gzip compression, and a pile of
# project-local helper modules.
from flask import *
from flask import session
import image_save
from nocache import nocache
import os, hashlib, db_details
# NOTE(review): `flask.ext.*` imports are deprecated; modern releases need
# `flask_mysql` / `flask_compress` — confirm installed versions.
from flask.ext.mysql import MySQL
from flask.ext.compress import Compress
compress = Compress()
import convert_file
# NOTE(review): `secure_filename` moved to `werkzeug.utils` in newer werkzeug.
from werkzeug import secure_filename
from hurry.filesize import size
# NOTE(review): `port_manage` is imported twice on this line, and `datetime`
# is imported again on the next line — harmless duplicates, left in place.
import generate_random, generate_hash, send_email, random_string, datetime, port_manage, perform_calcy, delete_calc, repeat, port_manage, single_container, db_manage
import datetime, getdisk, subprocess
import cms, getfile
from lamp import *
import threading

mysql = MySQL()
app = Flask(__name__)
compress.init_app(app)
# MySQL connection parameters come from the local db_details module.
app.config['MYSQL_DATABASE_USER'] = db_details.db_user
app.config['MYSQL_DATABASE_PASSWORD'] = db_details.db_pass
app.config['MYSQL_DATABASE_DB'] = db_details.db_name
app.config['MYSQL_DATABASE_HOST'] = 'localhost'
# Random per-process secret key: sessions do not survive a restart.
app.secret_key = os.urandom(24)
thread = None
mysql.init_app(app)
basedir = os.path.abspath(os.path.dirname(__file__))
def register_extensions(app):
    """Attach the third-party extensions (cache busting, then gzip
    compression) to *app*."""
    for initialize in (cache_bust.init_cache_busting, Compress):
        initialize(app)
def test_constructor_init(self):
    """Constructing Compress with an app directly must not raise."""
    extension = Compress(self.app)
# Flat app bootstrap: config, compression and caching extensions, then the
# datausa blueprints and a JSON 500 handler.
import os
from flask import Flask, jsonify
# NOTE(review): `flask.ext.*` imports are deprecated; modern releases need
# `flask_compress` / `flask_cache` — confirm installed versions.
from flask.ext.compress import Compress
# from flask.ext.cors import CORS
from flask.ext.cache import Cache

app = Flask(__name__)
app.config.from_object('config')
Compress(app)
cache = Cache(app)

# Imported after `app` exists because the view modules import it back.
from datausa.attrs.views import mod as attrs_module
from datausa.core.views import mod as core_module

app.register_blueprint(attrs_module)
app.register_blueprint(core_module)
# CORS(app)

@app.errorhandler(500)
def error_page(err):
    # Server errors are reported as JSON rather than an HTML error page.
    return jsonify(error=str(err)), 500
# NOTE(review): fragment — this continues a config class whose opening lines
# (including the start of the CELERY broker-URL expression) are outside this
# chunk; the tail is reproduced as-is.
        'REDIS_URL') or 'redis://localhost:6379'
    CELERY_REDIS_MAX_CONNECTIONS = 5

    # Cache
    CACHE_TYPE = 'redis'
    CACHE_REDIS_URL = os.environ.get('REDIS_URL') or 'redis://localhost:6379'

    # Raygun
    RAYGUN_APIKEY = os.environ.get('RAYGUN_APIKEY') or 'debug'
    GEOCODE_APIKEY = os.environ.get('GEOCODE_APIKEY')

app = Flask(__name__)
app.config.from_object(__name__ + '.ConfigClass')

Compress(app)  # Initialize Flask-Compress
db = SQLAlchemy(app)  # Initialize Flask-SQLAlchemy
mail = Mail(app)  # Initialize Flask-Mail

# Raygun error reporting is only attached outside debug mode.
if not app.debug:
    flask_raygun.Provider(app, app.config['RAYGUN_APIKEY']).attach()

# Celery worker shares the Flask config (broker URL etc.).
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)

cache = Cache(app, config={
    'CACHE_TYPE': app.config['CACHE_TYPE'],
    'CACHE_REDIS_URL': app.config['CACHE_REDIS_URL'],
})