def create_app():
    """Build the Flask app wrapped in the CGI-root and header-rewriting middlewares."""
    application = Flask(__name__)
    # Innermost wrapper first: repair SCRIPT_NAME/PATH_INFO for lighttpd CGI.
    wrapped = LighttpdCGIRootFix(application.wsgi_app)
    # Then rewrite response headers: drop Date, advertise a generic server.
    extra_headers = [
        ('X-Powered-By', 'WSGI'),
        ('Server', 'Noname Server'),
    ]
    application.wsgi_app = HeaderRewriterFix(
        wrapped,
        remove_headers=['Date'],
        add_headers=extra_headers,
    )
    return application
def janitor_factory(options, auth_options):
    """Build a static-file-serving Flask app guarded by an OAuth middleware.

    ``options`` supplies ``base_dir``, ``default`` and ``secret_key``;
    ``auth_options`` supplies the auth service name and OAuth credentials.
    """
    app = Flask(__name__)

    @app.route('/', methods=['GET'])
    @app.route('/<path:path>', methods=['GET'])
    def serve(path=options['default']):
        # Serve files under base_dir; '/' falls back to the configured default page.
        return send_from_directory(options['base_dir'], path)

    # Only GitHub is implemented as an auth backend for now.
    if auth_options['service'].lower() == 'github':
        if 'allowed_orgs' in auth_options:
            service = GithubService(allowed_orgs=auth_options['allowed_orgs'])
        else:
            service = GithubService()
    else:
        raise NotImplementedError()

    client = service.make_client(
        client_id=auth_options['client_id'],
        client_secret=auth_options['client_secret'],
    )

    # Middleware order matters: auth innermost, then proxy header fix,
    # then the ELB health-check responder outermost.
    app.wsgi_app = client.wsgi_middleware(app.wsgi_app, secret=options['secret_key'])
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.wsgi_app = ELBPingPong(app.wsgi_app)
    return app
def create_app(config=None):
    """Application factory: build the app, wire middleware, extensions and blueprints."""
    app = Flask(__name__)
    # Honor X-Forwarded-* proxy headers, then allow ?_method= HTTP method overrides.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app, querystring_param="_method")
    # Config resolution order: explicit argument > HIPTWEET_CONFIG env var > "default".
    config = config or os.environ.get("HIPTWEET_CONFIG") or "default"
    app.config.from_object(expand_config(config))

    configure_logger(app)
    sentry.init_app(app)
    db.init_app(app)
    login_manager.init_app(app)
    create_celery_app(app)
    # Force HTTPS everywhere except in debug mode.
    if not app.debug:
        SSLify(app)

    # Blueprints are imported lazily to avoid circular imports at module load.
    from .oauth import twitter_bp
    app.register_blueprint(twitter_bp, url_prefix="/login")
    from .descriptors import descriptors as descriptors_blueprint
    app.register_blueprint(descriptors_blueprint)
    from .ui import ui as ui_blueprint
    app.register_blueprint(ui_blueprint)
    from .webhook import webhook as webhook_blueprint
    app.register_blueprint(webhook_blueprint, url_prefix="/webhook")
    from .tasks import tasks as tasks_blueprint
    app.register_blueprint(tasks_blueprint)
    return app
def create_app(name=__name__):
    """Application factory: configure extensions, middleware and the browser-id cookie.

    :param name: accepted for caller compatibility; the app is always built
        from this module's ``__name__``.
    """
    app = Flask(__name__, static_path='/static')
    load_config(app)
    babel.init_app(app)
    #cache.init_app(app)
    db.init_app(app)
    filters.init_app(app)
    register_local_modules(app)
    app.wsgi_app = ProxyFixupHelper(app.wsgi_app)

    # Enable the DebugToolbar. The extension registers itself on the app, so
    # the previously unused local binding (`toolbar = ...`) was dropped.
    if app.config['DEBUG_TOOLBAR']:
        DebugToolbarExtension(app)

    # Always attempt to set a BrowserId. At some point this will get used,
    # but let's start setting it now.
    app.wsgi_app = BrowserIdMiddleware(
        app.wsgi_app,
        secret_key=app.config['BROWSER_SECRET_KEY'],
        cookie_name='b',
        cookie_path='/',
        cookie_domain=None,
        cookie_lifetime=86400 * 365 * 10,  # ~10 years
        cookie_secure=None,
        vary=())
    return app
def create_app():
    """Create Flask app (development build: pages served from the source tree)."""
    config = load_config()

    app = Flask(__name__)
    app.config.from_object(config)

    # NOTE(review): `admin` is never used after construction — Flask-Admin
    # registers itself on the app, so the binding exists only for later views.
    admin = Admin(app, name='microblog', template_mode='bootstrap3')
    # Add administrative views here

    if not hasattr(app, 'production'):
        app.production = not app.debug and not app.testing

    # Proxy fix
    app.wsgi_app = ProxyFix(app.wsgi_app)

    # CSRF protect
    CsrfProtect(app)

    if app.debug or app.testing:
        DebugToolbarExtension(app)

        # Serve static files
        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
            '/pages': os.path.join(app.config.get('PROJECT_PATH'), 'application/pages')
        })
    else:
        # Log errors to stderr in production mode
        app.logger.addHandler(logging.StreamHandler())
        app.logger.setLevel(logging.ERROR)

        # Enable Sentry
        if app.config.get('SENTRY_DSN'):
            from .utils.sentry import sentry
            sentry.init_app(app, dsn=app.config.get('SENTRY_DSN'))

        # Serve static files from the generated output directory
        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
            '/static': os.path.join(app.config.get('PROJECT_PATH'), 'output/static'),
            '/pkg': os.path.join(app.config.get('PROJECT_PATH'), 'output/pkg'),
            '/pages': os.path.join(app.config.get('PROJECT_PATH'), 'output/pages')
        })

    # Register components
    register_db(app)
    register_routes(app)
    register_jinja(app)
    register_error_handle(app)
    register_hooks(app)

    return app
def create_app():
    """Create Flask app (production serves templates/static from the output build)."""
    config = load_config()

    # In production, templates come from the generated output directory.
    if config.DEBUG or config.TESTING:
        app = Flask(__name__)
    else:
        app = Flask(__name__, template_folder=os.path.join(project_path, 'output/templates'))
    app.config.from_object(config)

    if not hasattr(app, 'production'):
        app.production = not app.debug and not app.testing

    # Proxy fix
    app.wsgi_app = ProxyFix(app.wsgi_app)

    # CSRF protect
    CsrfProtect(app)

    # Log errors to stderr in production mode
    if app.production:
        app.logger.addHandler(logging.StreamHandler())
        app.logger.setLevel(logging.ERROR)

        # Enable Sentry
        if app.config.get('SENTRY_DSN'):
            from .utils.sentry import sentry
            sentry.init_app(app, dsn=app.config.get('SENTRY_DSN'))

        # Serve static files during production
        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
            '/': os.path.join(project_path, 'output')
        })
    else:
        DebugToolbarExtension(app)

        # Serve static files during development
        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
            '/uploads': os.path.join(app.config.get('PROJECT_PATH'), 'uploads')
        })

    # Register components
    register_db(app)
    register_routes(app)
    register_jinja(app)
    register_error_handle(app)
    register_uploadsets(app)
    register_hooks(app)

    return app
def create_app():
    """Create Flask app."""
    config = load_config()

    app = Flask(__name__)
    app.config.from_object(config)

    # Proxy fix: respect X-Forwarded-* headers from the fronting server.
    app.wsgi_app = ProxyFix(app.wsgi_app)

    # CSRF protect
    CsrfProtect(app)

    if app.debug or app.testing:
        DebugToolbarExtension(app)

        # Serve static files from the source tree in development
        app.wsgi_app = SharedDataMiddleware(
            app.wsgi_app, {"/pages": os.path.join(app.config.get("PROJECT_PATH"), "application/pages")}
        )
    else:
        # Log errors to stderr in production mode
        app.logger.addHandler(logging.StreamHandler())
        app.logger.setLevel(logging.ERROR)

        # Enable Sentry
        if app.config.get("SENTRY_DSN"):
            from .utils.sentry import sentry

            sentry.init_app(app, dsn=app.config.get("SENTRY_DSN"))

        # Serve static files from the generated output build
        app.wsgi_app = SharedDataMiddleware(
            app.wsgi_app,
            {
                "/static": os.path.join(app.config.get("PROJECT_PATH"), "output/static"),
                "/pkg": os.path.join(app.config.get("PROJECT_PATH"), "output/pkg"),
                "/pages": os.path.join(app.config.get("PROJECT_PATH"), "output/pages"),
            },
        )

    # Register components
    register_db(app)
    register_routes(app)
    register_jinja(app)
    register_error_handle(app)
    register_hooks(app)

    return app
def create_app():
    """Assemble the app: blueprints, the /j JSON sub-app, and upload serving."""
    flask_app = Flask(__name__)
    flask_app.config.from_object(config)

    # Blueprints.
    flask_app.register_blueprint(home.bp)
    flask_app.register_blueprint(sse, url_prefix='/stream')

    # Mount the JSON API as a sub-application under /j.
    mounts = OrderedDict()
    mounts['/j'] = json_api
    flask_app.wsgi_app = DispatcherMiddleware(flask_app.wsgi_app, mounts)

    # URL building only for uploaded files; the files themselves are served
    # by the SharedDataMiddleware below.
    flask_app.add_url_rule('/uploads/<filename>', 'uploaded_file', build_only=True)
    upload_map = {'/uploads': flask_app.config['UPLOAD_FOLDER']}
    flask_app.wsgi_app = SharedDataMiddleware(flask_app.wsgi_app, upload_map)

    return flask_app
def create_app(name='davesite', configuration='Default'):
    """
    Overview: Factory method that is responsible for the following.
    Returns the configured Flask app object.

    * Reading the configuration. The configuration is kept in config.py
    * Registering the blueprints. Any blueprints to be added to the
      application are added here.
    * Logging: DaveSite uses the built-in python logging module to provide
      console and file logging. All errors are logged to the console while
      only warnings and above are logged to the file.

    Parameters:
        name: package that davesite currently resides under.
        configuration: string that points to one of the classes in config.py

    Returns:
        A properly configured Flask application
    """
    app = Flask(name)
    initialize_logging()
    try:
        app.config.from_object('davesite.app.config.{config}'.format(config=configuration))
        initialize_blueprints(app)
    except Exception:
        # Any startup failure is fatal: log the traceback and exit non-zero.
        app.logger.exception("Error while starting app:")
        sys.exit(-1)

    add_file_logging(app.config.get('ERROR_LOG_FILE', 'error.log'), logging.WARN)
    # Serve the whole app under the configured URL prefix (defaults to '/').
    app.wsgi_app = URLPrefixMiddleware(app.wsgi_app, app.config.get('SCRIPT_NAME', '/'))
    return app
def create_app(*, debug=False, threads=1, bigchaindb_factory=None):
    """Build and return the Flask application.

    Args:
        debug (bool): enable Flask debug mode (default: False).
        threads (int): size of the backend connection pool.
        bigchaindb_factory: callable producing backend instances; defaults
            to ``BigchainDB`` when not supplied.

    Return:
        an instance of the Flask application.
    """
    factory = bigchaindb_factory if bigchaindb_factory else BigchainDB

    application = Flask(__name__)
    # Strip bogus Content-Type headers before Flask sees the request.
    application.wsgi_app = StripContentTypeMiddleware(application.wsgi_app)
    CORS(application)

    application.debug = debug
    application.config['bigchain_pool'] = utils.pool(factory, size=threads)
    add_routes(application)
    return application
def create_app(package_name, package_path, settings_override=None):
    """Returns a :class:`Flask` application instance configured with common
    functionality for the Overholt platform.

    :param package_name: application package name
    :param package_path: application package path
    :param settings_override: a dictionary of settings to override
    """
    app = Flask(package_name, instance_relative_config=True,
                instance_path=os.path.dirname(__file__))
    #app.config.from_object('vilya.settings')
    # Config precedence: app.cfg, then optional app-dev.cfg, then overrides.
    app.config.from_pyfile("app.cfg")
    app.config.from_pyfile("app-dev.cfg", silent=True)
    app.config.from_object(settings_override)

    db.init_app(app)
    cache.init_app(app)

    register_blueprints(app, package_name, package_path)

    # Let limited clients override the HTTP method via header.
    app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)

    return app
def create_app(package_name, package_path, settings_override=None,
               register_security_blueprint=True):
    """Returns a :class:`Flask` application instance configured with common
    functionality for the _app_name_ platform.

    :param package_name: application package name
    :param package_path: application package path
    :param settings_override: a dictionary of settings to override
    :param register_security_blueprint: flag to specify if the Flask-Security
        Blueprint should be registered. Defaults to `True`.
    """
    app = Flask(package_name, instance_relative_config=True)

    # Config precedence: package defaults, then instance settings.cfg, then overrides.
    app.config.from_object('_app_name_.settings')
    app.config.from_pyfile('settings.cfg', silent=True)
    app.config.from_object(settings_override)

    db.init_app(app)
    mail.init_app(app)
    security.init_app(app, SQLAlchemyUserDatastore(db, User, Role),
                      register_blueprint=register_security_blueprint)

    register_blueprints(app, package_name, package_path)

    # Let limited clients override the HTTP method via header.
    app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)

    return app
def create_app(config, debug=True):
    """Build the test application used to exercise Flask-Social providers.

    :param config: optional mapping of extra settings applied last (wins over
        everything loaded from the test Config object).
    :param debug: enable Flask debug mode.
    """
    app = Flask(__name__)
    app.debug = debug
    app.config['SECRET_KEY'] = 'secret'
    app.config['SECURITY_POST_LOGIN_VIEW'] = '/profile'

    from tests.test_app.config import Config
    app.config.from_object(Config)

    # Explicit overrides win over everything loaded above.
    if config:
        for key, value in config.items():
            app.config[key] = value

    app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)

    @app.route('/')
    def index():
        return render_template('index.html', content='Home Page')

    @app.route('/profile')
    @login_required
    def profile():
        # Render connection state for every configured provider.
        twitter = current_app.social.twitter
        twitter.get_api()
        return render_template(
            'profile.html',
            content='Profile Page',
            twitter_conn=twitter.get_connection(),
            google_conn=current_app.social.google.get_connection(),
            facebook_conn=current_app.social.facebook.get_connection(),
            foursquare_conn=current_app.social.foursquare.get_connection())

    return app
def create_app(package_name, package_path, settings_override=None,
               register_security_blueprint=True):
    """Returns a :class:`Flask` application instance configured with common
    functionality for the milishare platform.

    :param package_name: application package name
    :param package_path: application package path
    :param settings_override: a dictionary of settings to override
    :param register_security_blueprint: flag to specify if the Flask-Security
        Blueprint should be registered. Defaults to `True`.
        (Currently not consulted by this factory.)
    """
    app = Flask(package_name, instance_relative_config=True)

    app.config.from_object('milishare.settings')
    app.config.from_pyfile('settings.cfg', silent=True)
    app.config.from_object(settings_override)

    # SQLAlchemy instance.
    db.init_app(app)
    # with app.app_context():
    #     db.create_all()

    register_blueprints(app, package_name, package_path)

    # Let limited clients override the HTTP method via header.
    app.wsgi_app = HTTPMethodOverrideMiddleware(app.wsgi_app)

    return app
def create_app():
    """Create the Flask app."""
    app = Flask("modelconvert")

    # Package defaults, optionally overridden by the env-var-named settings file.
    app.config.from_object('modelconvert.settings')
    app.config.from_envvar('MODELCONVERT_SETTINGS', silent=True)

    configure_logging(app)

    app.register_blueprint(frontend)

    # Push Flask settings into the Celery instance.
    celery.add_defaults(app.config)

    # configure error handlers
    @app.errorhandler(403)
    def forbidden_page(error):
        return render_template("403.html"), 403

    @app.errorhandler(404)
    def page_not_found(error):
        return render_template("404.html"), 404

    @app.errorhandler(500)
    def server_error_page(error):
        return render_template("500.html"), 500

    # In debug mode, serve generated previews directly from the download path.
    if app.config['DEBUG']:
        from werkzeug.wsgi import SharedDataMiddleware
        app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
            '/preview': app.config["DOWNLOAD_PATH"]
        })

    return app
def create_app(config_name):
    """Application factory keyed by configuration name (e.g. 'development')."""
    app = Flask(__name__)
    app.config.from_object(config[config_name])

    bootstrap.init_app(app)
    db.init_app(app)
    security.init_app(app, user_datastore)
    toolbar.init_app(app)

    # Console logging for this module, level taken from the LOG_LEVEL env var.
    log_level = os.environ.get('LOG_LEVEL', 'INFO')
    logger = logging.getLogger(__name__)
    logger.setLevel(logging_map[log_level])
    stream = logging.StreamHandler()
    stream.setLevel(logging_map[log_level])
    logger.addHandler(stream)

    # Sentry and proxy-header handling only for deployed configurations.
    if config_name in ('docker', 'development', 'production'):
        sentry.init_app(app, logging=True, level=logging.INFO)
        app.wsgi_app = ProxyFix(app.wsgi_app)

    from app.celery import celery
    celery.conf.update(app.config)

    from .main import main
    app.register_blueprint(main)

    from .admin import admin
    admin.init_app(app)

    from .api_0_1 import api as api_0_1_blueprint
    app.register_blueprint(api_0_1_blueprint, url_prefix='/api/0.1')

    return app
def get_app():
    """Build the kardboard app: config, Mongo, Jinja helpers, logging, Exceptional."""
    app = Flask('kardboard')
    app.config.from_object('kardboard.default_settings')
    if os.getenv('KARDBOARD_SETTINGS', None):
        app.config.from_envvar('KARDBOARD_SETTINGS')
    app.secret_key = app.config['SECRET_KEY']
    app.db = PortAwareMongoEngine(app)

    # Template helpers.
    app.jinja_env.add_extension('kardboard.util.Markdown2Extension')
    app.jinja_env.filters['slugify'] = slugify
    app.jinja_env.filters['timesince'] = timesince
    app.jinja_env.filters['jsonencode'] = jsonencode
    app.jinja_env.globals['newrelic_head'] = newrelic_head
    app.jinja_env.globals['newrelic_foot'] = newrelic_foot

    configure_logging(app)

    # Exceptional is optional. Remember whether the import succeeded so we do
    # not hit a NameError below when an API key is configured but the package
    # is missing (the original code only swallowed the ImportError).
    try:
        from flaskext.exceptional import Exceptional
    except ImportError:
        Exceptional = None

    exceptional_key = app.config.get('EXCEPTIONAL_API_KEY', '')
    if exceptional_key and Exceptional is not None:
        exceptional = Exceptional(app)
        app._exceptional = exceptional

    app.wsgi_app = FixGunicorn(app.wsgi_app)

    return app
def create_app():
    """Build the hero_tmpl app: env-driven config plus Mongo-backed Flask-Security."""
    app = Flask(__name__,
                static_folder=os.path.join(PROJECT_ROOT, 'public'),
                static_url_path='/public')

    # Pull the whole process environment into the config.
    app.config.update(os.environ)
    #TODO: read in right hand side from HT config vars
    # NOTE(review): hard-coded SECRET_KEY — replace with a real secret before deploying.
    app.config['SECRET_KEY'] = 'secret'
    app.config['SECURITY_PASSWORD_HASH'] = 'bcrypt'
    app.config['MONGODB_DB'] = 'flask_security_test'
    app.config['MONGODB_HOST'] = 'localhost'
    app.config['MONGODB_PORT'] = 27017
    # Env vars are strings, hence the "True" comparison.
    app.debug = app.config['X_HT_DEBUG'] == "True"

    # Serve ./public at the site root as well.
    app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
        '/': os.path.join(os.path.dirname(__file__), 'public')
    })

    @app.errorhandler(404)
    def page_not_found(e):
        return render_template('404.html'), 404

    SecuritySetup(app)

    # import & register blueprints here:
    #===================================
    from hero_tmpl.views.security import security
    app.register_blueprint(security)

    from hero_tmpl.views.misc import misc
    app.register_blueprint(misc)

    return app
def make_oauth_wsgi(oauth, next_app, config=None):
    """Build a WSGI auth gateway: OAuth login/logout routes in front of ``next_app``.

    :param oauth: OAuth extension instance to bind to this app.
    :param next_app: downstream WSGI application reached via ``default_handler``.
    :param config: optional settings mapping; when omitted, settings are read
        from the file named by the OAUTH_SETTINGS env var.
    """
    app = Flask(__name__, static_folder=None)
    app.config['PROPAGATE_EXCEPTIONS'] = True
    if config:
        app.config.update(config)
    else:
        app.config.from_envvar('OAUTH_SETTINGS')
    app.next_app = next_app

    oauth.init_app(app)

    app.add_url_rule('/login', endpoint=None, view_func=login)
    app.add_url_rule('/logout', endpoint=None, view_func=logout)
    app.add_url_rule('/login/oauth_callback', endpoint=None, view_func=authorized)
    # Everything else falls through to the wrapped downstream application.
    app.add_url_rule(
        '/',
        endpoint=None,
        view_func=default_handler,
        methods=['GET', 'HEAD', 'POST', 'PUT', 'DELETE'],
    )
    app.add_url_rule(
        '/<path:any>',
        endpoint=None,
        view_func=default_handler,
        methods=['GET', 'HEAD', 'POST', 'PUT', 'DELETE'],
    )

    auth = get_auth_provider(oauth)
    # NOTE(review): peeks at the provider's private ``_tokengetter`` to install
    # a default only when the caller has not set one — confirm against the
    # provider's API before relying on this.
    if not auth._tokengetter:
        auth.tokengetter(get_oauth_token)

    app.wsgi_app = ProxyFix(app.wsgi_app)
    return app
def create_app(config_name):
    """Build the Flask app for the given configuration name."""
    application = Flask(__name__)
    cfg = config[config_name]
    application.config.from_object(cfg)
    cfg.init_app(application)

    # Make the app scheme-aware behind the fronting proxy.
    application.wsgi_app = SchemeProxyFix(application.wsgi_app)

    # Pick the recent-calls cache backend: in-memory everywhere except
    # production, which persists to disk.
    if config_name != 'production':
        application.cache = SimpleCache()
    else:  # pragma: no cover
        application.cache = FileSystemCache('recent_calls')

    bootstrap.init_app(application)
    db.init_app(application)

    from .setup import setup as setup_blueprint
    application.register_blueprint(setup_blueprint)

    from .voice import voice as voice_blueprint
    application.register_blueprint(voice_blueprint)

    # Jinja filter that renders phone numbers in national format.
    application.jinja_env.filters['national_format'] = convert_to_national_format

    return application
def create_app(config):
    """Build the site app and rebind the module-global ``app``.

    View modules at the end are imported purely for their route-registration
    side effects, so they must run after ``app`` is configured.
    """
    global app
    app = Flask(
        __name__,
        template_folder='../templates',
        static_folder='../static'
    )
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.config.update(config)
    app.url_rule_class = GeneratorRule

    app.redis = redis.Redis.from_url(config['REDISCLOUD_URL'])

    # Imported for side effects: each module registers its routes on `app`.
    import views
    import login
    import login_oauth
    import talks
    import program
    import entrant
    import vote
    import filters
    import presenters_go
    import service

    return app
def create_app(package_name, package_path, settings_override=None,
               register_security_blueprint=True):
    """Overholt-style application factory with Flask-Security and Sentry.

    :param package_name: application package name
    :param package_path: application package path
    :param settings_override: object whose settings override the package defaults
    :param register_security_blueprint: register Flask-Security's own blueprint
        (login/registration views) when True.
    """
    app = Flask(package_name, instance_relative_config=True)
    app.config.from_object('{}.settings'.format(__package__))
    app.config.from_object(settings_override)

    db.init_app(app)
    mail.init_app(app)
    cache.init_app(app)
    sentry.init_app(app, logging=True, level=logging.ERROR, wrap_wsgi=True)

    user_datastore = SQLAlchemyUserDatastore(db, User, Role)
    Security(app, user_datastore,
             register_blueprint=register_security_blueprint,
             register_form=RegisterForm,
             confirm_register_form=ConfirmRegisterForm)

    # noinspection PyUnusedLocal
    @user_registered.connect_via(app)
    def user_registered_sighandler(_app, user, confirm_token):  # pylint: disable=unused-argument,unused-variable
        # Add default user role
        user_datastore.add_role_to_user(user, user_datastore.find_role('user'))
        db.session.add(user)
        db.session.commit()

    register_blueprints(app, package_name, package_path)

    app.wsgi_app = ProxyFix(app.wsgi_app)
    return app
def create_app(config_object=DevConfig):
    """Build the CWR validator web-service app.

    :param config_object: Flask configuration object (defaults to DevConfig).
    """
    # Service-level configuration (log paths etc.), separate from Flask config.
    config = CWRValidatorConfiguration().get_config()

    app = Flask(__name__)
    api = Api(app)
    app.config.from_object(config_object)

    _register_resources(api)
    _load_services(app, config)

    app.wsgi_app = ProxyFix(app.wsgi_app)

    # In debug mode, mirror application logging into a rotating file.
    if app.config['DEBUG']:
        log = config['log.folder']
        if len(log) == 0:
            # Fall back to a file in the working directory when unconfigured.
            log = 'mera_ws.log'
        handler = RotatingFileHandler(log, maxBytes=10000, backupCount=1)
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(
            Formatter('[%(levelname)s][%(asctime)s] %(message)s'))
        logging.basicConfig(level=logging.DEBUG)
        logging.getLogger('').addHandler(handler)
        app.logger.addHandler(handler)

    return app
def create_app(**kwargs):
    """Return the OAR API application instance."""
    app = Flask(__name__)
    # Proxy and URL-prefix awareness wrap the raw WSGI app first.
    app.wsgi_app = WSGIProxyFix(app.wsgi_app)
    app.wsgi_app = PrefixMiddleware(app.wsgi_app)
    config.setdefault_config(default_config)
    app.config.update(config)
    # Use the API-aware query classes for all database access.
    db.query_class = APIQuery
    db.query_collection_class = APIQueryCollection
    register_error_handlers(app)
    register_hooks(app)
    register_extensions(app)
    register_blueprints(app)
    # kwargs are forwarded to the proxy registration only.
    register_proxy(app, **kwargs)
    return app
def create_app(config=None):
    """Build the chassis Flask app.

    :param config: optional mapping of settings that override values loaded
        from the ``FLASK_CONFIG`` file. The previous ``config={}`` default was
        a shared mutable default; ``None`` is used instead and callers that
        passed ``{}`` (or nothing) behave exactly as before.
    """
    app = Flask("chassis")
    app.config.from_envvar("FLASK_CONFIG")
    app.config.update(config or {})

    #External
    sentry.init_app(app)
    api.init_app(app)
    cache.init_app(app)

    #Internal
    db.init_app(app)

    #API Endpoints
    api.add_resource(CatAPI, "/cats/<int:cat_id>")

    with app.app_context():
        db.create_all()

    #Debug tools
    if app.debug:
        DebugToolbarExtension(app)
        # NOTE(review): profiler assumed to be debug-only (grouped under
        # "#Debug tools") — confirm against the original indentation.
        if app.config.get("PROFILE", False):
            app.wsgi_app = ProfilerMiddleware(app.wsgi_app,
                                              restrictions=[30],
                                              sort_by=("time", "cumulative"))
    return app
def create_app(config=None, app_name=None, blueprints=None):
    """Create a Flask app.

    Returns an ``(app, appsocket)`` tuple: the Flask app plus the decoder's
    socket server created for it.
    """
    if app_name is None:
        app_name = DefaultConfig.PROJECT
    if blueprints is None:
        blueprints = DEFAULT_BLUEPRINTS

    app = Flask(app_name,
                instance_path=INSTANCE_FOLDER_PATH,
                instance_relative_config=True)
    # Support deployment behind a reverse proxy (X-Forwarded-* style headers).
    app.wsgi_app = ReverseProxied(app.wsgi_app)

    configure_app(app, config)
    configure_hook(app)
    configure_blueprints(app, blueprints)
    configure_extensions(app)
    configure_logging(app)
    configure_template_filters(app)
    configure_error_handlers(app)

    appsocket = create_decoder_socket(app)
    decoder = Decoder(app, appsocket)
    # NOTE(review): `manager` is never used after construction — confirm
    # whether Manager(app) is required for side effects or is dead code.
    manager = Manager(app)
    app.decoder = decoder
    return app, appsocket
def create_app(config_name):
    """Application factory: build the app for the named configuration.

    Also installs an after-request hook that reports SQLAlchemy query timings
    to statsd when SQLALCHEMY_RECORD_QUERIES is enabled.
    """
    app = Flask(__name__)
    app.config.from_object(config_by_name[config_name])

    db.init_app(app)
    login_manager.init_app(app)
    moment.init_app(app)
    toolbar.init_app(app)
    stats_client.init_app(app)  # initialize our statsd client, assign it within app
    annotator.init_app(app)  # initialize our annotator, assign it within app
    app.wsgi_app = statsd_middleware(app)  # initialize our statsd middleware
    # annotator.write(module='app',action='startup',text='application initialized')

    # blueprints are self contained portions of an application
    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint, url_prefix='/')
    from .bookmarks import bookmarks as bkm_blueprint
    app.register_blueprint(bkm_blueprint, url_prefix='/bookmarks')
    from .auth import auth as auth_blueprint
    app.register_blueprint(auth_blueprint, url_prefix='/auth')

    @app.after_request  # sql records are available up until the end of the request
    def after_request(response):
        # hook into 'after request' allowing us to send to statsd
        if app.config['SQLALCHEMY_RECORD_QUERIES']:
            queries = get_debug_queries()
            for query in queries:
                context = query.context.replace(':', '_')
                # Fix: `print context` was Python 2-only syntax (a SyntaxError
                # on Python 3); both prints now use the function form, which
                # behaves identically on Python 2 for a single argument.
                print(context)
                duration = query.duration * 1000  # convert to ms
                app.stats_client.timing(
                    'thermos.queries,context={},path={}'.format(context, request.path),
                    duration)
                print(query.duration)
        return response

    return app
def create_app(config):
    """Build the conference app and rebind the module-global ``app``.

    The route modules at the end are imported purely for their
    route-registration side effects.
    """
    global app
    app = Flask(
        __name__,
        template_folder='../templates',
        static_folder='../static',
    )
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.config.update(config)
    app.url_rule_class = GeneratorRule

    app.redis = redis.Redis.from_url(config['REDISCLOUD_URL'])
    # Credentials for the Eventee integration, taken from config.
    app.eventee = {
        'token': config['EVENTEE_TOKEN'],
        'email': config['EVENTEE_EMAIL'],
    }

    # Imported for side effects: each module registers its routes on `app`.
    from . import views
    from . import login
    from . import login_oauth
    from . import talks
    from . import program
    from . import entrant
    from . import vote
    from . import filters
    from . import service
    from . import workshops
    from . import invoices

    from .admin import admin
    app.register_blueprint(admin, url_prefix='/admin')

    return app
def create_app():
    """Application factory: load settings, register blueprints, set up logging."""
    flask_app = Flask(__name__)
    flask_app.config.from_pyfile('setting.py')

    register_blueprint(flask_app)
    config_logging(flask_app)

    # Trust X-Forwarded-* headers set by the fronting proxy.
    flask_app.wsgi_app = ProxyFix(flask_app.wsgi_app)

    return flask_app
def create_app(config_name, set_utf=True):
    """App creation factory based on the FLASK_CONFIG env var."""
    if set_utf:
        # Python 2 era: force the default encoding to UTF-8 before anything else.
        setdefaultencoding()

    app = Flask(
        __name__,
        template_folder="../templates/",
        static_url_path="/static",
        static_folder="../templates/static/"
    )
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)

    # csrf.init_app(app)
    db.init_app(app)
    login_manager.init_app(app)
    app.wsgi_app = ProxyFix(app.wsgi_app)

    mount_blueprints(app, config_name)
    add_error_handlers(app)

    # Errors go to email in production; to a local file when debugging.
    if not app.debug:
        app.logger.addHandler(
            get_mail_handler(name=config['default'].APP_NAME,
                             level=logging.ERROR))
    else:
        app.logger.addHandler(get_file_handler())

    return app
def create_app(config=None, session=None, testing=False, app_name="Airflow"):
    """Airflow webserver application factory.

    Returns an ``(app, appbuilder)`` tuple and also rebinds the module
    globals ``app`` and ``appbuilder`` as a side effect.
    """
    global app, appbuilder
    app = Flask(__name__)
    if conf.getboolean('webserver', 'ENABLE_PROXY_FIX'):
        # Honor X-Forwarded-* headers when running behind a reverse proxy.
        app.wsgi_app = ProxyFix(
            app.wsgi_app,
            num_proxies=conf.get("webserver", "PROXY_FIX_NUM_PROXIES", fallback=None),
            x_for=conf.getint("webserver", "PROXY_FIX_X_FOR", fallback=1),
            x_proto=conf.getint("webserver", "PROXY_FIX_X_PROTO", fallback=1),
            x_host=conf.getint("webserver", "PROXY_FIX_X_HOST", fallback=1),
            x_port=conf.getint("webserver", "PROXY_FIX_X_PORT", fallback=1),
            x_prefix=conf.getint("webserver", "PROXY_FIX_X_PREFIX", fallback=1))
    app.secret_key = conf.get('webserver', 'SECRET_KEY')

    session_lifetime_days = conf.getint('webserver', 'SESSION_LIFETIME_DAYS', fallback=30)
    app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=session_lifetime_days)

    app.config.from_pyfile(settings.WEBSERVER_CONFIG, silent=True)
    app.config['APP_NAME'] = app_name
    app.config['TESTING'] = testing
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

    # Session cookie hardening, driven by webserver config.
    app.config['SESSION_COOKIE_HTTPONLY'] = True
    app.config['SESSION_COOKIE_SECURE'] = conf.getboolean('webserver', 'COOKIE_SECURE')
    app.config['SESSION_COOKIE_SAMESITE'] = conf.get('webserver', 'COOKIE_SAMESITE')

    if config:
        app.config.from_mapping(config)

    # Configure the JSON encoder used by `|tojson` filter from Flask
    app.json_encoder = AirflowJsonEncoder

    csrf.init_app(app)

    db = SQLA(app)

    from airflow import api
    api.load_auth()
    api.API_AUTH.api_auth.init_app(app)

    Cache(app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})

    from airflow.www.blueprints import routes
    app.register_blueprint(routes)

    configure_logging()
    configure_manifest_files(app)

    with app.app_context():
        from airflow.www.security import AirflowSecurityManager
        security_manager_class = app.config.get('SECURITY_MANAGER_CLASS') or \
            AirflowSecurityManager

        if not issubclass(security_manager_class, AirflowSecurityManager):
            raise Exception("""Your CUSTOM_SECURITY_MANAGER must now extend 
AirflowSecurityManager, not FAB's security manager.""")

        appbuilder = AppBuilder(
            app,
            db.session if not session else session,
            security_manager_class=security_manager_class,
            base_template='appbuilder/baselayout.html',
            update_perms=conf.getboolean('webserver', 'UPDATE_FAB_PERMS'))

        def init_views(appbuilder):
            # Register every built-in view and menu entry on the AppBuilder.
            from airflow.www import views
            # Remove the session from scoped_session registry to avoid
            # reusing a session with a disconnected connection
            appbuilder.session.remove()
            appbuilder.add_view_no_menu(views.Airflow())
            appbuilder.add_view_no_menu(views.DagModelView())
            appbuilder.add_view(views.DagRunModelView,
                                "DAG Runs",
                                category="Browse",
                                category_icon="fa-globe")
            appbuilder.add_view(views.JobModelView, "Jobs", category="Browse")
            appbuilder.add_view(views.LogModelView, "Logs", category="Browse")
            appbuilder.add_view(views.SlaMissModelView, "SLA Misses", category="Browse")
            appbuilder.add_view(views.TaskInstanceModelView, "Task Instances", category="Browse")
            appbuilder.add_view(views.ConfigurationView,
                                "Configurations",
                                category="Admin",
                                category_icon="fa-user")
            appbuilder.add_view(views.ConnectionModelView, "Connections", category="Admin")
            appbuilder.add_view(views.PoolModelView, "Pools", category="Admin")
            appbuilder.add_view(views.VariableModelView, "Variables", category="Admin")
            appbuilder.add_view(views.XComModelView, "XComs", category="Admin")

            # Docs link matches this build's version; dev builds point at latest.
            if "dev" in version.version:
                airflow_doc_site = "https://airflow.readthedocs.io/en/latest"
            else:
                airflow_doc_site = 'https://airflow.apache.org/docs/{}'.format(version.version)

            appbuilder.add_link("Website",
                                href='https://airflow.apache.org',
                                category="Docs",
                                category_icon="fa-globe")
            appbuilder.add_link("Documentation",
                                href=airflow_doc_site,
                                category="Docs",
                                category_icon="fa-cube")
            appbuilder.add_link("GitHub",
                                href='https://github.com/apache/airflow',
                                category="Docs")
            appbuilder.add_view(views.VersionView,
                                'Version',
                                category='About',
                                category_icon='fa-th')

            def integrate_plugins():
                """Integrate plugins to the context"""
                from airflow.plugins_manager import (
                    flask_appbuilder_views, flask_appbuilder_menu_links)

                for v in flask_appbuilder_views:
                    log.debug("Adding view %s", v["name"])
                    appbuilder.add_view(v["view"],
                                        v["name"],
                                        category=v["category"])
                for ml in sorted(flask_appbuilder_menu_links, key=lambda x: x["name"]):
                    log.debug("Adding menu link %s", ml["name"])
                    appbuilder.add_link(ml["name"],
                                        href=ml["href"],
                                        category=ml["category"],
                                        category_icon=ml["category_icon"])

            integrate_plugins()
            # Garbage collect old permissions/views after they have been modified.
            # Otherwise, when the name of a view or menu is changed, the framework
            # will add the new Views and Menus names to the backend, but will not
            # delete the old ones.

        def init_plugin_blueprints(app):
            # Register every blueprint contributed by installed plugins.
            from airflow.plugins_manager import flask_blueprints

            for bp in flask_blueprints:
                log.debug("Adding blueprint %s:%s", bp["name"], bp["blueprint"].import_name)
                app.register_blueprint(bp["blueprint"])

        init_views(appbuilder)
        init_plugin_blueprints(app)

        if conf.getboolean('webserver', 'UPDATE_FAB_PERMS'):
            security_manager = appbuilder.sm
            security_manager.sync_roles()

        from airflow.www.api.experimental import endpoints as e
        # required for testing purposes otherwise the module retains
        # a link to the default_auth
        if app.config['TESTING']:
            import importlib
            importlib.reload(e)

        app.register_blueprint(e.api_experimental, url_prefix='/api/experimental')

        @app.context_processor
        def jinja_globals():  # pylint: disable=unused-variable
            # Values exposed to every rendered template.
            globals = {
                'hostname': socket.getfqdn() if conf.getboolean(
                    'webserver', 'EXPOSE_HOSTNAME', fallback=True) else 'redact',
                'navbar_color': conf.get('webserver', 'NAVBAR_COLOR'),
                'log_fetch_delay_sec': conf.getint('webserver', 'log_fetch_delay_sec', fallback=2),
                'log_auto_tailing_offset': conf.getint('webserver', 'log_auto_tailing_offset', fallback=30),
                'log_animation_speed': conf.getint('webserver', 'log_animation_speed', fallback=1000)
            }

            if 'analytics_tool' in conf.getsection('webserver'):
                globals.update({
                    'analytics_tool': conf.get('webserver', 'ANALYTICS_TOOL'),
                    'analytics_id': conf.get('webserver', 'ANALYTICS_ID')
                })

            return globals

        @app.before_request
        def before_request():
            # Optional forced logout: mark the session permanent with a short
            # lifetime so it expires server-side.
            _force_log_out_after = conf.getint('webserver', 'FORCE_LOG_OUT_AFTER', fallback=0)
            if _force_log_out_after > 0:
                flask.session.permanent = True
                app.permanent_session_lifetime = datetime.timedelta(minutes=_force_log_out_after)
                flask.session.modified = True
            # NOTE(review): reconstructed from collapsed text — g.user assumed
            # to be set on every request, not only when forced logout is on.
            flask.g.user = flask_login.current_user

        @app.after_request
        def apply_caching(response):
            # Optionally deny framing of the UI.
            _x_frame_enabled = conf.getboolean('webserver', 'X_FRAME_ENABLED', fallback=True)
            if not _x_frame_enabled:
                response.headers["X-Frame-Options"] = "DENY"
            return response

        @app.teardown_appcontext
        def shutdown_session(exception=None):  # pylint: disable=unused-variable
            settings.Session.remove()

        @app.before_request
        def make_session_permanent():
            flask_session.permanent = True

    return app, appbuilder
# Can be pickle (default), json, yaml, msgpack solr_url = "http://politicalframing.com:8983/solr" #"http://localhost:8983/solr/" # "http://localhost:8983/solr/" # "http://politicalframing.com:8983/solr" h = httplib2.Http(cache="/var/tmp/solr_cache") si = SolrInterface(url=solr_url, http_connection=h) # Instantiate Flask SECRET_KEY = 'poop' CELERY_ACCEPT_CONTENT = ['pickle', 'json', 'msgpack', 'yaml'] app = Flask(__name__) app.config.from_object(__name__) celery = make_celery(app) app.wsgi_app = SharedDataMiddleware( app.wsgi_app, {'/': os.path.join(os.path.dirname(__file__), 'static')}) app.wsgi_app = SharedDataMiddleware( app.wsgi_app, {'/': os.path.join(os.path.dirname(__file__), 'static/.tmp')}) # Compile Assets # assets = Environment(app) # assets.url = app.static_url_path # css_bundle = Bundle('css/home.css.sass', filters='sass', output='all.css') # assets.register('css_all', css_bundle) # js_bundle = Bundle('js/test.js.coffee', filters='coffeescript', output='all.js') # assets.register('js_all', js_bundle)
# Preview service: a small flask_restful API wrapped in Prometheus
# request monitoring.
import os

from flask import Flask
from flask_cors import CORS
from flask_restful import Api

from previews.routes import (
    PreviewRequests,
    HealthzEndpoint,
)
from flask_prom import monitor

app = Flask("preview")
CORS(app)
api = Api(app)
# flask_prom's monitor() wraps the app to record request metrics.
app.wsgi_app = monitor(app)

api.add_resource(PreviewRequests, "/")
api.add_resource(HealthzEndpoint, "/healthz")

if __name__ == "__main__":
    # FLASK_DEBUG comes back as a string when set; any non-empty value
    # (including "0") is truthy here. NOTE(review): confirm that is intended.
    app.run(debug=os.environ.get("FLASK_DEBUG", False))
# NOTE(review): fragment — this chunk opens mid-expression; the call whose
# dict argument these entries close (presumably a cache init) and the
# enclosing factory are outside this view.
    'CACHE_TYPE': app.config['CACHE_TYPE'],
    'CACHE_REDIS_HOST': app.config['CACHE_REDIS_HOST'],
    'CACHE_KEY_PREFIX': app.config['CACHE_KEY_PREFIX']
})

# Load static assets manifest file, which maps source file names to the
# corresponding versioned/hashed file name.
_manifest_path = app.config.get('ASSET_MANIFEST_PATH')
if _manifest_path:
    with open(_manifest_path, 'r') as f:
        app.config['ASSET_MANIFEST'] = json.loads(f.read())
    app.logger.info(app.config['ASSET_MANIFEST'])

# Enable Whitenoise which will allow the application when using nginx to serve out the static assets.
if app.config['STATIC_ASSET_PATH']:
    app.wsgi_app = WhiteNoise(app.wsgi_app, root=app.config['STATIC_ASSET_PATH'], prefix='static/')

# Enable CORS, if specified in the configuration.
if app.config.get('FLASK_CORS'):
    from flask_cors import CORS
    CORS(app)

from .auth.views import auth_blueprint
from .manager.views import manager
from .pubswh.views import pubswh
from .metrics.views import metrics
from . import filters  # pylint: disable=C0413

app.register_blueprint(auth_blueprint)
app.register_blueprint(manager, url_prefix='/manager')
app.register_blueprint(metrics, url_prefix='/metrics')
# NOTE(review): `pubswh` is imported but not registered within this fragment —
# presumably registered just past this chunk; verify.
# NOTE(review): fragment — this chunk opens inside an unseen view function
# (its ``@app.route``/``def`` header precedes this view of the file).
    try:
        return render_template(
            f'{template_name}.html',
            body_id=template_name,
            meetups=MEETUPS)
    except TemplateNotFound:
        abort(404)


@app.route('/docs/<name>')
def rest(name):
    # Render a reStructuredText file from templates/ via docutils' HTML5 writer.
    with open(f'templates/{name}.rst') as fd:
        html = docutils.core.publish_parts(
            source=fd.read(),
            writer=docutils.writers.html5_polyglot.Writer(),
            settings_overrides={'initial_header_level': 2})['body']
    return render_template('rst.html', body_id=name, html=html)


@app.route('/feed/<name>')
def feed(name):
    # Fetch a configured RSS/Atom feed and render its entries.
    feed = feedparser.parse(FEEDS[name])
    return render_template('feed.html', body_id=name, entries=feed.entries)


if __name__ == '__main__':
    # Compile Sass on the fly — local development server only.
    from sassutils.wsgi import SassMiddleware
    app.wsgi_app = SassMiddleware(app.wsgi_app, {
        'afpy': ('sass', 'static/css', '/static/css')
    })
    app.run(debug=True)
from collections import deque
import itertools
import config

# Path of the tracking-pixel image served to clients.
IMAGE_NAME = "./static/very_good_security.png"
# Upper bound on the cookie payload size we emit.
MAX_COOKIE_SIZE = 100

from werkzeug.debug import DebuggedApplication

app = Flask(__name__)
app.secret_key = config.SECRET_KEY
app.config['SESSION_TYPE'] = 'filesystem'
app.config['ENV'] = 'development'
app.config['TESTING'] = True
# Interactive in-browser debugger (evalex=True) -- development only; never
# expose this configuration publicly.
app.wsgi_app = DebuggedApplication(app.wsgi_app, True)
app.debug = True

sess = Session()
sess.init_app(app)


@app.route('/', methods=('GET', 'POST'))
def index():
    # Dispatch on the HTTP verb; the page/payload helpers are defined
    # elsewhere in this module.
    if request.method == 'GET':
        return initiate_page()
    if request.method == 'POST':
        return prepare_pixel_payload()


if __name__ == "__main__":
    # BUG FIX 1: 'host' must be a bare interface address; the port goes in the
    # separate 'port' argument ('0.0.0.0:5000' is not a valid host value).
    # BUG FIX 2: app.run() blocks, so the __main__ guard must come *after* the
    # route definitions -- previously the '/' route was registered only after
    # the server had already been started, i.e. never, when run as a script.
    app.run(host='0.0.0.0', port=5000)
# app.py
from flask import Flask, request, jsonify, abort

app = Flask(__name__)

from whitenoise import WhiteNoise

# Serve files under ./static straight from the WSGI layer.
app.wsgi_app = WhiteNoise(app.wsgi_app, root='./static')

import urllib.request
import json
import pymongo
import random
import string
from tqdm import tqdm
import requests
import downloader
from bson.json_util import dumps
from bson.json_util import loads
import os
import wget
import ffmpeg

dirName = './static'
# BUG FIX: the command was "bash apt-get install -y ffmpeg", which asks bash
# to execute /usr/bin/apt-get as a *shell script* and therefore always fails.
# Invoke apt-get directly instead.
bashCommand = "apt-get install -y ffmpeg"
import subprocess

# Best-effort install: returncode/stderr are deliberately ignored, matching
# the original behaviour (the app still starts if ffmpeg is already present).
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
output, error = process.communicate()

try:
    # Create target Directory
    os.mkdir(dirName)
    print("Directory ", dirName, " Created ")
except FileExistsError:
    print("Directory ", dirName, " already exists")
self.batch_send = batch_send self.batch_send_every_n = batch_send_every_n self.producer = SimpleProducer(self.client, batch_send=batch_send, batch_send_every_n=batch_send_every_n) def send_messages(self, topic, msg): self.producer.send_messages(topic, msg) app = Flask(__name__) @app.route('/<topic>', methods=['GET', 'POST']) def index(topic): data = request.get_data() hosts = { 'kafka.host1': '9092', 'kafka.host2': '9092', 'kafka,host3': '9092' } producer = Producer(hosts, False, 20) producer.send_messages(topic, data) return 'success' app.wsgi_app = ProxyFix(app.wsgi_app) if __name__ == "__main__": app.run()
if scheme: environ['wsgi.url_scheme'] = scheme return self.app(environ, start_response) app = Flask(__name__) app.json_encoder = LazyJSONEncoder ES_SERVER = {"host": config["es_host"], "port": int(config["es_port"])} INDEX_NAME = 'arguments' es = Elasticsearch(hosts=[ES_SERVER]) reversed = True if (reversed): app.wsgi_app = ReverseProxied(app.wsgi_app) template2 = dict(swaggerUiPrefix=LazyString( lambda: request.environ.get('HTTP_X_SCRIPT_NAME', ''))) swagger = Swagger(app, template=template2) else: swagger = Swagger(app) api = Api(app) def create_api_url(endpoint): return 'http://' + config["backend_host"] + ":" + config[ "backend_port"] + "/" + endpoint class Sender:
def create_app() -> Flask: app = Flask(__name__) # SimpleLogin is deployed behind NGINX app.wsgi_app = ProxyFix(app.wsgi_app, num_proxies=1) limiter.init_app(app) app.url_map.strict_slashes = False app.config["SQLALCHEMY_DATABASE_URI"] = DB_URI app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False # enable to print all queries generated by sqlalchemy # app.config["SQLALCHEMY_ECHO"] = True app.secret_key = FLASK_SECRET app.config["TEMPLATES_AUTO_RELOAD"] = True # to avoid conflict with other cookie app.config["SESSION_COOKIE_NAME"] = SESSION_COOKIE_NAME if URL.startswith("https"): app.config["SESSION_COOKIE_SECURE"] = True app.config["SESSION_COOKIE_SAMESITE"] = "Lax" setup_error_page(app) init_extensions(app) register_blueprints(app) set_index_page(app) jinja2_filter(app) setup_favicon_route(app) setup_openid_metadata(app) init_admin(app) setup_paddle_callback(app) setup_do_not_track(app) if FLASK_PROFILER_PATH: LOG.d("Enable flask-profiler") app.config["flask_profiler"] = { "enabled": True, "storage": { "engine": "sqlite", "FILE": FLASK_PROFILER_PATH }, "basicAuth": { "enabled": True, "username": "******", "password": FLASK_PROFILER_PASSWORD, }, "ignore": ["^/static/.*", "/git", "/exception"], } flask_profiler.init_app(app) # enable CORS on /api endpoints cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) # set session to permanent so user stays signed in after quitting the browser # the cookie is valid for 7 days @app.before_request def make_session_permanent(): session.permanent = True app.permanent_session_lifetime = timedelta(days=7) return app
from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from whitenoise import WhiteNoise

# Application object with locally-served Bootstrap assets; WhiteNoise serves
# everything under static/ straight from the WSGI layer.
app = Flask(__name__)
app.config["BOOTSTRAP_SERVE_LOCAL"] = True
Bootstrap(app)
app.wsgi_app = WhiteNoise(app.wsgi_app, root="static/")


@app.route("/", methods=["get"])
def index():
    """Render the landing page."""
    return render_template("index.html")


if __name__ == "__main__":
    app.run(debug=True)
def make_app(build_dir: str, models: Dict[str, DemoModel], demo_db: Optional[DemoDatabase] = None, cache_size: int = 128, interpret_cache_size: int = 500, attack_cache_size: int = 500) -> Flask: if not os.path.exists(build_dir): logger.error("app directory %s does not exist, aborting", build_dir) sys.exit(-1) app = Flask(__name__) # pylint: disable=invalid-name start_time = datetime.now(pytz.utc) start_time_str = start_time.strftime("%Y-%m-%d %H:%M:%S %Z") app.predictors = {} app.max_request_lengths = { } # requests longer than these will be rejected to prevent OOME app.attackers = defaultdict(dict) app.interpreters = defaultdict(dict) app.wsgi_app = ProxyFix( app.wsgi_app) # sets the requester IP with the X-Forwarded-For header for name, demo_model in models.items(): if demo_model is not None: logger.info(f"loading {name} model") predictor = demo_model.predictor() app.predictors[name] = predictor app.max_request_lengths[name] = demo_model.max_request_length if name in supported_interpret_models: app.interpreters[name]['simple_gradient'] = SimpleGradient( predictor) app.interpreters[name][ 'integrated_gradient'] = IntegratedGradient(predictor) app.interpreters[name]['smooth_gradient'] = SmoothGradient( predictor) app.attackers[name]["input_reduction"] = InputReduction( predictor) if name == 'masked-lm': app.attackers[name]["hotflip"] = Hotflip(predictor, 'bert') elif name == "next-token-lm": app.attackers[name]["hotflip"] = Hotflip(predictor, 'gpt2') elif 'named-entity-recognition' in name: # We haven't implemented hotflip for NER. continue elif name == 'textual-entailment': # The SNLI model only has ELMo embeddings, which don't work with hotflip on # their own. continue else: app.attackers[name]["hotflip"] = Hotflip(predictor) app.attackers[name]["hotflip"].initialize() # Disable caching for HTML documents and API responses so that clients # always talk to the source (this server). 
@app.after_request def set_cache_headers(resp: Response) -> Response: if resp.mimetype == "text/html" or resp.mimetype == "application/json": return with_no_cache_headers(resp) else: return resp @app.errorhandler(ServerError) def handle_invalid_usage(error: ServerError) -> Response: # pylint: disable=unused-variable response = jsonify(error.to_dict()) response.status_code = error.status_code return response @lru_cache(maxsize=cache_size) def _caching_prediction(model: Predictor, data: str) -> JsonDict: """ Just a wrapper around ``model.predict_json`` that allows us to use a cache decorator. """ return model.predict_json(json.loads(data)) @lru_cache(maxsize=interpret_cache_size) def _caching_interpret(interpreter: SaliencyInterpreter, data: str) -> JsonDict: """ Just a wrapper around ``model.interpret_from_json`` that allows us to use a cache decorator. """ return interpreter.saliency_interpret_from_json(json.loads(data)) @lru_cache(maxsize=attack_cache_size) def _caching_attack(attacker: Attacker, data: str, input_field_to_attack: str, grad_input_field: str, target: str) -> JsonDict: """ Just a wrapper around ``model.attack_from_json`` that allows us to use a cache decorator. """ return attacker.attack_from_json( inputs=json.loads(data), input_field_to_attack=input_field_to_attack, grad_input_field=grad_input_field, target=json.loads(target)) @app.route('/') def index() -> Response: # pylint: disable=unused-variable return send_file(os.path.join(build_dir, 'index.html')) @app.route('/permadata/<model_name>', methods=['POST', 'OPTIONS']) def permadata(model_name: str) -> Response: # pylint: disable=unused-variable """ If the user requests a permalink, the front end will POST here with the payload { slug: slug } which we convert to an integer id and use to retrieve saved results from the database. """ # This is just CORS boilerplate. 
if request.method == "OPTIONS": return Response(response="", status=200) # If we don't have a database configured, there are no permalinks. if demo_db is None: raise ServerError('Permalinks are not enabled', 400) # Convert the provided slug to an integer id. slug = request.get_json()["slug"] perma_id = slug_to_int(slug) if perma_id is None: # Malformed slug raise ServerError("Unrecognized permalink: {}".format(slug), 400) # Fetch the results from the database. try: permadata = demo_db.get_result(perma_id) except psycopg2.Error: logger.exception( "Unable to get results from database: perma_id %s", perma_id) raise ServerError('Database trouble', 500) if permadata is None: # No data found, invalid id? raise ServerError("Unrecognized permalink: {}".format(slug), 400) return jsonify({ "modelName": permadata.model_name, "requestData": permadata.request_data, "responseData": permadata.response_data }) @app.route('/predict/<model_name>', methods=['POST', 'OPTIONS']) def predict(model_name: str) -> Response: # pylint: disable=unused-variable """make a prediction using the specified model and return the results""" if request.method == "OPTIONS": return Response(response="", status=200) # Do log if no argument is specified record_to_database = request.args.get("record", "true").lower() != "false" # Do use the cache if no argument is specified use_cache = request.args.get("cache", "true").lower() != "false" lowered_model_name = model_name.lower() model = app.predictors.get(lowered_model_name) if model is None: raise ServerError("unknown model: {}".format(model_name), status_code=400) max_request_length = app.max_request_lengths[lowered_model_name] data = request.get_json() serialized_request = json.dumps(data) if len(serialized_request) > max_request_length: raise ServerError( f"Max request length exceeded for model {model_name}! 
" + f"Max: {max_request_length} Actual: {len(serialized_request)}") logger.info("request: %s", json.dumps({ "model": model_name, "inputs": data })) log_blob = { "model": model_name, "inputs": data, "cached": False, "outputs": {} } # Record the number of cache hits before we hit the cache so we can tell whether we hit or not. # In theory this could result in false positives. pre_hits = _caching_prediction.cache_info().hits # pylint: disable=no-value-for-parameter if record_to_database and demo_db is not None: try: perma_id = None perma_id = demo_db.insert_request( headers=dict(request.headers), requester=request.remote_addr, model_name=model_name, inputs=data) except Exception: # pylint: disable=broad-except # TODO(joelgrus): catch more specific errors logger.exception("Unable to add request to database", exc_info=True) if use_cache and cache_size > 0: # lru_cache insists that all function arguments be hashable, # so unfortunately we have to stringify the data. prediction = _caching_prediction(model, json.dumps(data)) else: # if cache_size is 0, skip caching altogether prediction = model.predict_json(data) post_hits = _caching_prediction.cache_info().hits # pylint: disable=no-value-for-parameter if record_to_database and demo_db is not None and perma_id is not None: try: demo_db.update_response(perma_id=perma_id, outputs=prediction) slug = int_to_slug(perma_id) prediction["slug"] = slug log_blob["slug"] = slug except Exception: # pylint: disable=broad-except # TODO(joelgrus): catch more specific errors logger.exception("Unable to add response to database", exc_info=True) if use_cache and post_hits > pre_hits: # Cache hit, so insert an artifical pause log_blob["cached"] = True time.sleep(0.25) # The model predictions are extremely verbose, so we only log the most human-readable # parts of them. 
if "comprehension" in model_name: if 'best_span_str' in prediction: answer = prediction['best_span_str'] else: answer = prediction['answer'] log_blob["outputs"]["answer"] = answer elif model_name == "coreference-resolution": log_blob["outputs"]["clusters"] = prediction["clusters"] log_blob["outputs"]["document"] = prediction["document"] elif model_name == "textual-entailment": log_blob["outputs"]["label_probs"] = prediction["label_probs"] elif model_name == "sentiment-analysis": log_blob["outputs"]["probs"] = prediction["probs"] elif model_name == "named-entity-recognition": log_blob["outputs"]["tags"] = prediction["tags"] elif model_name == "semantic-role-labeling": verbs = [] for verb in prediction["verbs"]: # Don't want to log boring verbs with no semantic parses. good_tags = [tag for tag in verb["tags"] if tag != "0"] if len(good_tags) > 1: verbs.append({ "verb": verb["verb"], "description": verb["description"] }) log_blob["outputs"]["verbs"] = verbs elif model_name == "constituency-parsing": log_blob["outputs"]["trees"] = prediction["trees"] elif model_name == "wikitables-parser": log_blob['outputs']['logical_form'] = prediction['logical_form'] log_blob['outputs']['answer'] = prediction['answer'] elif model_name == "quarel-parser-zero": log_blob['outputs']['logical_form'] = prediction['logical_form'] log_blob['outputs']['answer'] = prediction['answer'] log_blob['outputs']['score'] = prediction['score'] elif model_name == "nlvr-parser": log_blob['outputs']['logical_form'] = prediction['logical_form'][0] log_blob['outputs']['answer'] = prediction['denotations'][0][0] elif model_name == "atis-parser": log_blob['outputs']['predicted_sql_query'] = prediction[ 'predicted_sql_query'] # TODO(brendanr): Add event2mind log_blob here? 
logger.info("prediction: %s", json.dumps(log_blob)) return jsonify(prediction) @app.route('/attack/<model_name>', methods=['POST', 'OPTIONS']) def attack(model_name: str) -> Response: """ Modify input to change prediction of model """ if request.method == "OPTIONS": return Response(response="", status=200) # Do use the cache if no argument is specified use_cache = request.args.get("cache", "true").lower() != "false" lowered_model_name = model_name.lower() data = request.get_json() attacker_name = data.pop("attacker") input_field_to_attack = data.pop("inputToAttack") grad_input_field = data.pop("gradInput") target = data.pop("target", None) model_attackers = app.attackers.get(lowered_model_name) if model_attackers is None: raise ServerError("unknown model: {}".format(model_name), status_code=400) attacker = model_attackers.get(attacker_name) if attacker is None: raise ServerError("unknown attacker for model: {} {}".format( attacker_name, model_name), status_code=400) max_request_length = app.max_request_lengths[lowered_model_name] serialized_request = json.dumps(data) if len(serialized_request) > max_request_length: raise ServerError( f"Max request length exceeded for model {model_name}! " + f"Max: {max_request_length} Actual: {len(serialized_request)}") pre_hits = _caching_attack.cache_info().hits # pylint: disable=no-value-for-parameter if use_cache and attack_cache_size > 0: # lru_cache insists that all function arguments be hashable, # so unfortunately we have to stringify the data. 
attack = _caching_attack(attacker, json.dumps(data), input_field_to_attack, grad_input_field, json.dumps(target)) else: # if cache_size is 0, skip caching altogether attack = attacker.attack_from_json( inputs=data, input_field_to_attack=input_field_to_attack, grad_input_field=grad_input_field, target=target) post_hits = _caching_attack.cache_info().hits # pylint: disable=no-value-for-parameter if use_cache and post_hits > pre_hits: # Cache hit, so insert an artifical pause time.sleep(0.25) return jsonify(attack) @app.route('/interpret/<model_name>', methods=['POST', 'OPTIONS']) def interpret(model_name: str) -> Response: """ Interpret prediction of the model """ if request.method == "OPTIONS": return Response(response="", status=200) # Do use the cache if no argument is specified use_cache = request.args.get("cache", "true").lower() != "false" lowered_model_name = model_name.lower() data = request.get_json() interpreter_name = data.pop("interpreter") model_interpreters = app.interpreters.get(lowered_model_name) if model_interpreters is None: raise ServerError( "no interpreters for model: {}".format(model_name), status_code=400) interpreter = model_interpreters.get(interpreter_name) if interpreter is None: raise ServerError("unknown interpreter for model: {} {}".format( interpreter_name, model_name), status_code=400) max_request_length = app.max_request_lengths[lowered_model_name] serialized_request = json.dumps(data) if len(serialized_request) > max_request_length: raise ServerError( f"Max request length exceeded for interpreter {model_name}! " + f"Max: {max_request_length} Actual: {len(serialized_request)}") pre_hits = _caching_interpret.cache_info().hits # pylint: disable=no-value-for-parameter if use_cache and interpret_cache_size > 0: # lru_cache insists that all function arguments be hashable, # so unfortunately we have to stringify the data. 
interpretation = _caching_interpret(interpreter, json.dumps(data)) else: # if cache_size is 0, skip caching altogether interpretation = interpreter.saliency_interpret_from_json(data) post_hits = _caching_prediction.cache_info().hits # pylint: disable=no-value-for-parameter if use_cache and post_hits > pre_hits: # Cache hit, so insert an artifical pause time.sleep(0.25) return jsonify(interpretation) @app.route('/models') def list_models() -> Response: # pylint: disable=unused-variable """list the available models""" return jsonify({"models": list(app.predictors.keys())}) @app.route('/info') def info() -> Response: # pylint: disable=unused-variable """List metadata about the running webserver""" uptime = str(datetime.now(pytz.utc) - start_time) git_version = os.environ.get('ALLENNLP_DEMO_SOURCE_COMMIT') or "" return jsonify({ "start_time": start_time_str, "uptime": uptime, "git_version": git_version, "peak_memory_mb": peak_memory_mb(), "githubUrl": "http://github.com/allenai/allennlp-demo/commit/" + git_version }) @app.route('/health') def health() -> Response: # pylint: disable=unused-variable return "healthy" # As an SPA, we need to return index.html for /model-name and /model-name/permalink def return_page(permalink: str = None) -> Response: # pylint: disable=unused-argument, unused-variable """return the page""" return send_file(os.path.join(build_dir, 'index.html')) for model_name in models: logger.info(f"setting up default routes for {model_name}") app.add_url_rule(f"/{model_name}", view_func=return_page) app.add_url_rule(f"/{model_name}/<permalink>", view_func=return_page) @app.route('/', defaults={'path': ''}) @app.route('/<path:path>') def static_proxy(path: str) -> Response: # pylint: disable=unused-variable if os.path.isfile(os.path.join(build_dir, path)): return send_from_directory(build_dir, path) else: # Send the index.html page back to the client as a catch-all, since # we're an SPA and JavaScript acts to handle routes the server # doesn't. 
return send_file(os.path.join(build_dir, 'index.html')) @app.route('/static/js/<path:path>') def static_js_proxy(path: str) -> Response: # pylint: disable=unused-variable return send_from_directory(os.path.join(build_dir, 'static/js'), path) @app.route('/static/css/<path:path>') def static_css_proxy(path: str) -> Response: # pylint: disable=unused-variable return send_from_directory(os.path.join(build_dir, 'static/css'), path) @app.route('/static/media/<path:path>') def static_media_proxy(path: str) -> Response: # pylint: disable=unused-variable return send_from_directory(os.path.join(build_dir, 'static/media'), path) return app
def create_app(config=None, session=None, testing=False, app_name="Airflow"):
    """Create and configure the Airflow RBAC webserver application.

    :param config: unused here; kept for signature compatibility with callers
    :param session: optional SQLAlchemy session (defaults to the app's own)
    :param testing: value for the Flask ``TESTING`` flag
    :param app_name: display name stored under ``APP_NAME``
    :return: tuple of ``(flask_app, appbuilder)``
    """
    # Module-level singletons: other parts of airflow.www_rbac import these.
    global app, appbuilder
    app = Flask(__name__)
    # Honor X-Forwarded-* headers when deployed behind a reverse proxy.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.secret_key = conf.get('webserver', 'SECRET_KEY')

    # Optional per-deployment overrides from $AIRFLOW_HOME/webserver_config.py
    # (silent=True: the file is allowed to be absent).
    airflow_home_path = conf.get('core', 'AIRFLOW_HOME')
    webserver_config_path = airflow_home_path + '/webserver_config.py'
    app.config.from_pyfile(webserver_config_path, silent=True)
    app.config['APP_NAME'] = app_name
    app.config['TESTING'] = testing

    csrf.init_app(app)

    db = SQLA(app)

    from airflow import api
    api.load_auth()
    api.api_auth.init_app(app)

    # Filesystem-backed cache used by the view layer.
    cache = Cache(app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})  # noqa

    from airflow.www_rbac.blueprints import routes
    app.register_blueprint(routes)

    configure_logging()
    configure_manifest_files(app)

    with app.app_context():
        from airflow.www_rbac.security import AirflowSecurityManager

        # A deployment may substitute its own security manager, but it must
        # subclass Airflow's (not FAB's) so role syncing keeps working.
        security_manager_class = app.config.get('SECURITY_MANAGER_CLASS') or \
            AirflowSecurityManager

        if not issubclass(security_manager_class, AirflowSecurityManager):
            raise Exception(
                """Your CUSTOM_SECURITY_MANAGER must now extend AirflowSecurityManager, not FAB's security manager.""")

        appbuilder = AppBuilder(
            app,
            db.session if not session else session,
            security_manager_class=security_manager_class,
            base_template='appbuilder/baselayout.html')

        def init_views(appbuilder):
            # Register every Airflow view and menu link on AppBuilder.
            from airflow.www_rbac import views
            appbuilder.add_view_no_menu(views.Airflow())
            appbuilder.add_view_no_menu(views.DagModelView())
            appbuilder.add_view_no_menu(views.ConfigurationView())
            appbuilder.add_view_no_menu(views.VersionView())
            appbuilder.add_view(views.DagRunModelView,
                                "DAG Runs",
                                category="Browse",
                                category_icon="fa-globe")
            appbuilder.add_view(views.JobModelView, "Jobs", category="Browse")
            appbuilder.add_view(views.LogModelView, "Logs", category="Browse")
            appbuilder.add_view(views.SlaMissModelView,
                                "SLA Misses",
                                category="Browse")
            appbuilder.add_view(views.TaskInstanceModelView,
                                "Task Instances",
                                category="Browse")
            appbuilder.add_link("Configurations",
                                href='/configuration',
                                category="Admin",
                                category_icon="fa-user")
            appbuilder.add_view(views.ConnectionModelView,
                                "Connections",
                                category="Admin")
            appbuilder.add_view(views.PoolModelView, "Pools", category="Admin")
            appbuilder.add_view(views.VariableModelView,
                                "Variables",
                                category="Admin")
            appbuilder.add_view(views.XComModelView, "XComs", category="Admin")
            appbuilder.add_link("Documentation",
                                href='https://airflow.apache.org/',
                                category="Docs",
                                category_icon="fa-cube")
            appbuilder.add_link("Github",
                                href='https://github.com/apache/incubator-airflow',
                                category="Docs")
            appbuilder.add_link('Version',
                                href='/version',
                                category='About',
                                category_icon='fa-th')

        # Garbage collect old permissions/views after they have been modified.
        # Otherwise, when the name of a view or menu is changed, the framework
        # will add the new Views and Menus names to the backend, but will not
        # delete the old ones.
        init_views(appbuilder)

        security_manager = appbuilder.sm
        security_manager.sync_roles()

        from airflow.www_rbac.api.experimental import endpoints as e
        # required for testing purposes otherwise the module retains
        # a link to the default_auth
        if app.config['TESTING']:
            if six.PY2:
                reload(e)  # noqa
            else:
                import importlib
                importlib.reload(e)

        app.register_blueprint(e.api_experimental,
                               url_prefix='/api/experimental')

        @app.context_processor
        def jinja_globals():
            # Values injected into every rendered template.
            return {
                'hostname': socket.getfqdn(),
                'navbar_color': conf.get('webserver', 'NAVBAR_COLOR'),
            }

        @app.teardown_appcontext
        def shutdown_session(exception=None):
            # Return the scoped session to the registry after each request.
            settings.Session.remove()

    return app, appbuilder
def _sr_callback(self, start_response): def callback(status, headers, exc_info=None): # Call upstream start_response start_response(status, headers, exc_info) return callback app = Flask(__name__) if os.environ.get('ENABLE_CORS', config.ENABLE_CORS): cors = CORS(app, resources={r"*": {"origins": os.environ.get('CORS_ORIGINS', config.CORS_ORIGINS)}}) from werkzeug.contrib.fixers import ProxyFix app.wsgi_app = WSGIRawBody(ProxyFix(app.wsgi_app)) app.debug = config.DEBUG app.secret_key = config.FLASK_SESSION_SECRET_KEY app.root_path = os.path.abspath(os.path.dirname(__file__)) if config.BUGSNAG_KEY: import bugsnag from bugsnag.flask import handle_exceptions bugsnag.configure( api_key=config.BUGSNAG_KEY, project_root=app.root_path, # 'production' is a magic string for bugsnag, rest are arbitrary release_stage = config.REALM.replace("prod", "production"), notify_release_stages=["production", "test"], use_ssl = True
#라우팅이 설정된 함수의 URL을 얻기위해 실제 요청처럼 처리 with app.test_request_context(): print(url_for('hello_world')) print(url_for('show_user_profile', username='******')) print(url_for('login', next='/')) print(url_for('static', filename='style.css')) #ToDO ? with app.test_request_context('/hello', method='POST'): assert request.path == '/hello' assert request.method == 'POST' #ToDo ? # with app.request_context(environ): # assert request.method == 'POST' #TODO ? 메시지 플래싱? #로깅 app.logger.debug('A value for debugging') app.logger.warning('A warning occurred (%d apples)', 42) app.logger.error('An error occurred') #미들웨어에서 후킹 from werkzeug.contrib.fixers import LighttpdCGIRootFix app.wsgi_app = LighttpdCGIRootFix(app.wsgi_app) #실행 if __name__ == '__main__': app.run() # app.run(host='0.0.0.0')
app_host = '0.0.0.0' app_port = 10101 app_debug = False if not app_debug: from gevent import monkey monkey.patch_all() logger = logging.getLogger('server') db = pymongo.MongoClient(**config.mongo_kwargs)[config.mongo_db_name] fs = gridfs.GridFS(db) app = Flask(__name__) app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1) def default(obj): if isinstance(obj, datetime.datetime): return obj.ctime() if isinstance(obj, ObjectId): return str(obj) return obj def to_json(obj): return json.dumps(obj, indent=None, ensure_ascii=False, separators=(',', ':'),
from werkzeug.contrib.fixers import ProxyFix
from flask import Flask
import json
import os

# Initialize the application
app = Flask(__name__)
# NOTE(review): werkzeug.contrib was removed in Werkzeug 1.0; newer versions
# provide werkzeug.middleware.proxy_fix.ProxyFix instead -- confirm the
# pinned Werkzeug version before upgrading.
app.wsgi_app = ProxyFix(app.wsgi_app, num_proxies=1)

# Kept for backward compatibility: other modules may import `slash` from here.
from App.modules.helpers.helpers import determine_slash_type
slash = determine_slash_type()


# Get configuration
class Config:
    """Configuration values loaded from <app root>/config/config.json."""

    # IDIOM FIX: os.path.join picks the correct separator for the host OS,
    # replacing the hand-rolled slash interpolation that was here before
    # (behaviour is identical on every platform).
    with open(os.path.join(app.root_path, 'config', 'config.json')) as config_file:
        config = json.load(config_file)

    USERNAME = config.get('username')
    PASSWORD = config.get('password')
    SECRET_KEY = config.get('secret_key')
    GITHUB_OAUTH = config.get('github_oauth')


# Configure the application with config
app.secret_key = Config.SECRET_KEY
app.config.from_object(Config)
import socket import instana from instana.middleware import InstanaWSGIMiddleware os.system("npx openapi-comment-parser . app/swagger/openapi.json") app = Flask( __name__, static_folder=os.path.join(os.getcwd(), "app/swagger"), ) app.register_blueprint(health_blueprint) app.register_blueprint(hotels_blueprint, url_prefix="/api/v1/hotels") register_metrics(app) app.wsgi_app = DispatcherMiddleware(app.wsgi_app, {"/metrics": make_wsgi_app()}) app.wsgi_app = InstanaWSGIMiddleware(app.wsgi_app) @app.route("/api-docs", defaults={"path": ""}) @app.route("/<path:path>") def serve(path): if path != "" and os.path.exists(app.static_folder + "/" + path): return send_from_directory(app.static_folder, path) else: return send_from_directory(app.static_folder, "index.html") @app.route("/info", methods=["GET"]) def info():
def run(app: flask.Flask, *, host='127.0.0.1', port=None, debug=False, loop=None): """Run Flask application on aiohttp :param app: Flask application :param host: host name or ip :param port: port (default is 5000) :param debug: debug? """ # Check initialization status of flask app. if getattr(app, 'aiohttp_app', None) is None: raise RuntimeError( "This application is not initialized for Flask-aiohttp. " "Please initialize the app by `aio.init_app(app)`.") # Configure args if port is None: server_name = app.config['SERVER_NAME'] if server_name and ':' in server_name: port = int(server_name.rsplit(':', 1)[-1]) else: port = 5000 loop = loop or asyncio.get_event_loop() # Define run_server def run_server(): # run_server can be called in another thread asyncio.set_event_loop(loop) coroutine = loop.create_server(app.aiohttp_app.make_handler(), host, port) loop.run_until_complete(coroutine) try: loop.run_forever() except KeyboardInterrupt: pass # Configure logging file_handler = logging.StreamHandler() app.logger.setLevel(logging.INFO) app.logger.addHandler(file_handler) if debug: # Logging app.logger.setLevel(logging.DEBUG) # Wrap WSGI app with werkzeug debugger. app.wsgi_app = wrap_wsgi_middleware(DebuggedApplication)( app.wsgi_app) if os.environ.get('WERKZEUG_RUN_MAIN') != 'true': app.logger.info(' * Running on http://{}:{}/'.format( host, port)) # Run with reloader run_with_reloader(run_server) else: app.logger.info(' * Running on http://{}:{}/'.format(host, port)) run_server()
# Layered configuration: instance folder first, then an optional override
# file named by the APP_CONFIG_FILE environment variable.
if os.path.exists("instance/config.py"):
    app.config.from_pyfile('config.py')
if os.environ.get('APP_CONFIG_FILE', None):
    app.config.from_envvar('APP_CONFIG_FILE')

### Flask Configurations ####
app.secret_key = app.config["FLASK_SECRET_KEY"]

### Blueprints Configuration ###
app.register_blueprint(pages)

#### Set the correct application root ####
# IDIOM FIX: compare against None with `is not None` (PEP 8), not `!= None`.
if app.config["APPLICATION_ROOT"] != "" and app.config[
        "APPLICATION_ROOT"] is not None:
    # Mount the whole application under APPLICATION_ROOT (e.g. "/myapp").
    app.wsgi_app = DispatcherMiddleware(
        app.wsgi_app, {app.config["APPLICATION_ROOT"]: app.wsgi_app})
else:
    app.config["APPLICATION_ROOT"] = None

#### Proxy fix if your app is behind Proxy #####
if app.config["BEHIND_PROXY"]:
    # NOTE(review): werkzeug.contrib was removed in Werkzeug 1.0; newer
    # versions provide werkzeug.middleware.proxy_fix.ProxyFix instead --
    # confirm the pinned Werkzeug version before upgrading.
    from werkzeug.contrib.fixers import ProxyFix
    app.wsgi_app = ProxyFix(app.wsgi_app)


@app.errorhandler(500)
def error(e):
    """Render the friendly error page for unhandled server errors."""
    return render_template('error.html'), 500
def create_app(): app = Flask(__name__) app.wsgi_app = ProxyFix(app.wsgi_app) app.config.from_pyfile('config.py') db.init_app(app) migrate = Migrate(app, db) app.register_blueprint(settings, url_prefix='/settings') login_manager = LoginManager() login_manager.init_app(app) login_manager.login_view = 'login' @login_manager.user_loader def load_user(user_id): return Users.query.get(user_id) @app.route('/') @login_required def index(): title = 'Photo Analyzer' if current_user.is_authenticated: user = current_user.login else: user = '******' return render_template('index.html', page_title=title, user=user) @app.route('/login') def login(): if current_user.is_authenticated: return redirect(url_for('index')) title = 'Авторизация' login_form = LoginForm() return render_template('login.html', page_title=title, form=login_form) @app.route('/process-login', methods=['POST']) def process_login(): form = LoginForm() if form.validate_on_submit(): user = Users.query.filter(Users.login == form.username.data).first() if user and user.check_password(form.password.data): login_user(user) flash('Вы успешно зашли на сайт') return redirect(url_for('index')) flash('Ошибка входа') return redirect(url_for('login')) @app.route('/logout') def logout(): logout_user() flash('Вы успешно разлогинились') return redirect(url_for('index')) @app.route('/search') def search(): class_list = [] # Получаем список классов фильтра из GET-запроса for cl in classes_dict_list: if request.args.get(cl['label'], 'false') != 'false': class_list.append(cl['label']) len_class_list = len(class_list) app.logger.info('classes=%s (%s)', class_list, len_class_list) # Получаем даты начала и конца периода создания фотографии из GET-запроса start_date = request.args.get('start_date', '1970-01-01') end_date = request.args.get('end_date', '3000-01-01') app.logger.info('start_date=%s', start_date) app.logger.info('end_date=%s', end_date) # Получаем значения радио-переключателя И/ИЛИ or_radio = request.args.get('or_radio', 
'true') and_radio = request.args.get('and_radio', 'false') app.logger.info('or_radio=%s', or_radio) app.logger.info('and_radio=%s', and_radio) # Заполняем пустые значения границ периода if start_date != '': start_date = datetime.strptime(start_date, '%Y-%m-%d') else: start_date = datetime.strptime('1970-01-01', '%Y-%m-%d') if end_date != '': end_date = datetime.strptime(end_date, '%Y-%m-%d') else: end_date = datetime.strptime('3000-01-01', '%Y-%m-%d') try: current_user_pref = UserPreferences.query.filter(UserPreferences.user_id==current_user.id).first() threshold = current_user_pref.classification_threshold # Исключаем ошибку при получении целых значений порога отнесения if threshold > 1: threshold = threshold / 100 print(start_date, end_date, threshold) print(type(start_date), type(end_date), type(threshold)) # Формируем запрос в базу if or_radio == 'true': sub = db.session.query(db.func.max(Algorithms.create_date).label('max_date')).subquery() selected_photos = db.session.query((Photos.id).label("id"), (Photos.name).label("name"), \ (Photos.path).label("folder_path")). \ join(photosclasses, Classes, Folders, StorageUsers, Users, Algorithms). \ filter(photosclasses.c.weight>threshold, Classes.name.in_(class_list), \ Users.id==current_user.id, Algorithms.create_date==sub.c.max_date, \ Photos.create_date.between(start_date, end_date)). \ distinct(). \ all() else: sub = db.session.query(db.func.max(Algorithms.create_date).label('max_date')).subquery() selected_photos = db.session.query((Photos.id).label("id"), (Photos.name).label("name"), \ (Photos.path).label("folder_path")). \ join(photosclasses, Classes, Folders, StorageUsers, Users, Algorithms). \ filter(photosclasses.c.weight>threshold, Classes.name.in_(class_list), \ Users.id==current_user.id, Algorithms.create_date==sub.c.max_date, \ Photos.create_date.between(start_date, end_date)). 
\ distinct().group_by(Photos.id, Photos.name, Photos.path).\ having(func.count(Classes.name) == len_class_list).all() print(selected_photos) # Формируем html текст с результатами поиска в базе ph_list = [] for ph in selected_photos: # Получаем id классов для каждый из полученных фотографий ph_cl = db.session.query((photosclasses.c.class_id).label("ph_class")). \ filter(photosclasses.c.photo_id==ph.id, photosclasses.c.weight>threshold).order_by(photosclasses.c.class_id.desc()).all() # Формируем HTML-строку для списка ph_str = '<li class="list-group-item"><a href="https://www.dropbox.com/preview{2}?personal" target="_blank">{0} {1}</a>\n' \ .format( str(ph.id), #0 str(ph.name), #1 #str(ph.class_name), #2 #str(ph.weight), #3 str(ph.folder_path) #4 ) # Добавляем цветные лейблы классов for cl in ph_cl: ph_str += '<div class="{0} float-right">{1}</div>'.format(classes_dict_list[cl.ph_class-1]['bootstrap_class'], classes_dict_list[cl.ph_class-1]['label']) ph_str += '</li>' ph_list.append(ph_str) #print(ph_list) # Возвращаем в ajax результат поиска в базе return jsonify(result=ph_list) except Exception as e: print(e) return jsonify(result='error') return app
def create_app(config=None, session=None, testing=False, app_name="Airflow"):
    """Build and configure the Airflow webserver Flask application.

    :param config: optional mapping merged into ``app.config`` after the
        webserver config file has been loaded
    :param session: optional SQLAlchemy session handed to the AppBuilder
        (defaults to the app's own ``db.session``)
    :param testing: value stored in ``app.config['TESTING']``
    :param app_name: display name stored in ``app.config['APP_NAME']``
    :return: an ``(app, appbuilder)`` tuple; both objects are also
        published through the module-level globals of the same names
    """
    global app, appbuilder
    app = Flask(__name__)
    # Honour X-Forwarded-* headers when running behind a reverse proxy.
    if conf.getboolean('webserver', 'ENABLE_PROXY_FIX'):
        app.wsgi_app = ProxyFix(
            app.wsgi_app,
            num_proxies=None,
            x_for=1,
            x_proto=1,
            x_host=1,
            x_port=1,
            x_prefix=1
        )
    app.secret_key = conf.get('webserver', 'SECRET_KEY')
    app.config.from_pyfile(settings.WEBSERVER_CONFIG, silent=True)
    app.config['APP_NAME'] = app_name
    app.config['TESTING'] = testing
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    # Session-cookie hardening (HTTPOnly always; Secure/SameSite from config).
    app.config['SESSION_COOKIE_HTTPONLY'] = True
    app.config['SESSION_COOKIE_SECURE'] = conf.getboolean('webserver', 'COOKIE_SECURE')
    app.config['SESSION_COOKIE_SAMESITE'] = conf.get('webserver', 'COOKIE_SAMESITE')

    # Caller-supplied overrides win over the config file.
    if config:
        app.config.from_mapping(config)

    # Configure the JSON encoder used by `|tojson` filter from Flask
    app.json_encoder = AirflowJsonEncoder

    csrf.init_app(app)

    db = SQLA(app)

    from airflow import api
    api.load_auth()
    api.API_AUTH.api_auth.init_app(app)

    Cache(app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})

    from airflow.www.blueprints import routes
    app.register_blueprint(routes)

    configure_logging()
    configure_manifest_files(app)

    with app.app_context():
        from airflow.www.security import AirflowSecurityManager
        # A custom security manager may be configured, but it must extend
        # Airflow's own security manager (not FAB's).
        security_manager_class = app.config.get('SECURITY_MANAGER_CLASS') or \
            AirflowSecurityManager

        if not issubclass(security_manager_class, AirflowSecurityManager):
            raise Exception(
                """Your CUSTOM_SECURITY_MANAGER must now extend AirflowSecurityManager, not FAB's security manager.""")

        appbuilder = AppBuilder(
            app,
            db.session if not session else session,
            security_manager_class=security_manager_class,
            base_template='appbuilder/baselayout.html')

        def init_views(appbuilder):
            # Register all built-in views and menu links, plus any views
            # contributed by plugins, on the AppBuilder instance.
            from airflow.www import views
            # Remove the session from scoped_session registry to avoid
            # reusing a session with a disconnected connection
            appbuilder.session.remove()
            appbuilder.add_view_no_menu(views.Airflow())
            appbuilder.add_view_no_menu(views.DagModelView())
            appbuilder.add_view_no_menu(views.ConfigurationView())
            appbuilder.add_view_no_menu(views.VersionView())
            appbuilder.add_view(views.DagRunModelView, "DAG Runs", category="Browse", category_icon="fa-globe")
            appbuilder.add_view(views.JobModelView, "Jobs", category="Browse")
            appbuilder.add_view(views.LogModelView, "Logs", category="Browse")
            appbuilder.add_view(views.SlaMissModelView, "SLA Misses", category="Browse")
            appbuilder.add_view(views.TaskInstanceModelView, "Task Instances", category="Browse")
            appbuilder.add_link("Configurations", href='/configuration', category="Admin", category_icon="fa-user")
            appbuilder.add_view(views.ConnectionModelView, "Connections", category="Admin")
            appbuilder.add_view(views.PoolModelView, "Pools", category="Admin")
            appbuilder.add_view(views.VariableModelView, "Variables", category="Admin")
            appbuilder.add_view(views.XComModelView, "XComs", category="Admin")
            appbuilder.add_link("Documentation", href='https://airflow.apache.org/', category="Docs", category_icon="fa-cube")
            appbuilder.add_link("GitHub", href='https://github.com/apache/airflow', category="Docs")
            appbuilder.add_link('Version', href='/version', category='About', category_icon='fa-th')

            def integrate_plugins():
                """Integrate plugins to the context"""
                from airflow.plugins_manager import (
                    flask_appbuilder_views, flask_appbuilder_menu_links
                )
                for v in flask_appbuilder_views:
                    log.debug("Adding view %s", v["name"])
                    appbuilder.add_view(v["view"],
                                        v["name"],
                                        category=v["category"])
                # Menu links are added in a stable (name-sorted) order.
                for ml in sorted(flask_appbuilder_menu_links, key=lambda x: x["name"]):
                    log.debug("Adding menu link %s", ml["name"])
                    appbuilder.add_link(ml["name"],
                                        href=ml["href"],
                                        category=ml["category"],
                                        category_icon=ml["category_icon"])

            integrate_plugins()

            # Garbage collect old permissions/views after they have been modified.
            # Otherwise, when the name of a view or menu is changed, the framework
            # will add the new Views and Menus names to the backend, but will not
            # delete the old ones.

        def init_plugin_blueprints(app):
            # Register blueprints contributed by Airflow plugins.
            from airflow.plugins_manager import flask_blueprints

            for bp in flask_blueprints:
                log.debug("Adding blueprint %s:%s", bp["name"], bp["blueprint"].import_name)
                app.register_blueprint(bp["blueprint"])

        init_views(appbuilder)
        init_plugin_blueprints(app)

        security_manager = appbuilder.sm
        security_manager.sync_roles()

        from airflow.www.api.experimental import endpoints as e
        # required for testing purposes otherwise the module retains
        # a link to the default_auth
        if app.config['TESTING']:
            import importlib
            importlib.reload(e)

        app.register_blueprint(e.api_experimental, url_prefix='/api/experimental')

        @app.context_processor
        def jinja_globals():  # pylint: disable=unused-variable
            # Values made available to every rendered template.
            globals = {
                'hostname': socket.getfqdn(),
                'navbar_color': conf.get('webserver', 'NAVBAR_COLOR'),
            }

            if 'analytics_tool' in conf.getsection('webserver'):
                globals.update({
                    'analytics_tool': conf.get('webserver', 'ANALYTICS_TOOL'),
                    'analytics_id': conf.get('webserver', 'ANALYTICS_ID')
                })

            return globals

        @app.teardown_appcontext
        def shutdown_session(exception=None):  # pylint: disable=unused-variable
            # Return the scoped session to the registry at app-context teardown.
            settings.Session.remove()

    return app, appbuilder
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Scheme $scheme; proxy_set_header X-Script-Name /myprefix; } :param app: the WSGI application ''' def __init__(self, app): self.app = app def __call__(self, environ, start_response): script_name = environ.get('HTTP_X_SCRIPT_NAME', '') if script_name: environ['SCRIPT_NAME'] = script_name path_info = environ['PATH_INFO'] if path_info.startswith(script_name): environ['PATH_INFO'] = path_info[len(script_name):] scheme = environ.get('HTTP_X_SCHEME', '') if scheme: environ['wsgi.url_scheme'] = scheme return self.app(environ, start_response) application = app # patch app to handle non root url-s behind proxy & wsgi app.wsgi_app = ReverseProxyPathFix(ProxyFix(application.wsgi_app)) if __name__ == "__main__": run()
from flask import Flask, jsonify from whitenoise import WhiteNoise app = Flask(__name__) app.wsgi_app = WhiteNoise( app.wsgi_app, root="static/", index_file=True, autorefresh=True ) if __name__ == "__main__": app.run()
return make_response("", 404) @app.route("/ssp.v1/tenant-ports/<tenant_port_uuid>", methods=[ "PUT", ]) def update_tenant_port(tenant_port_uuid): if "login" not in request.environ["beaker.session"]: return make_response("", 401) for port in tenant_ports: if port["uuid"] == tenant_port_uuid: obj = request.json obj["uuid"] = tenant_port_uuid obj["vlan_id"] = 100 tenant_ports.remove(port) tenant_ports.append(obj) res = make_response(json.dumps(obj), 200) res.headers["Content-type"] = "application/json" return res return make_response("", 404) if __name__ == "__main__": app.wsgi_app = SessionMiddleware( app.wsgi_app, { "session.auto": True, "session.type": "cookie", "session.validate_key": "hoge" }) app.run(host="0.0.0.0", port=9080, debug=True)
app.register_blueprint(sites_blueprint, url_prefix='/sites') app.register_blueprint(wqx, url_prefix='/portal/schemas') # Set up swagger endpoints @app.route('/spec') def spec(): host = request.url_root.rstrip('/').replace(app.config['WSGI_STR'], '') return jsonify( swagger(app, from_file_keyword="swagger_from_file", template={ "host": host.replace('http://', ''), "info": { "version": "1.0", "title": "WQP Sites service" } })) # Create swagger ui blueprint SWAGGER_URL = '/apidocs' API_VIEW_FUNC = 'spec' swaggerui_blueprint = get_swaggerui_blueprint(api_view_func=API_VIEW_FUNC) app.register_blueprint(swaggerui_blueprint, url_prefix=SWAGGER_URL) app.wsgi_app = WhiteNoise(app.wsgi_app, root='/home/python/assets', prefix='static/')
def __call__(self, environ, start_response): stream = LimitedStream(environ['wsgi.input'], int(environ['CONTENT_LENGTH'] or 1)) environ['wsgi.input'] = stream app_iter = self.app(environ, start_response) try: stream.exhaust() for event in app_iter: yield event finally: if hasattr(app_iter, 'close'): app_iter.close() app.config['UPLOAD_FOLDER'] = 'static/Uploads' app.wsgi_app = StreamConsumingMiddleware(app.wsgi_app) @app.route('/') def main(): return render_template('index.html') @app.route('/upload', methods=['GET', 'POST']) def upload(): if request.method == 'POST': file = request.files['file'] extension = os.path.splitext(file.filename)[1] f_name = str(uuid.uuid4()) + extension file.save(os.path.join(app.config['UPLOAD_FOLDER'], f_name)) return json.dumps({'filename':f_name})
from flask import Flask from flask_sqlalchemy import SQLAlchemy from sqlalchemy_imageattach.stores.fs import HttpExposedFileSystemStore import config app = Flask(__name__) app.config.from_object('config') db = SQLAlchemy(app) image_store = HttpExposedFileSystemStore(path=config.IMAGE_STORE_PATH, prefix=config.IMAGE_STORE_PREFIX) app.wsgi_app = image_store.wsgi_middleware(app.wsgi_app) # noinspection PyUnresolvedReferences from flaskbook.orm import models # noinspection PyUnresolvedReferences from flaskbook.ui import views
result = predict(file_path) label = folderList[result] print(result) print(file_path) filename = my_random_string(6) + filename os.rename(file_path, os.path.join(app.config['UPLOAD_FOLDER'], filename)) print("--- %s seconds ---" % str(time.time() - start_time)) return render_template('template.html', label=label, imagesource='../uploads/' + filename) from flask import send_from_directory @app.route('/uploads/<filename>') def uploaded_file(filename): return send_from_directory(app.config['UPLOAD_FOLDER'], filename) from werkzeug import SharedDataMiddleware app.add_url_rule('/uploads/<filename>', 'uploaded_file', build_only=True) app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {'/uploads': app.config['UPLOAD_FOLDER']}) if __name__ == "__main__": app.debug = False app.run(debug=True)
from flask_assets import Environment, Bundle # type: ignore from flask_compress import Compress # type: ignore from flask_cors import CORS # type: ignore from flask_talisman import Talisman # type: ignore from os import getenv from werkzeug.middleware.proxy_fix import ProxyFix from .core import core_app, migrations as core_migrations from .helpers import ExtensionManager from .settings import FORCE_HTTPS disabled_extensions = getenv("LNBITS_DISABLED_EXTENSIONS", "").split(",") app = Flask(__name__) app.wsgi_app = ProxyFix(app.wsgi_app, x_proto=1, x_host=1) # type: ignore valid_extensions = [ext for ext in ExtensionManager(disabled=disabled_extensions).extensions if ext.is_valid] # optimization & security # ----------------------- Compress(app) CORS(app) Talisman( app, force_https=FORCE_HTTPS, content_security_policy={ "default-src": ["'self'", "'unsafe-eval'", "'unsafe-inline'", "blob:", "api.opennode.co",] }, )