Example #1
def make_app(build_dir: Optional[str] = None,
             demo_db: Optional[DemoDatabase] = None) -> Flask:
    if build_dir is None:
        build_dir = os.path.join(DEMO_DIR, 'build')

    if not os.path.exists(build_dir):
        logger.error("app directory %s does not exist, aborting", build_dir)
        sys.exit(-1)

    app = Flask(__name__)  # pylint: disable=invalid-name
    start_time = datetime.now(pytz.utc)
    start_time_str = start_time.strftime("%Y-%m-%d %H:%M:%S %Z")

    app.predictors = {}
    app.wsgi_app = ProxyFix(
        app.wsgi_app)  # sets the requester IP with the X-Forwarded-For header

    try:
        cache_size = int(CACHE_SIZE)  # type: ignore
    except ValueError:
        logger.warning("unable to parse cache size %s as int, disabling cache",
                       CACHE_SIZE)
        cache_size = 0

    @app.errorhandler(ServerError)
    def handle_invalid_usage(error: ServerError) -> Response:  # pylint: disable=unused-variable
        response = jsonify(error.to_dict())
        response.status_code = error.status_code
        return response

    @lru_cache(maxsize=cache_size)
    def _caching_prediction(model: Predictor, data: str) -> JsonDict:
        """
        Just a wrapper around ``model.predict_json`` that allows us to use a cache decorator.
        """
        return model.predict_json(json.loads(data))

    @app.route('/')
    def index() -> Response:  # pylint: disable=unused-variable
        return send_file(os.path.join(build_dir, 'index.html'))

    @app.route('/permadata', methods=['POST', 'OPTIONS'])
    def permadata() -> Response:  # pylint: disable=unused-variable
        """
        If the user requests a permalink, the front end will POST here with the payload
            { slug: slug }
        which we convert to an integer id and use to retrieve saved results from the database.
        """
        # This is just CORS boilerplate.
        if request.method == "OPTIONS":
            return Response(response="", status=200)

        # If we don't have a database configured, there are no permalinks.
        if demo_db is None:
            raise ServerError('Permalinks are not enabled', 400)

        # Convert the provided slug to an integer id.
        slug = request.get_json()["slug"]
        perma_id = slug_to_int(slug)
        if perma_id is None:
            # Malformed slug
            raise ServerError("Unrecognized permalink: {}".format(slug), 400)

        # Fetch the results from the database.
        try:
            permadata = demo_db.get_result(perma_id)
        except psycopg2.Error:
            logger.exception(
                "Unable to get results from database: perma_id %s", perma_id)
            raise ServerError('Database trouble', 500)

        if permadata is None:
            # No data found, invalid id?
            raise ServerError("Unrecognized permalink: {}".format(slug), 400)

        return jsonify({
            "modelName": permadata.model_name,
            "requestData": permadata.request_data,
            "responseData": permadata.response_data
        })

    @app.route('/predict/<model_name>', methods=['POST', 'OPTIONS'])
    def predict(model_name: str) -> Response:  # pylint: disable=unused-variable
        """make a prediction using the specified model and return the results"""
        if request.method == "OPTIONS":
            return Response(response="", status=200)

        # Do log if no argument is specified
        record_to_database = request.args.get("record",
                                              "true").lower() != "false"

        # Do use the cache if no argument is specified
        use_cache = request.args.get("cache", "true").lower() != "false"

        model = app.predictors.get(model_name.lower())
        if model is None:
            raise ServerError("unknown model: {}".format(model_name),
                              status_code=400)

        data = request.get_json()

        log_blob = {
            "model": model_name,
            "inputs": data,
            "cached": False,
            "outputs": {}
        }

        # Record the number of cache hits before we hit the cache so we can tell whether we hit or not.
        # In theory this could result in false positives.
        pre_hits = _caching_prediction.cache_info().hits  # pylint: disable=no-value-for-parameter

        if use_cache and cache_size > 0:
            # lru_cache insists that all function arguments be hashable,
            # so unfortunately we have to stringify the data.
            prediction = _caching_prediction(model, json.dumps(data))
        else:
            # if cache_size is 0, skip caching altogether
            prediction = model.predict_json(data)

        post_hits = _caching_prediction.cache_info().hits  # pylint: disable=no-value-for-parameter

        if record_to_database and demo_db is not None:
            try:
                perma_id = None
                perma_id = demo_db.add_result(headers=dict(request.headers),
                                              requester=request.remote_addr,
                                              model_name=model_name,
                                              inputs=data,
                                              outputs=prediction)
                if perma_id is not None:
                    slug = int_to_slug(perma_id)
                    prediction["slug"] = slug
                    log_blob["slug"] = slug

            except Exception:  # pylint: disable=broad-except
                # TODO(joelgrus): catch more specific errors
                logger.exception("Unable to add result to database",
                                 exc_info=True)

        if use_cache and post_hits > pre_hits:
            # Cache hit, so insert an artificial pause
            log_blob["cached"] = True
            time.sleep(0.25)

        # The model predictions are extremely verbose, so we only log the most human-readable
        # parts of them.
        if model_name == "machine-comprehension":
            log_blob["outputs"]["best_span_str"] = prediction["best_span_str"]
        elif model_name == "coreference-resolution":
            log_blob["outputs"]["clusters"] = prediction["clusters"]
            log_blob["outputs"]["document"] = prediction["document"]
        elif model_name == "textual-entailment":
            log_blob["outputs"]["label_probs"] = prediction["label_probs"]
        elif model_name == "named-entity-recognition":
            log_blob["outputs"]["tags"] = prediction["tags"]
        elif model_name == "semantic-role-labeling":
            verbs = []
            for verb in prediction["verbs"]:
                # Don't want to log boring verbs with no semantic parses.
                good_tags = [tag for tag in verb["tags"] if tag != "O"]
                if len(good_tags) > 1:
                    verbs.append({
                        "verb": verb["verb"],
                        "description": verb["description"]
                    })
            log_blob["outputs"]["verbs"] = verbs

        elif model_name == "constituency-parsing":
            log_blob["outputs"]["trees"] = prediction["trees"]
        elif model_name == "wikitables-parser":
            log_blob['outputs']['logical_form'] = prediction['logical_form']
            log_blob['outputs']['answer'] = prediction['answer']
        elif model_name == "quarel-parser-zero":
            log_blob['outputs']['logical_form'] = prediction['logical_form']
            log_blob['outputs']['answer'] = prediction['answer']
            log_blob['outputs']['score'] = prediction['score']
        elif model_name == "nlvr-parser":
            log_blob['outputs']['logical_form'] = prediction['logical_form'][0]
            log_blob['outputs']['answer'] = prediction['denotations'][0][0]
        elif model_name == "atis-parser":
            log_blob['outputs']['predicted_sql_query'] = prediction[
                'predicted_sql_query']
        # TODO(brendanr): Add event2mind log_blob here?

        logger.info("prediction: %s", json.dumps(log_blob))

        return jsonify(prediction)

    @app.route('/models')
    def list_models() -> Response:  # pylint: disable=unused-variable
        """list the available models"""
        return jsonify({"models": list(app.predictors.keys())})

    @app.route('/info')
    def info() -> Response:  # pylint: disable=unused-variable
        """List metadata about the running webserver"""
        uptime = str(datetime.now(pytz.utc) - start_time)
        git_version = os.environ.get('ALLENNLP_DEMO_SOURCE_COMMIT') or ""
        return jsonify({
            "start_time": start_time_str,
            "uptime": uptime,
            "git_version": git_version,
            "peak_memory_mb": peak_memory_mb(),
            "githubUrl": "http://github.com/allenai/allennlp-demo/commit/" + git_version
        })

    # As an SPA, we need to return index.html for /model-name and /model-name/permalink.
    @app.route('/open-information-extraction')
    @app.route('/semantic-role-labeling')
    @app.route('/constituency-parsing')
    @app.route('/dependency-parsing')
    @app.route('/machine-comprehension')
    @app.route('/textual-entailment')
    @app.route('/coreference-resolution')
    @app.route('/named-entity-recognition')
    @app.route('/fine-grained-named-entity-recognition')
    @app.route('/wikitables-parser')
    @app.route('/quarel-parser-zero')
    @app.route('/event2mind')
    @app.route('/open-information-extraction/<permalink>')
    @app.route('/atis-parser')
    @app.route('/semantic-role-labeling/<permalink>')
    @app.route('/constituency-parsing/<permalink>')
    @app.route('/dependency-parsing/<permalink>')
    @app.route('/machine-comprehension/<permalink>')
    @app.route('/textual-entailment/<permalink>')
    @app.route('/coreference-resolution/<permalink>')
    @app.route('/named-entity-recognition/<permalink>')
    @app.route('/event2mind/<permalink>')
    @app.route('/wikitables-parser/<permalink>')
    @app.route('/quarel-parser-zero/<permalink>')
    @app.route('/atis-parser/<permalink>')
    def return_page(permalink: str = None) -> Response:  # pylint: disable=unused-argument, unused-variable
        """return the page"""
        return send_file(os.path.join(build_dir, 'index.html'))

    @app.route('/<path:path>')
    def static_proxy(path: str) -> Response:  # pylint: disable=unused-variable
        return send_from_directory(build_dir, path)

    @app.route('/static/js/<path:path>')
    def static_js_proxy(path: str) -> Response:  # pylint: disable=unused-variable
        return send_from_directory(os.path.join(build_dir, 'static/js'), path)

    @app.route('/static/css/<path:path>')
    def static_css_proxy(path: str) -> Response:  # pylint: disable=unused-variable
        return send_from_directory(os.path.join(build_dir, 'static/css'), path)

    return app
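
The cache-hit detection used in Example #1 (comparing cache_info().hits before and after the call) can be shown in isolation. A minimal sketch using only the standard library; the function and argument names are illustrative, not taken from the example:

from functools import lru_cache

@lru_cache(maxsize=128)
def expensive(data: str) -> str:
    # stand-in for model.predict_json on stringified input
    return data.upper()

def call_and_report(data: str) -> str:
    pre_hits = expensive.cache_info().hits
    result = expensive(data)
    post_hits = expensive.cache_info().hits
    # post_hits > pre_hits only if this exact argument was served from the cache
    print("cached" if post_hits > pre_hits else "computed")
    return result

call_and_report("abc")  # computed
call_and_report("abc")  # cached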
Example #2
import os
from werkzeug.serving import run_simple
from werkzeug.wsgi import DispatcherMiddleware
from werkzeug.contrib.fixers import ProxyFix
from xinshangjia import frontend


if "PSYCOGREEN" in os.environ:

    # Do our monkey patching
    #
    from gevent.monkey import patch_all

    patch_all()
    from psycogreen.gevent import patch_psycopg

    patch_psycopg()

frontend_app = frontend.create_app()

application = ProxyFix(DispatcherMiddleware(frontend_app,
                                            {
                                                '/xinshangjia': frontend_app,
                                            }))

if __name__ == "__main__":
    run_simple("0.0.0.0", 7100, application, use_debugger=True)
Example #3
def create_app(config=None, testing=False):

    log = LoggingMixin().log

    app = Flask(__name__)
    if configuration.conf.getboolean('webserver', 'ENABLE_PROXY_FIX'):
        app.wsgi_app = ProxyFix(app.wsgi_app)
    app.secret_key = configuration.conf.get('webserver', 'SECRET_KEY')
    app.config['LOGIN_DISABLED'] = not configuration.conf.getboolean(
        'webserver', 'AUTHENTICATE')

    csrf.init_app(app)

    app.config['TESTING'] = testing

    airflow.load_login()
    airflow.login.login_manager.init_app(app)

    from airflow import api
    api.load_auth()
    api.api_auth.init_app(app)

    # flake8: noqa: F841
    cache = Cache(app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})

    app.register_blueprint(routes)

    configure_logging()

    with app.app_context():
        from airflow.www import views

        admin = Admin(
            app, name='Airflow',
            static_url_path='/admin',
            index_view=views.HomeView(endpoint='', url='/admin', name="DAGs"),
            template_mode='bootstrap3',
        )
        av = admin.add_view
        vs = views
        av(vs.Airflow(name='DAGs', category='DAGs'))

        if not conf.getboolean('core', 'secure_mode'):
            av(vs.QueryView(name='Ad Hoc Query', category="Data Profiling"))
            av(vs.ChartModelView(
                models.Chart, Session, name="Charts", category="Data Profiling"))
        av(vs.SlaMissModelView(
            models.SlaMiss,
            Session, name="SLA Misses", category="Browse"))
        av(vs.TaskInstanceModelView(models.TaskInstance,
            Session, name="Task Instances", category="Browse"))
        av(vs.LogModelView(
            models.Log, Session, name="Logs", category="Browse"))
        av(vs.JobModelView(
            jobs.BaseJob, Session, name="Jobs", category="Browse"))
        av(vs.PoolModelView(
            models.Pool, Session, name="Pools", category="Admin"))
        av(vs.ConfigurationView(
            name='Configuration', category="Admin"))
        av(vs.UserModelView(
            models.User, Session, name="Users", category="Admin"))
        av(vs.ConnectionModelView(
            Connection, Session, name="Connections", category="Admin"))
        av(vs.VariableView(
            models.Variable, Session, name="Variables", category="Admin"))
        av(vs.XComView(
            models.XCom, Session, name="XComs", category="Admin"))

        admin.add_link(base.MenuLink(
            category='Docs', name='Documentation',
            url='https://airflow.apache.org/'))
        admin.add_link(
            base.MenuLink(category='Docs',
                          name='Github',
                          url='https://github.com/apache/airflow'))

        av(vs.VersionView(name='Version', category="About"))

        av(vs.DagRunModelView(
            models.DagRun, Session, name="DAG Runs", category="Browse"))
        av(vs.DagModelView(models.DagModel, Session, name=None))
        # Hack to not add this view to the menu
        admin._menu = admin._menu[:-1]

        def integrate_plugins():
            """Integrate plugins to the context"""
            from airflow.plugins_manager import (
                admin_views, flask_blueprints, menu_links)
            for v in admin_views:
                log.debug('Adding view %s', v.name)
                admin.add_view(v)
            for bp in flask_blueprints:
                log.debug('Adding blueprint %s', bp.name)
                app.register_blueprint(bp)
            for ml in sorted(menu_links, key=lambda x: x.name):
                log.debug('Adding menu link %s', ml.name)
                admin.add_link(ml)

        integrate_plugins()

        import airflow.www.api.experimental.endpoints as e
        # required for testing purposes otherwise the module retains
        # a link to the default_auth
        if app.config['TESTING']:
            six.moves.reload_module(e)

        app.register_blueprint(e.api_experimental, url_prefix='/api/experimental')

        @app.context_processor
        def jinja_globals():
            return {
                'hostname': get_hostname(),
                'navbar_color': configuration.get('webserver', 'NAVBAR_COLOR'),
            }

        @app.teardown_appcontext
        def shutdown_session(exception=None):
            settings.Session.remove()

        return app
Example #4
    def init_app(cls, app):
        ProductionConfig.init_app(app)

        # Handle proxy server headers
        from werkzeug.contrib.fixers import ProxyFix
        app.wsgi_app = ProxyFix(app.wsgi_app)
Example #5
def create_app(config=None, session=None, testing=False, app_name="Airflow"):
    global app, appbuilder
    app = Flask(__name__)
    if conf.getboolean('webserver', 'ENABLE_PROXY_FIX'):
        app.wsgi_app = ProxyFix(app.wsgi_app)
    app.secret_key = conf.get('webserver', 'SECRET_KEY')

    airflow_home_path = conf.get('core', 'AIRFLOW_HOME')
    webserver_config_path = airflow_home_path + '/webserver_config.py'
    app.config.from_pyfile(webserver_config_path, silent=True)
    app.config['APP_NAME'] = app_name
    app.config['TESTING'] = testing
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

    app.config['SESSION_COOKIE_HTTPONLY'] = True
    app.config['SESSION_COOKIE_SECURE'] = conf.getboolean(
        'webserver', 'COOKIE_SECURE')
    app.config['SESSION_COOKIE_SAMESITE'] = conf.get('webserver',
                                                     'COOKIE_SAMESITE')

    if config:
        app.config.from_mapping(config)

    csrf.init_app(app)

    db = SQLA(app)

    from airflow import api
    api.load_auth()
    api.api_auth.init_app(app)

    # flake8: noqa: F841
    cache = Cache(app=app,
                  config={
                      'CACHE_TYPE': 'filesystem',
                      'CACHE_DIR': '/tmp'
                  })

    from airflow.www.blueprints import routes
    app.register_blueprint(routes)

    configure_logging()
    configure_manifest_files(app)

    with app.app_context():

        from airflow.www.security import AirflowSecurityManager
        security_manager_class = app.config.get('SECURITY_MANAGER_CLASS') or \
            AirflowSecurityManager

        if not issubclass(security_manager_class, AirflowSecurityManager):
            raise Exception(
                """Your CUSTOM_SECURITY_MANAGER must now extend AirflowSecurityManager,
                 not FAB's security manager.""")

        appbuilder = AppBuilder(app,
                                db.session if not session else session,
                                security_manager_class=security_manager_class,
                                base_template='appbuilder/baselayout.html')

        def init_views(appbuilder):
            from airflow.www import views
            appbuilder.add_view_no_menu(views.Airflow())
            appbuilder.add_view_no_menu(views.DagModelView())
            appbuilder.add_view_no_menu(views.ConfigurationView())
            appbuilder.add_view_no_menu(views.VersionView())
            appbuilder.add_view(views.DagRunModelView,
                                "DAG Runs",
                                category="Browse",
                                category_icon="fa-globe")
            appbuilder.add_view(views.JobModelView, "Jobs", category="Browse")
            appbuilder.add_view(views.LogModelView, "Logs", category="Browse")
            appbuilder.add_view(views.SlaMissModelView,
                                "SLA Misses",
                                category="Browse")
            appbuilder.add_view(views.TaskInstanceModelView,
                                "Task Instances",
                                category="Browse")
            appbuilder.add_link("Configurations",
                                href='/configuration',
                                category="Admin",
                                category_icon="fa-user")
            appbuilder.add_view(views.ConnectionModelView,
                                "Connections",
                                category="Admin")
            appbuilder.add_view(views.PoolModelView, "Pools", category="Admin")
            appbuilder.add_view(views.VariableModelView,
                                "Variables",
                                category="Admin")
            appbuilder.add_view(views.XComModelView, "XComs", category="Admin")
            appbuilder.add_link("Documentation",
                                href='https://airflow.apache.org/',
                                category="Docs",
                                category_icon="fa-cube")
            appbuilder.add_link("GitHub",
                                href='https://github.com/apache/airflow',
                                category="Docs")
            appbuilder.add_link('Version',
                                href='/version',
                                category='About',
                                category_icon='fa-th')

            def integrate_plugins():
                """Integrate plugins to the context"""
                from airflow.plugins_manager import (
                    flask_appbuilder_views, flask_appbuilder_menu_links)

                for v in flask_appbuilder_views:
                    log.debug("Adding view %s", v["name"])
                    appbuilder.add_view(v["view"],
                                        v["name"],
                                        category=v["category"])
                for ml in sorted(flask_appbuilder_menu_links,
                                 key=lambda x: x["name"]):
                    log.debug("Adding menu link %s", ml["name"])
                    appbuilder.add_link(ml["name"],
                                        href=ml["href"],
                                        category=ml["category"],
                                        category_icon=ml["category_icon"])

            integrate_plugins()
            # Garbage collect old permissions/views after they have been modified.
            # Otherwise, when the name of a view or menu is changed, the framework
            # will add the new Views and Menus names to the backend, but will not
            # delete the old ones.

        def init_plugin_blueprints(app):
            from airflow.plugins_manager import flask_blueprints

            for bp in flask_blueprints:
                log.debug("Adding blueprint %s:%s", bp["name"],
                          bp["blueprint"].import_name)
                app.register_blueprint(bp["blueprint"])

        init_views(appbuilder)
        init_plugin_blueprints(app)

        security_manager = appbuilder.sm
        security_manager.sync_roles()

        from airflow.www.api.experimental import endpoints as e
        # required for testing purposes otherwise the module retains
        # a link to the default_auth
        if app.config['TESTING']:
            if six.PY2:
                reload(e)  # noqa
            else:
                import importlib
                importlib.reload(e)

        app.register_blueprint(e.api_experimental,
                               url_prefix='/api/experimental')

        @app.context_processor
        def jinja_globals():
            return {
                'hostname': socket.getfqdn(),
                'navbar_color': conf.get('webserver', 'NAVBAR_COLOR'),
            }

        @app.teardown_appcontext
        def shutdown_session(exception=None):
            settings.Session.remove()

    return app, appbuilder
Example #6
    def init_app(cls, app):
        # proxy servers
        from werkzeug.contrib.fixers import ProxyFix
        app.wsgi_app = ProxyFix(app.wsgi_app)
Example #7
class ReverseProxyPathFix(object):
    '''Fix SCRIPT_NAME, PATH_INFO and the URL scheme from headers set by a
    reverse proxy, configured in nginx along the lines of:

        location /myprefix {
            proxy_set_header X-Scheme $scheme;
            proxy_set_header X-Script-Name /myprefix;
        }

    :param app: the WSGI application
    '''

    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
        if script_name:
            environ['SCRIPT_NAME'] = script_name
            path_info = environ['PATH_INFO']
            if path_info.startswith(script_name):
                environ['PATH_INFO'] = path_info[len(script_name):]

        scheme = environ.get('HTTP_X_SCHEME', '')
        if scheme:
            environ['wsgi.url_scheme'] = scheme
        return self.app(environ, start_response)


application = app
# patch app to handle non-root URLs behind a proxy / WSGI server
app.wsgi_app = ReverseProxyPathFix(ProxyFix(application.wsgi_app))

if __name__ == "__main__":
    run()
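
A standalone check of what ReverseProxyPathFix from Example #7 does to the WSGI environ; the dummy app and the header values below are placeholders, not part of the original snippet:

def dummy_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['{} {}'.format(environ['SCRIPT_NAME'], environ['PATH_INFO']).encode()]

wrapped = ReverseProxyPathFix(dummy_app)
environ = {
    'HTTP_X_SCRIPT_NAME': '/myprefix',
    'HTTP_X_SCHEME': 'https',
    'PATH_INFO': '/myprefix/users/42',
    'wsgi.url_scheme': 'http',
}
body = wrapped(environ, lambda status, headers: None)
print(b''.join(body))              # b'/myprefix /users/42'
print(environ['wsgi.url_scheme'])  # 'https'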
Example #8
def run(args):
    logger = logging.getLogger("grappled.main.run")
    logger.debug("Launched with arguments: {}".format(args))

    try:
        config = grappled.helpers.parseConfig(args.config_dir)
    except Exception as e:
        logger.error("Fatal error parsing configuration files.")
        logger.error(e)
        return

    app = GrappledFlask(__name__)
    if not args.no_proxyfix:
        from werkzeug.contrib.fixers import ProxyFix
        app.wsgi_app = ProxyFix(app.wsgi_app)

    guid_history = None
    if args.guid:
        guid_history = ExpiringDict(max_len=1000, max_age_seconds=3600)

    # Error Handlers:
    @app.errorhandler(GrappledAPIException)
    def error_handler(error):
        logger.warn(error)
        if args.debug:
            return {'status': error.status, 'message': error.message}, error.status
        else:
            return {'status': 500, 'message': 'Internal Server Error'}, 500

    # Main Endpoint
    @app.route("/<endpoint>", methods=['GET', 'POST'])
    def index(endpoint=None):
        # Get the relevant endpoint object.
        # Normalize the endpoint name:
        endpoint = grappled.helpers.normalizeEndpointName(endpoint)
        try:
            end_cfg = config[endpoint]
        except KeyError:
            raise GrappledAPIException("Endpoint not found: {}".format(endpoint), 404)

        if end_cfg['ip-whitelist']:
            request_ip = ipaddress.ip_address(str(request.remote_addr))
            # Check the whitelist
            if not any((request_ip in white) for white in end_cfg['ip-whitelist']):
                raise GrappledAPIException("IP not in whitelist: {}".format(request_ip), 401)

        # Track GUID
        if not request.headers.get('X-Github-Delivery'):
            raise GrappledAPIException("Rejected event with missing UUID.", 403)
        in_guid = UUID(request.headers.get('X-Github-Delivery'))

        if guid_history is not None:
            if in_guid in guid_history:
                raise GrappledAPIException("Rejected event with duplicate UUID {}".format(in_guid), 401)
            guid_history[in_guid] = True

        # Check signature
        if "key" in end_cfg:
            try:
                hashtype, signature = request.headers.get('X-Hub-Signature').split('=', 2)
            except:
                raise GrappledAPIException("Signature in unexpected format.", 403)

            if hashtype != "sha1":
                raise GrappledAPIException("Signature uses unexpected hash type. Only SHA-1 is supported.", 403)

            assert type(end_cfg["key"]) is bytes
            mac = hmac.new(end_cfg["key"], msg=request.data, digestmod=sha1)
            if not hmac.compare_digest(mac.hexdigest(), signature):
                raise GrappledAPIException("Invalid signature.", 403)

        if request.method == 'POST':
            # Respond to ping events.
            if request.headers.get('X-GitHub-Event') == "ping":
                return {'msg': 'pong'}

            if request.headers.get('X-GitHub-Event') != "push":
                raise GrappledAPIException("Only push events are supported.", 400)

            try:
                payload = request.get_json(force=True)
            except Exception as e:
                raise GrappledAPIException("Malformed JSON input.", 400)

            rv = {"status": 200, "error": None, "guid": in_guid, "do": []}

            for act_name, action in end_cfg["do"]:
                try:
                    dostr = action(payload)
                    rv["do"].append({"plugin_name": act_name, "output": dostr})

                except Exception as e:
                    rv["error"] = {"plugin_name": act_name, "message": str(e.message)}
                    if hasattr(e, "output"):
                        rv["error"]["output"] = e.output
                    break

            return rv

        else:
            raise GrappledAPIException("Method Not Allowed. Only POST requests are allowed.", 405)

    app.run(host=str(args.ip), port=args.port)
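
For reference, the signature that Example #8 validates is an HMAC-SHA1 of the raw request body, sent by GitHub as "sha1=<hexdigest>" in the X-Hub-Signature header. A minimal sketch with a placeholder secret and payload:

import hmac
from hashlib import sha1

secret = b'my-webhook-secret'             # placeholder shared secret
payload = b'{"ref": "refs/heads/main"}'   # placeholder request body

header_value = 'sha1=' + hmac.new(secret, msg=payload, digestmod=sha1).hexdigest()

# Server side: split the header and compare digests in constant time.
hashtype, signature = header_value.split('=', 1)
mac = hmac.new(secret, msg=payload, digestmod=sha1)
assert hashtype == 'sha1'
assert hmac.compare_digest(mac.hexdigest(), signature)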
Example #9
''' Production App '''
import cointracker
my_app = cointracker.app

# Fix the proxy headers issue
from werkzeug.contrib.fixers import ProxyFix
my_app.wsgi_app = ProxyFix(my_app.wsgi_app)
Example #10
def create_app():


    app = Flask(__name__)
    APIKEY = os.environ['CHARON_APIKEY']
    CONFIG_FILE = os.environ['CHARON_CONFIG_FILE']

    app.wsgi_app = ProxyFix(app.wsgi_app, num_proxies=1)
    limiter = Limiter(app, key_func=get_remote_address)

    def require_apikey(func):
        @wraps(func)
        def decorated_function(*args, **kwargs):
            if request.headers.get('X-Api-Key') and request.headers.get('X-Api-Key') == APIKEY:
                return func(*args, **kwargs)
            else:
                logger.debug('Invalid API key. {0}'.format(request.json))
                logger.debug('Headers: {}'.format(request.headers))
                logger.debug('Headers API key: {}'.format(request.headers.get('X-Api-Key')))
                abort(401)
        return decorated_function
    
    
    @app.route('/job', methods=['POST'])
    @limiter.limit("300 per hour")
    @require_apikey
    def index():
        logger.debug('Received request: {}'.format(request.json))
        charon_request = {}
        content = request.json
    
        try:
            charon_request = {
                'request' : {
                    'source_project_id': content.get('source_project_id'),
                    'dest_project_id': content.get('dest_project_id'),
                    'image_uuids': content.get('image_uuids')
                }
            }
        except (TypeError, AttributeError):
            logger.warning('Request contents are incorrect')
            return Response('{"Error": "Request must contain: source_project_id, dest_project_id, image_uuids"}',
                status=400, mimetype='application/json')


        try:
            job_config = config.Config(CONFIG_FILE, charon_request)
        except ValidationError as e:
            logger.error('Error validating config. {}'.format(e))
            error_string = '{"Error": "There was an issue validating the config.' + str(e) +'"}'
            return Response(error_string,
                status=400, mimetype='application/json')
    
        jobs = []
        for job in content.get('image_uuids'):
            test_job = q.enqueue('charon.util.run_tests', job_config, timeout=5000)
            jobs.append(test_job.get_id())
            logger.debug('Dispatched job {0} for image_uuid {1}'.format(test_job.get_id(), job))
    
        return jsonify({'job_ids': jobs})
    
    @app.route('/job/<job_id>', methods=['GET'])
    def get_results(job_id):
    
        logger.debug('Received request: {}'.format(request))
        job = Job(job_id, connection=conn)
    
        if job.is_finished:
            return jsonify({'result': str(job.result)})
        elif job.is_failed:
            return jsonify({'status': 'failed', 'result': job.exc_info})
        elif job._status == 'started':
            return jsonify({'status': 'running'})
        else:
            return jsonify({'status': 'error', 'result': job.exc_info})

    return app
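
The num_proxies=1 ProxyFix in Example #10 is what makes the per-IP rate limit meaningful behind a reverse proxy: get_remote_address reads request.remote_addr, which ProxyFix takes from X-Forwarded-For. A self-contained sketch (the demo app and IP address are made up; newer Werkzeug spells the argument x_for):

from flask import Flask, request
try:
    from werkzeug.middleware.proxy_fix import ProxyFix   # Werkzeug >= 0.15
    proxy_kwargs = {'x_for': 1}
except ImportError:
    from werkzeug.contrib.fixers import ProxyFix         # older Werkzeug
    proxy_kwargs = {'num_proxies': 1}

demo = Flask(__name__)
demo.wsgi_app = ProxyFix(demo.wsgi_app, **proxy_kwargs)

@demo.route('/ip')
def ip():
    return request.remote_addr or ''

client = demo.test_client()
print(client.get('/ip', headers={'X-Forwarded-For': '203.0.113.7'}).data)
# b'203.0.113.7' rather than the test client's default 127.0.0.1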
Example #11
def init_webapp(config, test=False):
    """Initialize the web application.

    Initializes and configures the Flask web application. Call this method to
    make the web application and respective database engine usable.

    If initialized with `test=True` the application will use an in-memory
    SQLite database, and should be used for unit testing, but not much else.

    """

    # Make app work with proxies (like nginx) that set proxy headers.
    app.wsgi_app = ProxyFix(app.wsgi_app)

    # logging.getLogger('flask_cors').level = logging.DEBUG

    # Note, this url namespace also exists for the Flask-Restless extension and
    # is where CRUD interfaces live, so be careful not to collide with model
    # names here. We could change this, but it's nice to have API live in the
    # same url namespace.
    app.register_blueprint(api, url_prefix='/api')

    # Initialize Flask configuration
    if test:
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
    else:
        app.config['SQLALCHEMY_DATABASE_URI'] = config['webapp'][
            'database_uri']

    # FIXME: Port these over to configobj.
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'abc123')
    app.config['SECURITY_TOKEN_MAX_AGE'] = 60
    app.config['SECURITY_TOKEN_AUTHENTICATION_HEADER'] = 'Auth-Token'
    app.config['SECURITY_PASSWORD_HASH'] = 'bcrypt'
    app.config['SECURITY_PASSWORD_SALT'] = os.environ.get('SALT', 'salt123')
    app.config['SECURITY_REGISTERABLE'] = True
    app.config['SECURITY_CONFIRMABLE'] = False
    app.config['SECURITY_SEND_REGISTER_EMAIL'] = False

    # This thing is a supreme PIA with API, and because we're using token based
    # authentication.
    app.config['WTF_CSRF_ENABLED'] = False

    # Initialize Flask-CORS
    CORS(app, supports_credentials=True)
    # CORS(app, supports_credentials=True, resources={r"/*": {"origins": "*"}})

    # Initialize Flask-Bootstrap
    Bootstrap(app)

    # Initialize Flask-Security
    user_datastore = SQLAlchemyUserDatastore(db, User, Role)
    Security(app, user_datastore)

    app.config['GOOGLE_CLIENT_ID'] = os.environ.get('GOOGLE_CLIENT_ID',
                                                    'abc123')
    app.config['GOOGLE_CLIENT_SECRET'] = os.environ.get(
        'GOOGLE_CLIENT_SECRET', 'password')
    app.config[
        'GOOGLE_REFRESH_TOKEN_URL'] = 'https://www.googleapis.com/oauth2/v4/token'
    app.config['GOOGLE_CLIENT_KWARGS'] = dict(scope=' '.join([
        'openid',
        'https://www.googleapis.com/auth/userinfo.profile',
        'https://www.googleapis.com/auth/calendar.readonly',
    ]))

    # Initialize Authlib.
    oauth = OAuth()
    oauth.init_app(app,
                   fetch_token=authlib_fetch_token,
                   update_token=authlib_update_token)
    google_blueprint = create_flask_blueprint(Google, oauth,
                                              authlib_handle_authorize)
    app.register_blueprint(google_blueprint, url_prefix='/google')
    # Save the oauth object in the app so handlers can use it to build clients.
    app.oauth = oauth

    # Initialize Flask-SQLAlchemy
    db.app = app
    db.init_app(app)
    # NOTE: You don't want to use this if you're using alembic, since alembic
    # is now in charge of creating/upgrading/downgrading your database. If you
    # choose to not use alembic, you can add this line here.
    # db.create_all()

    # Initialize Flask-Restless
    manager = APIManager(
        app,
        flask_sqlalchemy_db=db,
        preprocessors=dict(GET_MANY=[restless_api_auth_func]),
    )
    # manager.create_api(TableName, methods=['GET', 'POST', 'OPTIONS'])
    return app
Example #12
    res.status_code = 200
    return res


# Associate flask app with flask_restplus configurations via blueprint
flask_app.register_blueprint(rest_plus_blueprint)

# Configure and associate monitoring dashboard
# dashboard.config.init_from(envvar=ENV.DASHBOARD_CONFIG)
# dashboard.bind(flask_app)

# Required for custom error handler; see: https://stackoverflow.com/a/36575875/9730910
flask_app.config['TRAP_HTTP_EXCEPTIONS'] = True
flask_app.register_error_handler(Exception, default_error_handler)

# This is required to serve swagger through https;
# Source: https://github.com/noirbizarre/flask-restplus/issues/54#issuecomment-143436291
flask_app.wsgi_app = ProxyFix(flask_app.wsgi_app)  # type: ignore

# Required to trim whitespaces in templates
flask_app.jinja_env.trim_blocks = True
flask_app.jinja_env.lstrip_blocks = True

# Opens swagger routes by default:
flask_app.config.SWAGGER_UI_DOC_EXPANSION = 'list'  # type: ignore

# Start app
if __name__ == "__main__":
    logger.info("<><><> STARTING APP <><><>")
    flask_app.run(port=PORT)
Example #13
def app_factory():
    monkey_patch_json_encoder()

    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = config.get('SQLALCHEMY_DATABASE_URI')
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
    app.secret_key = config.get('FLASK_SESSION_SECRET_KEY')

    db.init_app(app)

    Marshmallow(app)

    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.url_map.strict_slashes = False

    blueprint = Blueprint('v1', __name__, url_prefix='/api/v1')
    api.init_app(blueprint)
    api.add_namespace(auth_namespace, '/authentication')
    api.add_namespace(users_namespace, '/users')

    app.register_blueprint(blueprint)
    app.session_interface = AppSessionInterface()

    login_manager = LoginManager()
    login_manager.init_app(app)

    seed_data_enabled = config.get('SEED_DATA_ENABLED')
    if seed_data_enabled:
        with app.app_context():
            seed_data = SeedDataService()
            seed_data.seed()

    @login_manager.user_loader
    def load_user(user_id):
        return User.query.get(user_id)

    @login_manager.request_loader
    def load_user_from_request(request):
        header = request.headers.get('Authorization')
        if header is None:

            # review how to whitelist end points that we know won't ever require authn/authz
            # total hack, clean up with werkzeug or flask trimming, or our own method... this is super messy.
            whitelist = ['/api/v1', '/api/v1?', '/api/v1/?', '/api/v1/swagger.json']
            if request.full_path in whitelist:
                return

            raise Unauthorized()

        header_value = header.split()
        auth_type = header_value[0].lower()

        if auth_type == 'bearer':
            authenticated_bearer_token(header_value[1])

        elif auth_type == 'basic':
            creds = request.authorization
            if creds is not None:
                authenticate_basic(creds.username, creds.password)

        if current_user is None:
            raise Unauthorized()

        g.authenticated_from_header = True

    @app.after_request
    def after_request(response):
        if 'cache-control' not in response.headers:
            response.headers['cache-control'] = 'no-cache'
        return response

    @api.errorhandler
    def default_error_handler(e):
        """
        Provide a default error handler for RestPlus to leverage.
        """
        logger.exception(e)
        debug = config.get('FLASK_DEBUG')
        if not debug:
            message = 'An unhandled exception occurred.'
            return {'message': message}, 500

    @app.errorhandler(400)
    def bad_request_error(e):
        return jsonify(error=400, text=str(e)), 400

    @app.errorhandler(404)
    def page_not_found(e):
        return jsonify(error=404, text=str(e)), 404

    @app.errorhandler(500)
    def server_error(e):
        logger.error(e)
        return jsonify(error=500, text=str(e)), 500

    @app.errorhandler(RequestException)
    def request_exception(e):
        logger.error(e)
        return jsonify(error=500, text=str(e)), 500

    return app
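
The request_loader in Example #13 splits the Authorization header into a scheme and a value. A small illustration of the header formats it expects; the credentials and token are placeholders:

import base64

basic_header = 'Basic ' + base64.b64encode(b'alice:s3cret').decode()
scheme, value = basic_header.split()
print(scheme.lower())              # 'basic'
print(base64.b64decode(value))     # b'alice:s3cret'

bearer_header = 'Bearer some.opaque.token'
print(bearer_header.split()[0].lower())  # 'bearer'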
Example #14
def create_app(name=__name__,
               init_mode=False,
               destroy_mode=False,
               worker_mode=False,
               testing_mode=False,
               skip_endpoint_mapping=False,
               **kwargs):
    """ Create the server istance for Flask application """

    if PRODUCTION and testing_mode:
        log.exit("Unable to execute tests in production")

    #############################
    # Initialize reading of all files
    mem.customizer = Customizer(testing_mode, PRODUCTION, init_mode)
    # FIXME: try to remove mem. from everywhere...

    #############################
    # Add template dir for output in HTML
    from utilities import helpers
    tp = helpers.script_abspath(__file__, 'templates')
    kwargs['template_folder'] = tp

    #################################################
    # Flask app instance
    #################################################

    microservice = Flask(name, **kwargs)

    ##############################
    # Add commands to 'flask' binary
    if init_mode:
        microservice.config['INIT_MODE'] = init_mode
        skip_endpoint_mapping = True
    elif destroy_mode:
        microservice.config['DESTROY_MODE'] = destroy_mode
        skip_endpoint_mapping = True
    elif testing_mode:
        microservice.config['TESTING'] = testing_mode
        init_mode = True
        # microservice.config['INIT_MODE'] = init_mode
    elif worker_mode:
        skip_endpoint_mapping = True

    ##############################
    # Fix proxy wsgi for production calls
    microservice.wsgi_app = ProxyFix(microservice.wsgi_app)

    ##############################
    # Cors
    from restapi.protocols.cors import cors
    cors.init_app(microservice)
    log.verbose("FLASKING! Injected CORS")

    ##############################
    # Enabling our internal Flask customized response
    microservice.response_class = InternalResponse

    ##############################
    # Flask configuration from config file
    microservice.config.from_object(config)
    log.debug("Flask app configured")
    # log.pp(microservice.__dict__)

    ##############################
    if PRODUCTION:

        log.info("Production server mode is ON")

        # FIXME: random secret key in production
        # # Check and use a random file a secret key.
        # install_secret_key(microservice)

        # # To enable exceptions printing inside uWSGI
        # # http://stackoverflow.com/a/17839750/2114395
        # from werkzeug.debug import DebuggedApplication
        # app.wsgi_app = DebuggedApplication(app.wsgi_app, True)

    ##############################
    # Find services and try to connect to the ones available
    extensions = detector.init_services(app=microservice,
                                        worker_mode=worker_mode,
                                        project_init=init_mode,
                                        project_clean=destroy_mode)

    if worker_mode:
        microservice.extensions = extensions

    ##############################
    # Restful plugin
    if not skip_endpoint_mapping:
        # Triggering automatic mapping of REST endpoints
        current_endpoints = create_endpoints(farmer.EndpointsFarmer(Api))
        # Restful init of the app
        current_endpoints.rest_api.init_app(microservice)

        ##############################
        # Injection!
        # Enabling "configuration modules" for services to be injected
        # IMPORTANT: Injector must be initialized AFTER mapping endpoints

        modules = detector.load_injector_modules()

        # AVOID warnings from Flask Injector
        warnings.filterwarnings("ignore")

        FlaskInjector(app=microservice, modules=modules)

        # otherwise...
        # Catch warnings from Flask Injector
        # try:
        #     FlaskInjector(app=microservice, modules=modules)
        # except RuntimeWarning:
        #     pass

    ##############################
    # Clean app routes
    ignore_verbs = {"HEAD", "OPTIONS"}

    for rule in microservice.url_map.iter_rules():

        rulename = str(rule)
        # Skip rules that are only exposing schemas
        if '/schemas/' in rulename:
            continue

        endpoint = microservice.view_functions[rule.endpoint]
        if not hasattr(endpoint, 'view_class'):
            continue
        newmethods = ignore_verbs.copy()

        for verb in rule.methods - ignore_verbs:
            method = verb.lower()
            if method in mem.customizer._original_paths[rulename]:
                # remove from flask mapping
                # to allow 405 response
                newmethods.add(verb)
            else:
                log.verbose("Removed method %s.%s from mapping" %
                            (rulename, verb))

        rule.methods = newmethods

        # FIXME: SOLVE CELERY INJECTION
        # # Set global objects for celery workers
        # if worker_mode:
        #     mem.services = internal_services

    ##############################
    # Logging responses
    @microservice.after_request
    def log_response(response):

        ###############################
        # NOTE: if it is an upload,
        # I must NOT consume request.data or request.json,
        # otherwise the content gets lost
        do_not_log_types = [
            'application/octet-stream',
            'multipart/form-data',
        ]

        if request.mimetype in do_not_log_types:
            data = 'STREAM_UPLOAD'
        else:
            try:
                data = handle_log_output(request.data)
                # Limit the parameters string size, sometimes it's too big
                for k in data:
                    try:
                        if not isinstance(data[k], str):
                            continue
                        if len(data[k]) > MAX_CHAR_LEN:
                            data[k] = data[k][:MAX_CHAR_LEN] + "..."
                    except IndexError:
                        pass
            except Exception as e:
                data = 'OTHER_UPLOAD'

        log.info("%s %s %s %s", request.method, request.url, data, response)

        return response

    ##############################
    # and the flask App is ready now:
    log.info("Boot completed")
    # return our flask app
    return microservice
Example #15
from dotenv import load_dotenv
import os
from werkzeug.contrib.fixers import ProxyFix  # reverse proxy

dotenv_path = os.path.join(os.path.dirname(__file__), '.env')
if os.path.exists(dotenv_path):
    load_dotenv(dotenv_path)  # manually load environment variables

from bluelog import create_app

app = create_app('production')
app.wsgi_app = ProxyFix(app.wsgi_app)  # reverse proxy setup
Example #16
log = Logger('file_server.log').get_logger()

UPLOAD_FOLDER = 'uploaded_files'
ALLOWED_EXTENSIONS = {'db'}

application = Flask(__name__)

# application.debug = False

application.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
application.config['SECRET_KEY'] = "4&^^%%$%BJHGFGHHVVBN%$$#^"
# file size limit
application.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024

application.wsgi_app = ProxyFix(application.wsgi_app)


def setup_error_handler(app):
    @app.errorhandler(400)
    @app.errorhandler(ValueError)
    def http_bad_request(e):
        log.warn('{addr} request: {method}, '
                 'url: {url}'.format(addr=get_remote_addr(),
                                     method=request.method,
                                     url=request.url))
        log.warn("{}".format(request.headers))
        log.exception(e)
        return fail(HTTP_BAD_REQUEST)

    @app.errorhandler(403)
Example #17
def main():  # pragma: no cover
    """The main function for :program:`geofront-server` CLI program."""
    parser = main_parser()
    args = parser.parse_args()
    try:
        app.config.from_pyfile(os.path.abspath(args.config), silent=False)
    except FileNotFoundError:
        parser.error('unable to load configuration file: ' + args.config)
    logger = logging.getLogger('geofront')
    handler = logging.StreamHandler()
    level = logging.DEBUG if args.debug else logging.INFO
    handler.setLevel(level)
    logger.addHandler(handler)
    logger.setLevel(level)
    master_key_store = get_master_key_store()
    remote_set = get_remote_set()
    servers = frozenset(remote_set.values())
    master_key_bits = app.config['MASTER_KEY_BITS']
    if not isinstance(master_key_bits, int):
        parser.error('MASTER_KEY_BITS configuration must be an integer, '
                     'not ' + repr(master_key_bits))
    elif master_key_bits < 1024:
        parser.error('MASTER_KEY_BITS has to be 1024 at least.')
    elif master_key_bits % 256:
        parser.error('MASTER_KEY_BITS has to be a multiple of 256.')
    try:
        regenerate(
            master_key_store,
            remote_set,
            master_key_bits,
            create_if_empty=args.create_master_key or args.renew_master_key,
            renew_unless_empty=(args.renew_master_key
                                and not os.environ.get('WERKZEUG_RUN_MAIN')))
    except RegenError as e:
        parser.error(str(e))
    master_key_renewal_interval = app.config['MASTER_KEY_RENEWAL']
    if not (master_key_renewal_interval is None
            or isinstance(master_key_renewal_interval, datetime.timedelta)):
        raise RuntimeError(
            'MASTER_KEY_RENEWAL configuration must be an instance of '
            'datetime.timedelta, not {!r}'.format(master_key_renewal_interval))
    if master_key_renewal_interval is not None:
        master_key_renewal = PeriodicalRenewal(servers, master_key_store,
                                               master_key_renewal_interval,
                                               master_key_bits)
    waitress_options = {}
    if args.trusted_proxy:
        if hasattr(Adjustments, 'trusted_proxy'):
            # > 0.8.8
            # https://github.com/Pylons/waitress/pull/42
            waitress_options['trusted_proxy'] = True
        else:
            # <= 0.8.8
            app.wsgi_app = ProxyFix(app.wsgi_app)
    if args.force_https:
        app.config.update(PREFERRED_URL_SCHEME='https', ENABLE_HSTS=True)
    try:
        if args.debug:
            app.run(args.host, args.port, debug=True)
        else:
            serve(app,
                  host=args.host,
                  port=args.port,
                  asyncore_use_poll=True,
                  **waitress_options)
    finally:
        if master_key_renewal_interval is not None:
            master_key_renewal.terminate()
Example #18
from gevent import monkey
monkey.patch_all()

from flask import Flask
from flask_restful import Api
from werkzeug.contrib.fixers import ProxyFix
from ocdsapi.storage import ReleaseStorage
from ocdsapi.utils import build_meta
from pkg_resources import iter_entry_points

APP = Flask('ocdsapi')
APP.wsgi_app = ProxyFix(APP.wsgi_app)  # Fixed proto on nginx proxy
API = Api(APP)


def make_paste_application(global_config, **options):
    APP.config['DEBUG'] = options.get('debug', False)
    APP.db = ReleaseStorage(
        options.get('couchdb_url'),
        options.get('couchdb_dbname'),
    )
    APP.paginate_by = options.get('paginate_by', 20)
    APP.config['metainfo'] = build_meta(options)
    for plugin in iter_entry_points('ocdsapi.resources'):
        includeme = plugin.load()
        includeme(options)
    return APP
Example #19
    'intcomma': humanize.intcomma,
    'intword': humanize.intword,
    'naturalday': humanize.naturalday,
    'naturaldate': humanize.naturaldate,
    'naturaltime': humanize.naturaltime,
    'naturalsize': humanize.naturalsize,
    'datetime': format_datetime,
    'isodatetime': format_isodatetime,
    'format_currency': format_currency,
    'uuid': format_uuid,
})
app.static_folder = 'static'

if app.config['PROXY_FIX']:
    from werkzeug.contrib.fixers import ProxyFix
    app.wsgi_app = ProxyFix(app.wsgi_app,
                            num_proxies=app.config['PROXY_FIX_NUM_PROXIES'])

redis_conn = redis.from_url(app.config['REDIS_URL'])

cache = RedisCache(host=redis_conn)
csrf = CSRFProtect(app)
db = SQLAlchemy(app)
migrate = Migrate(app, db)

from wuvt.auth import AuthManager
auth_manager = AuthManager()
auth_manager.db = db
auth_manager.init_app(app)

if len(app.config['SENTRY_DSN']) > 0:
    sentry_sdk.init(app.config['SENTRY_DSN'],
Example #20
def create_app(config_class=Config):
    app = Flask(__name__)
    app.config.from_object(config_class)

    db.init_app(app)
    migrate.init_app(app, db)
    login.init_app(app)
    mail.init_app(app)
    bootstrap.init_app(app)
    moment.init_app(app)

    from app.errors import bp as errors_bp
    app.register_blueprint(errors_bp)

    from app.contract import bp as contract_bp
    app.register_blueprint(contract_bp)

    from app.tender import bp as tender_bp
    app.register_blueprint(tender_bp)

    from app.auth import bp as auth_bp
    app.register_blueprint(auth_bp, url_prefix='/auth')

    from app.main import bp as main_bp
    app.register_blueprint(main_bp)

    from app.clients import bp as clients_bp
    app.register_blueprint(clients_bp)

    configure_uploads(app, images)

    app.add_url_rule('/uploads/<filename>', 'uploaded_file',
                     build_only=True)
    app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
        '/uploads':  app.config['UPLOAD_FOLDER']
    })

    app.wsgi_app = ProxyFix(app.wsgi_app)


    app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024

    if not app.debug and not app.testing:
        if app.config['MAIL_SERVER']:
            auth = None
            if app.config['MAIL_USERNAME'] or app.config['MAIL_PASSWORD']:
                auth = (app.config['MAIL_USERNAME'], app.config['MAIL_PASSWORD'])
            secure = None
            if app.config['MAIL_USE_TLS']:
                secure = ()
            mail_handler = SMTPHandler(
                mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
                fromaddr='no-reply@' + app.config['MAIL_SERVER'],
                toaddrs=app.config['ADMINS'], subject='Microblog Failure',
                credentials=auth, secure=secure)
            mail_handler.setLevel(logging.ERROR)
            app.logger.addHandler(mail_handler)

        if not os.path.exists('logs'):
            os.mkdir('logs')
        file_handler = RotatingFileHandler('logs/microblog.log', maxBytes=10240,
                                           backupCount=10)
        file_handler.setFormatter(logging.Formatter(
            '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
        file_handler.setLevel(logging.INFO)
        app.logger.addHandler(file_handler)

        app.logger.setLevel(logging.INFO)
        app.logger.info('Microblog startup')

    return app
Example #21
def create_app(test_config=None):
    print("{}".format('-' * 60))
    import_name = __name__.split('.')[0]
    # step 1: Create Flask application object
    app = ApiFlask(import_name, instance_relative_config=True)
    print(f" > Flask App (with import name <{app.import_name}>) was created")

    # If this is set to ``True`` the first newline after a block is removed (block, not variable tag!).  Defaults to `False`.
    app.jinja_env.trim_blocks = True
    # If this is set to ``True`` leading spaces and tabs are stripped from the start of a line to a block.  Defaults to `False`.
    app.jinja_env.lstrip_blocks = True
    # The string marking the beginning of a print statement.
    app.jinja_env.variable_start_string = '{{'
    # The string marking the end of a print statement.
    app.jinja_env.variable_end_string = '}}'

    # app.register_api_error_handler()
    app.wsgi_app = ProxyFix(app.wsgi_app)

    # step 2: Builtin/Extensions/MyOwn Flask Configuration
    app.config.from_object(cfg.get('base'))

    # step 3: Overwrite Flask Configuration with cfg file in instance folder when not testing
    if test_config is None:
        print(' > No testing configuration found')
        app.config.from_pyfile('config.cfg', silent=True)
    else:
        # load the test config if passed in
        print(' > Applying testing configuration')
        app.config.from_mapping(test_config)

    # Ensure the instance folder exists
    try:
        os.makedirs(app.instance_path)
    except OSError:
        print(f' > Instance Folder Found : {app.instance_path}')
        pass
    """Binds the application only.
	> For as long as the application is bound to the current context the :data:`flask.current_app` points to that application.
	> An application context is automatically created when a request context is pushed if necessary
	"""
    with app.app_context():
        # step 4:
        # Initialize extensions to current app object, this should be done after the flask object configuration
        from .extension import init_app_extensions
        init_app_extensions(app)
        # step 5: Register Blueprints
        register_blueprints(app, 'bp', include_packages=True, recursive=False)

        # step 6:
        # Init the DataBase
        from app.database.sqlalchemy_database import db_session

    # LazyLoad URL rules
    loader = LazyLoader(app)
    loader.url('favicon', ['/favicon.ico'], endpoint='favicon')
    loader.url('find_vendor_folder', ['/static/vendor/<path:filename>'],
               endpoint='vendor')

    # Close the session after each request or application context shutdown
    @app.teardown_appcontext
    def shutdown_session(exception=None):
        db_session.remove()

    print("{}".format('-' * 60))
    return app  # Return application object/instance
Example #22
0
import os

from flask import Flask
from flask_migrate import Migrate
from werkzeug.contrib.fixers import ProxyFix

from myca import config, models
from myca.admin import admin

app = Flask(__name__, template_folder='admin/templates')

app.secret_key = config.secret_key
app.config['SQLALCHEMY_DATABASE_URI'] = config.database_uri
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

if config.reverse_proxy_count:
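    # num_proxies is the number of trusted reverse proxies in front of the app;
    # ProxyFix uses it to pick the real client address from the corresponding
    # entry of the X-Forwarded-For header.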
    app.wsgi_app = ProxyFix(app.wsgi_app,
                            num_proxies=config.reverse_proxy_count)

models.db.app = app
models.db.init_app(app)

migrate = Migrate(app,
                  models.db,
                  directory=os.path.join(config.app_root, 'myca',
                                         'migrations'))

admin.init_app(app)
Example #23
0
from flask import Flask
from flask_restplus import Api
from flask_sqlalchemy import SQLAlchemy
from app.utils import setting
from werkzeug.contrib.fixers import ProxyFix

flask_app = Flask(__name__)
flask_app.wsgi_app = ProxyFix(flask_app.wsgi_app)
flask_app.config.from_object(setting)

api = Api(app=flask_app,
          version="1.0",
          title="Modovision Apis",
          description="Manage names of various apps api of the application")

# Register namespace
ns_WhatTheMask = api.namespace('WhatTheMask',
                               description='WhatTheMask android app APIs')
ns_RabbitMQ = api.namespace('RabbitMQ',
                            description='RabbitMQ APIs')
ns_APIAuth = api.namespace('auth', description='Auth')

# Db connector
db = SQLAlchemy(flask_app)

from app.Controllers import WhatTheMaskController, APIAuthController, RabbitMQController
from app.Expects import WhatTheMaskExpect
from app.Models import WhatTheMaskModel, APIAuthModel, RabbitMQModel
from app.Services import WhatTheMaskService, APIAuthService, RabbitMQService
Example #24
0
def create_app(config=None, session=None, testing=False, app_name="Airflow"):
    global app, appbuilder
    app = Flask(__name__)
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.secret_key = conf.get('webserver', 'SECRET_KEY')

    airflow_home_path = conf.get('core', 'AIRFLOW_HOME')
    webserver_config_path = airflow_home_path + '/webserver_config.py'
    app.config.from_pyfile(webserver_config_path, silent=True)
    app.config['APP_NAME'] = app_name
    app.config['TESTING'] = testing

    csrf.init_app(app)

    db = SQLA(app)

    from airflow import api
    api.load_auth()
    api.api_auth.init_app(app)

    cache = Cache(app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})  # noqa
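    # Note: a filesystem cache under /tmp is local to the webserver host, so
    # each host keeps its own cache rather than sharing one.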

    from airflow.www_rbac.blueprints import routes
    app.register_blueprint(routes)

    configure_logging()
    configure_manifest_files(app)

    with app.app_context():

        from airflow.www_rbac.security import AirflowSecurityManager
        security_manager_class = app.config.get('SECURITY_MANAGER_CLASS') or \
            AirflowSecurityManager

        if not issubclass(security_manager_class, AirflowSecurityManager):
            raise Exception(
                "Your CUSTOM_SECURITY_MANAGER must now extend AirflowSecurityManager,"
                " not FAB's security manager.")

        appbuilder = AppBuilder(
            app,
            db.session if not session else session,
            security_manager_class=security_manager_class,
            base_template='appbuilder/baselayout.html')

        def init_views(appbuilder):
            from airflow.www_rbac import views
            appbuilder.add_view_no_menu(views.Airflow())
            appbuilder.add_view_no_menu(views.DagModelView())
            appbuilder.add_view_no_menu(views.ConfigurationView())
            appbuilder.add_view_no_menu(views.VersionView())
            appbuilder.add_view(views.DagRunModelView,
                                "DAG Runs",
                                category="Browse",
                                category_icon="fa-globe")
            appbuilder.add_view(views.JobModelView,
                                "Jobs",
                                category="Browse")
            appbuilder.add_view(views.LogModelView,
                                "Logs",
                                category="Browse")
            appbuilder.add_view(views.SlaMissModelView,
                                "SLA Misses",
                                category="Browse")
            appbuilder.add_view(views.TaskInstanceModelView,
                                "Task Instances",
                                category="Browse")
            appbuilder.add_link("Configurations",
                                href='/configuration',
                                category="Admin",
                                category_icon="fa-user")
            appbuilder.add_view(views.ConnectionModelView,
                                "Connections",
                                category="Admin")
            appbuilder.add_view(views.PoolModelView,
                                "Pools",
                                category="Admin")
            appbuilder.add_view(views.VariableModelView,
                                "Variables",
                                category="Admin")
            appbuilder.add_view(views.XComModelView,
                                "XComs",
                                category="Admin")
            appbuilder.add_link("Documentation",
                                href='https://airflow.apache.org/',
                                category="Docs",
                                category_icon="fa-cube")
            appbuilder.add_link("Github",
                                href='https://github.com/apache/incubator-airflow',
                                category="Docs")
            appbuilder.add_link('Version',
                                href='/version',
                                category='About',
                                category_icon='fa-th')

            # Garbage collect old permissions/views after they have been modified.
            # Otherwise, when the name of a view or menu is changed, the framework
            # will add the new Views and Menus names to the backend, but will not
            # delete the old ones.

        init_views(appbuilder)

        security_manager = appbuilder.sm
        security_manager.sync_roles()

        from airflow.www_rbac.api.experimental import endpoints as e
        # required for testing purposes otherwise the module retains
        # a link to the default_auth
        if app.config['TESTING']:
            if six.PY2:
                reload(e) # noqa
            else:
                import importlib
                importlib.reload(e)

        app.register_blueprint(e.api_experimental, url_prefix='/api/experimental')

        @app.context_processor
        def jinja_globals():
            return {
                'hostname': socket.getfqdn(),
                'navbar_color': conf.get('webserver', 'NAVBAR_COLOR'),
            }

        @app.teardown_appcontext
        def shutdown_session(exception=None):
            settings.Session.remove()

    return app, appbuilder
Example #25
0
def create_app(_read_config=True, **config):
    app = flask.Flask(
        __name__,
        static_folder=None,
        template_folder=os.path.join(PROJECT_ROOT, 'templates'))

    # Utilized for sessions and other secrets
    # NOTE: This key is insecure and you should override it on the server
    app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'
    if 'SECRET_KEY' in os.environ:
        app.config['SECRET_KEY'] = os.environ['SECRET_KEY']

    # The api key to authorize end users against this system.
    # NOTE: This key is insecure and you should override it on the server
    app.config['API_KEY'] = '3e84744ab2714151b1db789df82b41c0021958fe4d77406e9c0947c34f5c5a70'
    if 'API_KEY' in os.environ:
        app.config['API_KEY'] = os.environ['API_KEY']

    # The private key to use when cloning repositories
    # TODO(dcramer): this should support an on-disk option, as well as be
    # possible to override per repo
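    # The key is typically stored with literal "\n" escape sequences (a common
    # way to pass a multi-line key through environment configuration), so they
    # are converted back to real newlines below.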
    app.config['SSH_PRIVATE_KEY'] = os.environ.get('SSH_PRIVATE_KEY', '').replace("\\n", "\n")

    app.config['FREIGHT_URL'] = os.environ.get('FREIGHT_URL', '').rstrip('/')

    if 'REDISCLOUD_URL' in os.environ:
        app.config['REDIS_URL'] = os.environ['REDISCLOUD_URL']
    elif 'REDIS_URL' in os.environ:
        app.config['REDIS_URL'] = os.environ['REDIS_URL']

    app.config['WORKSPACE_ROOT'] = os.environ.get('WORKSPACE_ROOT', '/tmp')

    app.config['DEFAULT_TIMEOUT'] = int(os.environ.get('DEFAULT_TIMEOUT', 3600))
    app.config['DEFAULT_READ_TIMEOUT'] = int(os.environ.get('DEFAULT_READ_TIMEOUT', 600))

    app.config['LOG_LEVEL'] = os.environ.get('LOG_LEVEL', 'INFO' if config.get('DEBUG') else 'ERROR')

    app.config['DEV'] = config.get('DEV', False)

    # Currently authentication requires Google
    app.config['GOOGLE_CLIENT_ID'] = os.environ.get('GOOGLE_CLIENT_ID')
    app.config['GOOGLE_CLIENT_SECRET'] = os.environ.get('GOOGLE_CLIENT_SECRET')
    app.config['GOOGLE_DOMAIN'] = os.environ.get('GOOGLE_DOMAIN')

    # Generate a GitHub token via Curl:
    # curlish https://api.github.com/authorizations \
    #     -u your-username \
    #     -X POST \
    #     -J scopes='repo' \
    #     -J note='freight'
    app.config['GITHUB_TOKEN'] = os.environ.get('GITHUB_TOKEN')
    app.config['GITHUB_API_ROOT'] = 'https://api.github.com'

    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_POOL_SIZE'] = 5
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 0
    if 'SQLALCHEMY_DATABASE_URI' in os.environ:
        app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['SQLALCHEMY_DATABASE_URI']

    app.config['QUEUES'] = [
        'freight.default',
        'freight.tasks',
        'freight.queue',
        'freight.notifications',
    ]
    app.config['QUEUE_DEFAULT'] = 'freight.default'
    app.config['QUEUE_ROUTES'] = {
        'freight.jobs.execute_task': 'freight.tasks',
        'freight.jobs.check_queue': 'freight.queue',
        'freight.jobs.send_pending_notifications': 'freight.notifications',
    }
    app.config['QUEUE_SCHEDULE'] = {
        'freight.jobs.check_queue': {
            'seconds': 1,
        },
        'freight.jobs.send_pending_notifications': {
            'seconds': 1,
        },
    }

    app.config['SENTRY_INCLUDE_PATHS'] = [
        'freight',
    ]

    # We don't support non-proxied installs
    app.wsgi_app = ProxyFix(app.wsgi_app)

    # Pull in Heroku configuration
    heroku.init_app(app)

    # Pull in environment variables from docker
    docker_init_app(app)

    if 'DYNO' in os.environ:
        # XXX: the released version of flask-sslify does not support init_app
        SSLify(app)

    # Set any remaining defaults that might not be present yet
    if not app.config.get('SQLALCHEMY_DATABASE_URI'):
        app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///freight'

    app.config.setdefault('REDIS_URL', 'redis://localhost:6379')
    app.config.setdefault('REDIS_DB', 0)

    app.config.update(config)

    if _read_config:
        if os.environ.get('FREIGHT_CONF'):
            # FREIGHT_CONF=/etc/freight.conf.py
            app.config.from_envvar('FREIGHT_CONF')
        else:
            # Look for ~/.freight/freight.conf.py
            path = os.path.normpath(os.path.expanduser('~/.freight/freight.conf.py'))
            app.config.from_pyfile(path, silent=True)

    configure_logging(app)
    configure_sentry(app)
    configure_api(app)
    configure_redis(app)
    configure_queue(app)
    configure_sqlalchemy(app)
    configure_web_routes(app)
    configure_webpack(app)

    return app
Example #26
0
"""
lunaport_server.wsgi
~~~~~~~~~~~~~~~~~~~~

Describes the WSGI application.
"""

import os

from flask import Flask
from werkzeug.contrib.fixers import ProxyFix
from flask.ext.sqlalchemy import SQLAlchemy
from raven.contrib.flask import Sentry

app = Flask(__name__)
app.config.from_envvar('LUNAPORT_CFG')
app.wsgi_app = ProxyFix(app.wsgi_app)  # Fix for old proxies
db = SQLAlchemy(app)

if os.environ.get('LUNAPORT_ENV') == 'production':
    sentry = Sentry(app, dsn=app.config.get('SENTRY_DSN'))
    sentry.init_app(app)

from plugg_views import User, Issue, Test, Host, Stat, Chart, Eval, Case, \
    Artefact, Job, Assert, Proj, Notification, Token, Ammo, Dc, Line, \
    HealthStatus, Hook, HookRegistration 
from helpers import auth_required

user_ident = auth_required(User.UserIdent.as_view('user_ident'))
user_view = auth_required(User.User.as_view('user'))
test_view = auth_required(Test.Test.as_view('test'))
case_view = auth_required(Case.Case.as_view('case'))
Example #27
0
def init_app(app, **kwargs):
    """Initialize the Flask app located in the module sipa.
    This initializes the Flask app by:
    * calling the internal init_app() procedures of each module
    * registering the Blueprints
    * registering the Jinja global variables
    :return: None
    """
    load_config_file(app, config=kwargs.pop('config', None))
    app.wsgi_app = ProxyFix(app.wsgi_app, app.config['NUM_PROXIES'])
    init_logging(app)
    init_env_and_config(app)
    logger.debug('Initializing app')
    login_manager.init_app(app)
    babel = Babel()
    babel.init_app(app)
    babel.localeselector(select_locale)
    app.before_request(save_user_locale_setting)
    app.session_interface = SeparateLocaleCookieSessionInterface()
    cf_pages = CategorizedFlatPages()
    cf_pages.init_app(app)
    backends = Backends()
    backends.init_app(app)
    QRcode(app)

    app.url_map.converters['int'] = IntegerConverter
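    # Registering a converter under the name 'int' overrides Flask's built-in
    # integer converter for all <int:...> URL rules.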

    from sipa.blueprints import bp_features, bp_usersuite, \
        bp_pages, bp_documents, bp_news, bp_generic, bp_hooks

    logger.debug('Registering blueprints')
    app.register_blueprint(bp_generic)
    app.register_blueprint(bp_features)
    app.register_blueprint(bp_usersuite)
    app.register_blueprint(bp_pages)
    app.register_blueprint(bp_documents)
    app.register_blueprint(bp_news)
    app.register_blueprint(bp_hooks)

    from sipa.model import query_gauge_data
    logger.debug('Registering Jinja globals')
    form_label_width = 3
    form_input_width = 7
    app.jinja_env.globals.update(
        cf_pages=cf_pages,
        gauge_data=query_gauge_data,
        get_locale=get_locale,
        get_weekday=get_weekday,
        possible_locales=possible_locales,
        get_attribute_endpoint=get_attribute_endpoint,
        traffic_chart=provide_render_function(generate_traffic_chart),
        credit_chart=provide_render_function(generate_credit_chart),
        current_datasource=backends.current_datasource,
        form_label_width_class="col-sm-{}".format(form_label_width),
        form_input_width_class="col-sm-{}".format(form_input_width),
        form_input_offset_class="col-sm-offset-{}".format(form_label_width),
        url_self=url_self,
    )
    logger.debug("Jinja globals have been set",
                 extra={'data': {'jinja_globals': app.jinja_env.globals}})

    backends.init_backends()
Example #28
0
def create_app(config='CTFd.config.Config'):
    app = CTFdFlask(__name__)
    with app.app_context():
        app.config.from_object(config)

        theme_loader = ThemeLoader(os.path.join(app.root_path, 'themes'),
                                   followlinks=True)
        app.jinja_loader = theme_loader

        from CTFd.models import db, Teams, Solves, Challenges, Fails, Flags, Tags, Files, Tracking

        url = create_database()

        # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
        # This is mostly so we can force MySQL's charset
        app.config['SQLALCHEMY_DATABASE_URI'] = str(url)

        # Register database
        db.init_app(app)

        # Register Flask-Migrate
        migrations.init_app(app, db)

        # Alembic sqlite support is lacking so we should just create_all anyway
        if url.drivername.startswith('sqlite'):
            db.create_all()
            stamp()
        else:
            # This creates tables instead of db.create_all()
            # Allows migrations to happen properly
            upgrade()

        from CTFd.models import ma

        ma.init_app(app)

        app.db = db
        app.VERSION = __version__

        from CTFd.cache import cache

        cache.init_app(app)
        app.cache = cache

        # If you have multiple workers you must have a shared cache
        socketio.init_app(app,
                          async_mode=app.config.get('SOCKETIO_ASYNC_MODE'),
                          message_queue=app.config.get('CACHE_REDIS_URL'))

        if app.config.get('REVERSE_PROXY'):
            app.wsgi_app = ProxyFix(app.wsgi_app)

        version = utils.get_config('ctf_version')

        # Upgrading from an older version of CTFd
        if version and (StrictVersion(version) < StrictVersion(__version__)):
            if confirm_upgrade():
                run_upgrade()
            else:
                exit()

        if not version:
            utils.set_config('ctf_version', __version__)

        if not utils.get_config('ctf_theme'):
            utils.set_config('ctf_theme', 'core')

        update_check(force=True)

        init_request_processors(app)
        init_template_filters(app)
        init_template_globals(app)

        # Importing here allows tests to use sensible names (e.g. api instead of api_bp)
        from CTFd.views import views
        from CTFd.teams import teams
        from CTFd.users import users
        from CTFd.challenges import challenges
        from CTFd.scoreboard import scoreboard
        from CTFd.auth import auth
        from CTFd.admin import admin
        from CTFd.api import api
        from CTFd.events import events
        from CTFd.errors import page_not_found, forbidden, general_error, gateway_error

        app.register_blueprint(views)
        app.register_blueprint(teams)
        app.register_blueprint(users)
        app.register_blueprint(challenges)
        app.register_blueprint(scoreboard)
        app.register_blueprint(auth)
        app.register_blueprint(api)
        app.register_blueprint(events)

        app.register_blueprint(admin)

        app.register_error_handler(404, page_not_found)
        app.register_error_handler(403, forbidden)
        app.register_error_handler(500, general_error)
        app.register_error_handler(502, gateway_error)

        init_logs(app)
        init_plugins(app)

        return app
Example #29
0
import base64
import os
import random
import logging
from datetime import datetime

from flask import Flask, jsonify
from werkzeug.contrib.fixers import ProxyFix

DOMAIN_NAME = "http://www.yourdomain.com"
IMG_DIR = "/path/to/your/image/dir"
TOKEN = "YourToken"
logging.basicConfig(filename="./log/run.log",
                    filemode="a",
                    format="%(asctime)s-%(name)s-%(levelname)s-%(message)s",
                    level=logging.INFO)

app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)


@app.route('/get/<img_name>', methods=['GET'])
def download(img_name):
    img_path = "%s/%s" % (IMG_DIR, img_name)
    if not os.path.isfile(img_path):
        return jsonify({"ret": -1, "msg": "invalid file name:%s" % (img_name)})

    mdict = {
        'jpeg': 'image/jpeg',
        'jpg': 'image/jpeg',
        'png': 'image/png',
        'gif': 'image/gif'
    }
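    # Derive the file extension, presumably to look up the response MIME type
    # in mdict.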
    img_type = (img_name.split('/')[-1]).split('.')[-1]
Example #30
0
File: app.py Project: xzwupeng/quay
else:
    from config import DefaultConfig
    logger.debug('Loading default config.')
    app.config.from_object(DefaultConfig())
    app.teardown_request(database.close_db_filter)

# Load the override config via the provider.
config_provider.update_app_config(app.config)

# Update any configuration found in the override environment variable.
environ_config = json.loads(os.environ.get(OVERRIDE_CONFIG_KEY, '{}'))
app.config.update(environ_config)

# Fix remote address handling for Flask.
if app.config.get('PROXY_COUNT', 1):
    app.wsgi_app = ProxyFix(app.wsgi_app,
                            num_proxies=app.config.get('PROXY_COUNT', 1))

# Ensure the V3 upgrade key is specified correctly. If not, simply fail.
# TODO: Remove for V3.1.
if not is_testing and not is_building and app.config.get(
        'SETUP_COMPLETE', False):
    v3_upgrade_mode = app.config.get('V3_UPGRADE_MODE')
    if v3_upgrade_mode is None:
        raise Exception(
            'Configuration flag `V3_UPGRADE_MODE` must be set. Please check the upgrade docs'
        )

    if (v3_upgrade_mode != 'background' and v3_upgrade_mode != 'complete'
            and v3_upgrade_mode != 'production-transition'
            and v3_upgrade_mode != 'post-oci-rollout'
            and v3_upgrade_mode != 'post-oci-roll-back-compat'):