def index():
    """Serve the SPA entry page with cache-busting bundle hashes.

    Exposes a whitelisted subset of the Flask config plus a few
    internal (_config) values to the front end as HTML-safe JSON.
    """
    static_dir = app.config["STATIC_DIR"]
    js_bundle_path = os.path.join(static_dir, "js", "dist", "main.bundle.js")
    css_bundle_path = os.path.join(static_dir, "css", "dist", "main.css")

    # Flask config keys exposed to the front end.
    front_end_config = [
        "FLASK_ENV",
        "TELEMETRY_DISABLED",
        "ENVIRONMENT_DEFAULTS",
        "ORCHEST_WEB_URLS",
    ]
    # Values taken from the internal _config module.
    front_end_config_internal = [
        "ORCHEST_SOCKETIO_ENV_BUILDING_NAMESPACE",
        "PIPELINE_PARAMETERS_RESERVED_KEY",
    ]

    exposed_config = {key: app.config[key] for key in front_end_config}
    for key in front_end_config_internal:
        exposed_config[key] = getattr(_config, key)

    return render_template(
        "index.html",
        javascript_bundle_hash=get_hash(js_bundle_path),
        css_bundle_hash=get_hash(css_bundle_path),
        user_config=get_user_conf(),
        config_json=flask_json.htmlsafe_dumps(exposed_config),
    )
def server_config():
    """Return the user configuration and exposed server config as JSON."""
    # Flask config keys that may be shown to the client.
    front_end_config = [
        "FLASK_ENV",
        "TELEMETRY_DISABLED",
        "ENVIRONMENT_DEFAULTS",
        "ORCHEST_WEB_URLS",
        "CLOUD",
        "GPU_REQUEST_URL",
        "GPU_ENABLED_INSTANCE",
        "CLOUD_UNMODIFIABLE_CONFIG_VALUES",
        "INTERCOM_APP_ID",
        "INTERCOM_DEFAULT_SIGNUP_DATE",
    ]
    # Values taken from the internal _config module.
    front_end_config_internal = [
        "ORCHEST_SOCKETIO_ENV_BUILDING_NAMESPACE",
        "ORCHEST_SOCKETIO_JUPYTER_BUILDING_NAMESPACE",
        "PIPELINE_PARAMETERS_RESERVED_KEY",
    ]

    exposed = {key: app.config[key] for key in front_end_config}
    exposed.update(
        {key: getattr(_config, key) for key in front_end_config_internal})

    return jsonify({
        "user_config": get_user_conf(),
        "config": exposed,
    })
def is_authenticated(request):
    """Return True iff the request is authenticated.

    When AUTH_ENABLED is off, every request is considered
    authenticated. Otherwise the "auth_token"/"auth_username" cookies
    must resolve to an existing user whose stored token matches and is
    younger than TOKEN_DURATION_HOURS.
    """
    # if auth_enabled request is always authenticated
    config_data = get_user_conf()
    if not config_data["AUTH_ENABLED"]:
        return True

    cookie_token = request.cookies.get("auth_token")
    username = request.cookies.get("auth_username")

    user = User.query.filter(User.username == username).first()
    if user is None:
        return False

    token = (
        Token.query.filter(Token.token == cookie_token)
        .filter(Token.user == user.uuid)
        .first()
    )
    if token is None:
        return False

    # BUG FIX: the config value is expressed in hours (the key is
    # TOKEN_DURATION_HOURS), but it was previously passed as
    # timedelta(days=...), which extended the token lifetime by a
    # factor of 24.
    token_creation_limit = datetime.datetime.utcnow() - datetime.timedelta(
        hours=app.config["TOKEN_DURATION_HOURS"]
    )
    # Valid only while the token is newer than the limit.
    return token.created > token_creation_limit
def admin():
    """Admin page: handle user create/delete POSTs and render user list.

    Returns the admin template together with a JSON payload describing
    the current users (and an "error" key plus a 409 status on
    duplicate/invalid create or forbidden delete).
    """
    context = static_render_context()
    data_json = {"users": []}
    return_status = 200

    config_data = get_user_conf()
    # Reject unauthenticated access whenever auth is turned on.
    if not is_authenticated(request) and config_data["AUTH_ENABLED"]:
        return "", 401

    if request.method == "POST":
        if "username" in request.form:
            username = request.form.get("username")
            password = request.form.get("password")
            existing = User.query.filter(User.username == username).first()
            if existing is not None:
                data_json["error"] = "User already exists."
                return_status = 409
            elif len(password) == 0:
                data_json["error"] = "Password cannot be empty."
                return_status = 409
            else:
                new_user = User(
                    username=username,
                    password_hash=generate_password_hash(password),
                    uuid=str(uuid.uuid4()),
                )
                db.session.add(new_user)
                db.session.commit()
        elif "delete_username" in request.form:
            username = request.form.get("delete_username")
            target = User.query.filter(User.username == username).first()
            if target is not None:
                if target.is_admin:
                    data_json["error"] = "Admins cannot be deleted."
                    return_status = 409
                else:
                    db.session.delete(target)
                    db.session.commit()

    # Always render the (possibly updated) user list.
    data_json["users"] = [
        {"username": user.username} for user in User.query.all()
    ]
    context["data_json"] = json.dumps(data_json)

    return render_template("admin.html", **context), return_status
def index():
    """Serve the SPA entry page with cache-busting bundle hashes."""
    static_dir = app.config["STATIC_DIR"]
    js_bundle_path = os.path.join(static_dir, "js", "dist", "main.bundle.js")
    css_bundle_path = os.path.join(static_dir, "css", "main.css")

    return render_template(
        "index.html",
        javascript_bundle_hash=get_hash(js_bundle_path),
        css_bundle_hash=get_hash(css_bundle_path),
        user_config=get_user_conf(),
    )
def index():
    """Auth-check endpoint: empty 200 when allowed, empty 401 otherwise."""
    config_data = get_user_conf()
    # With auth disabled everyone passes; otherwise the token cookie
    # must validate against the database.
    if not config_data["AUTH_ENABLED"] or is_authenticated(request):
        return "", 200
    return "", 401
def login():
    """Login form handler that issues an auth token cookie on success.

    GET renders the login form; POST authenticates via password or a
    pre-provisioned token and redirects to "/" with fresh cookies.
    """
    config_data = get_user_conf()
    # Auth disabled: just send the user back to the root page.
    if not config_data["AUTH_ENABLED"]:
        return make_response(
            render_template("client_side_redirect.html", url="/"))

    if request.method != "POST":
        return render_template("login.html", **static_render_context())

    username = request.form.get("username")
    password = request.form.get("password")
    token = request.form.get("token")

    user = User.query.filter(User.username == username).first()
    if user is None:
        return render_login_failed()

    # Password login takes precedence; token login only works when the
    # user actually has a token hash stored.
    if password is not None:
        can_login = check_password_hash(user.password_hash, password)
    elif token is not None and user.token_hash is not None:
        can_login = check_password_hash(user.token_hash, token)
    else:
        can_login = False

    if not can_login:
        return render_login_failed()

    # remove old token if it exists
    Token.query.filter(Token.user == user.uuid).delete()

    token = Token(user=user.uuid, token=str(secrets.token_hex(16)))
    db.session.add(token)
    db.session.commit()

    resp = make_response(
        render_template("client_side_redirect.html", url="/"))
    # NOTE(review): cookies are set without httponly/secure flags —
    # confirm whether the front end needs JS access to them.
    resp.set_cookie("auth_token", token.token)
    resp.set_cookie("auth_username", username)
    return resp
def admin():
    """Admin page: create/delete users via POST, then render the list."""
    config_data = get_user_conf()
    # 401 whenever auth is enabled and the request is not authenticated.
    if not is_authenticated(request) and config_data["AUTH_ENABLED"]:
        return "", 401

    if request.method == "POST":
        form = request.form
        if "username" in form:
            username = form.get("username")
            password = form.get("password")
            existing = User.query.filter(User.username == username).first()
            if existing is not None:
                # Duplicate username.
                return "", 409
            db.session.add(
                User(
                    username=username,
                    password_hash=generate_password_hash(password),
                    uuid=str(uuid.uuid4()),
                ))
            db.session.commit()
        elif "delete_username" in form:
            username = form.get("delete_username")
            target = User.query.filter(User.username == username).first()
            if target is not None:
                db.session.delete(target)
                db.session.commit()

    context = static_render_context()
    data_json = {
        "users": [{"username": user.username} for user in User.query.all()]
    }
    context["data_json"] = json.dumps(data_json)

    return render_template("admin.html", **context)
def handle_login(redirect_type="client"):
    """Handle a login POST, issuing a token cookie and a redirect.

    NOTE(review): a non-POST request falls through without an explicit
    return (Flask then sees None) — confirm the route registration is
    POST-only.
    """
    config_data = get_user_conf()
    if not config_data["AUTH_ENABLED"]:
        return redirect_response("/", redirect_type)

    if request.method == "POST":
        username = request.form.get("username")
        password = request.form.get("password")
        token = request.form.get("token")

        user = User.query.filter(User.username == username).first()
        if user is None:
            return jsonify({"error": "User does not exist."}), 401

        # Password login takes precedence; token login only works when
        # the user actually has a token hash stored.
        if password is not None:
            can_login = check_password_hash(user.password_hash, password)
        elif token is not None and user.token_hash is not None:
            can_login = check_password_hash(user.token_hash, token)
        else:
            can_login = False

        if not can_login:
            return jsonify({"error": "Invalid credentials."}), 401

        # remove old token if it exists
        Token.query.filter(Token.user == user.uuid).delete()

        token = Token(user=user.uuid, token=str(secrets.token_hex(16)))
        db.session.add(token)
        db.session.commit()

        redirect_url = request.args.get("redirect_url", "/")
        resp = redirect_response(redirect_url, redirect_type)
        resp.set_cookie("auth_token", token.token)
        resp.set_cookie("auth_username", username)
        return resp
def login():
    """Password-only login endpoint issuing an auth token cookie."""
    config_data = get_user_conf()
    # Auth disabled: just send the user back to the root page.
    if not config_data['AUTH_ENABLED']:
        return make_response(
            render_template("client_side_redirect.html", url="/"))

    if request.method != "POST":
        return render_template("login.html", **static_render_context())

    username = request.form.get('username')
    password = request.form.get('password')

    user = User.query.filter(User.username == username).first()
    if user is None:
        return '', 401

    # NOTE(review): a missing password reaches check_password_hash as
    # None — confirm the login form always posts a password field.
    if not check_password_hash(user.password_hash, password):
        return '', 401

    # remove old token if it exists
    Token.query.filter(Token.user == user.uuid).delete()

    token = Token(user=user.uuid, token=str(secrets.token_hex(16)))
    db.session.add(token)
    db.session.commit()

    resp = make_response(
        render_template("client_side_redirect.html", url="/"))
    resp.set_cookie("auth_token", token.token)
    resp.set_cookie("auth_username", username)
    return resp
def create_app():
    """Build the Flask app, its SocketIO server and helper processes.

    Returns:
        (app, socketio, processes): the configured Flask application,
        the SocketIO wrapper, and the list of spawned child processes
        (file permission watcher, docker builder, log streamer).
    """
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)

    app = Flask(__name__)
    app.config.from_object(CONFIG_CLASS)

    socketio = SocketIO(app, cors_allowed_origins="*")
    # Silence the very chatty engineio logger.
    logging.getLogger("engineio").setLevel(logging.ERROR)

    # read directory mount based config into Flask config
    try:
        conf_data = get_user_conf()
        app.config.update(conf_data)
    except Exception as e:
        # Best effort: a missing/broken config.json leaves the defaults.
        logging.warning("Failed to load config.json")

    logging.info("Flask CONFIG: %s" % app.config)

    db.init_app(app)

    # according to SQLAlchemy will only create tables if they do not exist
    with app.app_context():
        db.create_all()
        initialize_default_images(db)
        initialize_default_datasources(db, app)
        logging.info("Initializing kernels")
        populate_kernels(app, db)

    # static file serving
    @app.route("/public/<path:path>")
    def send_files(path):
        return send_from_directory("../static", path)

    register_views(app, db)
    register_build_views(app, db, socketio)
    register_socketio_broadcast(db, socketio)

    # Telemetry is opt-out: only ping when the disable flag is absent
    # and we are not in development mode.
    if ("TELEMETRY_DISABLED" not in app.config
            and os.environ.get("FLASK_ENV") != "development"):
        # create thread for analytics
        scheduler = BackgroundScheduler()

        # send a ping now
        analytics_ping(app)

        # and every 15 minutes
        scheduler.add_job(
            analytics_ping,
            "interval",
            minutes=app.config["TELEMETRY_INTERVAL"],
            args=[app],
        )
        scheduler.start()

    processes = []
    # NOTE(review): process_start_gate() presumably prevents a double
    # start under the werkzeug reloader — confirm its semantics.
    if process_start_gate():
        file_dir = os.path.dirname(os.path.realpath(__file__))

        # TODO: reconsider file permission approach

        # file_permission_watcher process
        permission_process = Popen(
            [
                "python3",
                "-m",
                "scripts.file_permission_watcher",
                app.config["USER_DIR"],
            ],
            cwd=os.path.join(file_dir, ".."),
            stderr=subprocess.STDOUT,
        )
        logging.info("Started file_permission_watcher.py")
        processes.append(permission_process)

        # docker_builder process
        docker_builder_process = Popen(
            ["python3", "-m", "scripts.docker_builder"],
            cwd=os.path.join(file_dir, ".."),
            stderr=subprocess.STDOUT,
        )
        logging.info("Started docker_builder.py")
        processes.append(docker_builder_process)

        # log_streamer process
        log_streamer_process = Popen(
            ["python3", "-m", "scripts.log_streamer"],
            cwd=os.path.join(file_dir, ".."),
            stderr=subprocess.STDOUT,
        )
        logging.info("Started log_streamer.py")
        processes.append(log_streamer_process)

    return app, socketio, processes
def create_app():
    """Build the Flask app: config, DB migration, kernels, SPA serving.

    Returns:
        (app, socketio, processes): the configured Flask application,
        the SocketIO wrapper, and the list of spawned child processes
        (the log streamer).
    """
    app = Flask(__name__)
    app.config.from_object(CONFIG_CLASS)

    init_logging()

    socketio = SocketIO(app, cors_allowed_origins="*")

    if os.getenv("FLASK_ENV") == "development":
        app = register_teardown_request(app)

    # read directory mount based config into Flask config
    try:
        conf_data = get_user_conf()
        app.config.update(conf_data)
    except Exception:
        # Best effort: a missing/broken config.json leaves the defaults.
        app.logger.warning("Failed to load config.json")

    app.config["ORCHEST_REPO_TAG"] = get_repo_tag()

    # create thread for non-cpu bound background tasks, e.g. requests
    scheduler = BackgroundScheduler(
        job_defaults={
            # Infinite amount of grace time, so that if a task cannot be
            # instantly executed (e.g. if the webserver is busy) then it
            # will eventually be.
            "misfire_grace_time": 2**31,
            "coalesce": False,
            # So that the same job can be in the queue an infinite
            # amount of times, e.g. for concurrent requests issuing the
            # same tasks.
            "max_instances": 2**31,
        })
    app.config["SCHEDULER"] = scheduler
    scheduler.start()

    app.logger.info("Flask CONFIG: %s" % app.config)

    # Create the database if it does not exist yet. Roughly equal to a
    # "CREATE DATABASE IF NOT EXISTS <db_name>" call.
    if not database_exists(app.config["SQLALCHEMY_DATABASE_URI"]):
        create_database(app.config["SQLALCHEMY_DATABASE_URI"])

    db.init_app(app)
    ma.init_app(app)

    # necessary for migration
    Migrate().init_app(app, db)

    with app.app_context():
        # Alembic does not support calling upgrade() concurrently
        if not is_werkzeug_parent():
            # Upgrade to the latest revision. This also takes care of
            # bringing an "empty" db (no tables) on par.
            try:
                upgrade()
            except Exception as e:
                logging.error("Failed to run upgrade() %s [%s]"
                              % (e, type(e)))

            # On startup all kernels are freshed. This is because
            # updating Orchest might make the kernels in the
            # userdir/.orchest/kernels directory invalid.
            projs = Project.query.all()
            for proj in projs:
                try:
                    populate_kernels(app, db, proj.uuid)
                except Exception as e:
                    logging.error(
                        "Failed to populate kernels on startup"
                        " for project %s: %s [%s]"
                        % (proj.uuid, e, type(e)))

        # To avoid multiple removals in case of a flask --reload, so
        # that this code runs once per container.
        try:
            # mkdir acts as an atomic "already done" sentinel.
            os.mkdir("/tmp/jupyter_lock_removed")
            lock_path = os.path.join("/userdir",
                                     _config.JUPYTER_USER_CONFIG,
                                     "lab", ".bootlock")
            if os.path.exists(lock_path):
                app.logger.info("Removing dangling jupyter boot lock.")
                os.rmdir(lock_path)
        except FileExistsError:
            app.logger.info("/tmp/jupyter_lock_removed exists. "
                            " Not removing the lock again.")

    # Telemetry
    if not app.config["TELEMETRY_DISABLED"]:
        # initialize posthog
        posthog.api_key = base64.b64decode(
            app.config["POSTHOG_API_KEY"]).decode()
        posthog.host = app.config["POSTHOG_HOST"]

        # send a ping now
        analytics_ping(app)

        # and every 15 minutes
        scheduler.add_job(
            analytics_ping,
            "interval",
            minutes=app.config["TELEMETRY_INTERVAL"],
            args=[app],
        )

    # static file serving
    @app.route("/", defaults={"path": ""}, methods=["GET"])
    @app.route("/<path:path>", methods=["GET"])
    def index(path):
        # in Debug mode proxy to CLIENT_DEV_SERVER_URL
        if os.environ.get("FLASK_ENV") == "development":
            return _proxy(request,
                          app.config["CLIENT_DEV_SERVER_URL"] + "/")
        else:
            file_path = os.path.join(app.config["STATIC_DIR"], path)
            # Serve the file when it exists; otherwise fall back to the
            # SPA entry point so client-side routing works.
            if os.path.isfile(file_path):
                return send_from_directory(app.config["STATIC_DIR"], path)
            else:
                return send_from_directory(app.config["STATIC_DIR"],
                                           "index.html")

    register_views(app, db)
    register_orchest_api_views(app, db)
    register_background_tasks_view(app, db)
    register_socketio_broadcast(socketio)
    register_analytics_views(app, db)

    processes = []
    # Only the reloader child (or a plain run) spawns helpers, so the
    # werkzeug parent does not double-start them.
    if not is_werkzeug_parent():
        file_dir = os.path.dirname(os.path.realpath(__file__))

        # log_streamer process
        log_streamer_process = Popen(
            ["python3", "-m", "scripts.log_streamer"],
            cwd=os.path.join(file_dir, ".."),
            stderr=subprocess.STDOUT,
        )
        app.logger.info("Started log_streamer.py")
        processes.append(log_streamer_process)

    return app, socketio, processes
def create_app():
    """Build the Flask app and SocketIO server (early variant).

    Returns:
        (app, socketio): the configured Flask application and the
        SocketIO wrapper.
    """
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)

    app = Flask(__name__)
    app.config.from_object(CONFIG_CLASS)

    socketio = SocketIO(app, cors_allowed_origins="*")

    # read directory mount based config into Flask config
    try:
        conf_data = get_user_conf()
        app.config.update(conf_data)
    except Exception as e:
        # Best effort: a missing/broken config.json leaves the defaults.
        logging.warning("Failed to load config.json")

    logging.info("Flask CONFIG: %s" % app.config)

    db.init_app(app)

    # according to SQLAlchemy will only create tables if they do not exist
    with app.app_context():
        db.create_all()
        initialize_default_images(db)
        logging.info("Initializing kernels")
        populate_kernels(app, db)

    # static file serving
    @app.route('/public/<path:path>')
    def send_files(path):
        return send_from_directory("../static", path)

    register_views(app, db)
    register_build_views(app, db, socketio)

    # Telemetry is opt-out: ping only while the disable flag is absent.
    if "TELEMETRY_DISABLED" not in app.config:
        # create thread for analytics
        scheduler = BackgroundScheduler()

        # send a ping now
        analytics_ping(app)

        # and every 15 minutes
        scheduler.add_job(analytics_ping, 'interval',
                          minutes=app.config["TELEMETRY_INTERVAL"],
                          args=[app])
        scheduler.start()

    # Start threaded file_permission_watcher
    # TODO: reconsider file permission approach
    # Note: process is never cleaned up, this is permissible because it's only
    # executed inside a container.
    watcher_file = "/tmp/file_permission_watcher_active"

    # guarantee no two python file permission watchers are started
    if not os.path.isfile(watcher_file):
        with open(watcher_file, "w") as file:
            file.write("1")

        file_dir = os.path.dirname(os.path.realpath(__file__))
        permission_process = Popen([
            os.path.join(file_dir, "scripts",
                         "file_permission_watcher.py"),
            app.config["USER_DIR"]
        ])
        logging.info("Started file_permission_watcher.py")

    return app, socketio
def create_app():
    """Build the Flask app, SocketIO server and helper processes.

    Returns:
        (app, socketio, processes): the configured Flask application,
        the SocketIO wrapper, and the list of spawned child processes
        (file permission watcher, log streamer).
    """
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)

    app = Flask(__name__)
    app.config.from_object(CONFIG_CLASS)

    socketio = SocketIO(app, cors_allowed_origins="*")
    # Silence the very chatty engineio logger.
    logging.getLogger("engineio").setLevel(logging.ERROR)

    # read directory mount based config into Flask config
    try:
        conf_data = get_user_conf()
        app.config.update(conf_data)
    except Exception as e:
        # Best effort: a missing/broken config.json leaves the defaults.
        logging.warning("Failed to load config.json")

    app.config["ORCHEST_REPO_TAG"] = get_repo_tag()

    # create thread for non-cpu bound background tasks, e.g.
    # requests
    scheduler = BackgroundScheduler()
    app.config["SCHEDULER"] = scheduler

    logging.info("Flask CONFIG: %s" % app.config)

    db.init_app(app)
    ma.init_app(app)

    # according to SQLAlchemy will only create tables if they do not exist
    with app.app_context():
        db.create_all()

        # this class is only serialized on disk
        # see the Environment model
        # to avoid errors like querying the Environments table
        # to see if there is any environemnt (and getting
        # no environments while they might actually be there), the
        # table is deleted, so that it will produce a ~loud~ error
        Environment.__table__.drop(db.engine)

        initialize_default_datasources(db, app)

    # Telemetry
    if not app.config["TELEMETRY_DISABLED"]:
        # initialize posthog
        posthog.api_key = base64.b64decode(
            app.config["POSTHOG_API_KEY"]).decode()
        posthog.host = app.config["POSTHOG_HOST"]

        # send a ping now
        analytics_ping(app)

        # and every 15 minutes
        scheduler.add_job(
            analytics_ping,
            "interval",
            minutes=app.config["TELEMETRY_INTERVAL"],
            args=[app],
        )

    scheduler.start()

    # static file serving
    @app.route("/public/<path:path>")
    def send_files(path):
        return send_from_directory("../static", path)

    register_views(app, db)
    register_orchest_api_views(app, db)
    register_background_tasks_view(app, db)
    register_socketio_broadcast(db, socketio)
    register_analytics_views(app, db)

    processes = []
    # NOTE(review): process_start_gate() presumably prevents a double
    # start under the werkzeug reloader — confirm its semantics.
    if process_start_gate():
        file_dir = os.path.dirname(os.path.realpath(__file__))

        # TODO: reconsider file permission approach

        # file_permission_watcher process
        permission_process = Popen(
            [
                "python3",
                "-m",
                "scripts.file_permission_watcher",
                app.config["USER_DIR"],
            ],
            cwd=os.path.join(file_dir, ".."),
            stderr=subprocess.STDOUT,
        )
        logging.info("Started file_permission_watcher.py")
        processes.append(permission_process)

        # log_streamer process
        log_streamer_process = Popen(
            ["python3", "-m", "scripts.log_streamer"],
            cwd=os.path.join(file_dir, ".."),
            stderr=subprocess.STDOUT,
        )
        logging.info("Started log_streamer.py")
        processes.append(log_streamer_process)

    return app, socketio, processes
def create_app():
    """Build the Flask app: config, DB migration, datasources, telemetry.

    Returns:
        (app, socketio, processes): the configured Flask application,
        the SocketIO wrapper, and the list of spawned child processes
        (the log streamer).
    """
    app = Flask(__name__)
    app.config.from_object(CONFIG_CLASS)

    init_logging()

    socketio = SocketIO(app, cors_allowed_origins="*")

    if os.getenv("FLASK_ENV") == "development":
        app = register_teardown_request(app)

    # read directory mount based config into Flask config
    try:
        conf_data = get_user_conf()
        app.config.update(conf_data)
    except Exception:
        # Best effort: a missing/broken config.json leaves the defaults.
        app.logger.warning("Failed to load config.json")

    app.config["ORCHEST_REPO_TAG"] = get_repo_tag()

    # create thread for non-cpu bound background tasks, e.g. requests
    scheduler = BackgroundScheduler(
        job_defaults={
            # Infinite amount of grace time, so that if a task cannot be
            # instantly executed (e.g. if the webserver is busy) then it
            # will eventually be.
            "misfire_grace_time": 2**31,
            "coalesce": False,
            # So that the same job can be in the queue an infinite
            # amount of times, e.g. for concurrent requests issuing the
            # same tasks.
            "max_instances": 2**31,
        })
    app.config["SCHEDULER"] = scheduler
    scheduler.start()

    app.logger.info("Flask CONFIG: %s" % app.config)

    # Create the database if it does not exist yet. Roughly equal to a
    # "CREATE DATABASE IF NOT EXISTS <db_name>" call.
    if not database_exists(app.config["SQLALCHEMY_DATABASE_URI"]):
        create_database(app.config["SQLALCHEMY_DATABASE_URI"])

    db.init_app(app)
    ma.init_app(app)

    # necessary for migration
    Migrate().init_app(app, db)

    with app.app_context():
        # Alembic does not support calling upgrade() concurrently
        if not is_werkzeug_parent():
            # Upgrade to the latest revision. This also takes care of
            # bringing an "empty" db (no tables) on par.
            try:
                upgrade()
            except Exception as e:
                logging.error("Failed to run upgrade() %s [%s]"
                              % (e, type(e)))

        initialize_default_datasources(db, app)

    # Telemetry
    if not app.config["TELEMETRY_DISABLED"]:
        # initialize posthog
        posthog.api_key = base64.b64decode(
            app.config["POSTHOG_API_KEY"]).decode()
        posthog.host = app.config["POSTHOG_HOST"]

        # send a ping now
        analytics_ping(app)

        # and every 15 minutes
        scheduler.add_job(
            analytics_ping,
            "interval",
            minutes=app.config["TELEMETRY_INTERVAL"],
            args=[app],
        )

    # static file serving
    @app.route("/public/<path:path>")
    def send_files(path):
        return send_from_directory("../static", path)

    register_views(app, db)
    register_orchest_api_views(app, db)
    register_background_tasks_view(app, db)
    register_socketio_broadcast(db, socketio)
    register_analytics_views(app, db)

    processes = []
    # Only spawn helpers outside the werkzeug reloader parent so they
    # are not double-started.
    if not is_werkzeug_parent():
        file_dir = os.path.dirname(os.path.realpath(__file__))

        # log_streamer process
        log_streamer_process = Popen(
            ["python3", "-m", "scripts.log_streamer"],
            cwd=os.path.join(file_dir, ".."),
            stderr=subprocess.STDOUT,
        )
        app.logger.info("Started log_streamer.py")
        processes.append(log_streamer_process)

    return app, socketio, processes
# Entry point: build the Flask app and run the development server.
from config import CONFIG_CLASS

from app import create_app
from app.utils import get_user_conf

# Directory-mounted user config overrides the class-based defaults.
conf_data = get_user_conf()

# NOTE(review): other create_app variants in this project take no
# arguments and return (app, socketio, processes) — confirm this
# create_app accepts config_class and returns a bare app object.
app = create_app(config_class=CONFIG_CLASS)
app.config.update(conf_data)

if __name__ == "__main__":
    # Bind on all interfaces; the container maps port 80.
    app.run(host="0.0.0.0", port=80)