def login():
    """Render the startup/index page, flagging whether setup has finished."""
    gc = GlobalConfig()
    # keep the original strict `== True` comparison: the stored value may be
    # a string ("True"), in which case the flag must stay 0
    login_flag = int(gc.get("init_super_admin") == True)
    return render_template("/startup/index.html",
                           login_flag=login_flag,
                           version=version)
def __init__(self):
    """Initialize database-environment configuration keys.

    Seeds sqlite-flavoured default values into the global-config store,
    but only while the starter wizard has not completed yet.
    """
    GlobalConfig.__init__(self)
    self.__keys = (
        "db_type",
        "db_name",
        "db_mysql_ip",
        "db_mysql_username",
        "db_mysql_password",
    )
    # seed defaults only before initialization has finished
    if self.get("init_super_admin") == False:
        default_values = {
            "db_type": "sqlite",
            "db_name": "ob_panel",
            "db_mysql_ip": "127.0.0.1",
            "db_mysql_username": "******",
            "db_mysql_password": ""
        }
        self.gdb.init_data(default_values)
    self._logger = logging.getLogger("ob_panel")
def init_database(logger=None):
    """Bind SQLAlchemy to the configured database and create all tables.

    Builds the database URI from DatabaseEnv settings (sqlite or mysql),
    caches it in the global config, points Flask-SQLAlchemy at it, and
    creates the database and tables when they do not exist yet.  Does
    nothing (beyond a warning) while the starter wizard is unfinished.

    :param logger: optional logger; defaults to the module-level g_logger.
    """
    gc = GlobalConfig.getInstance()
    db_env = DatabaseEnv()
    # idiom fix: identity comparison with None instead of `== None`
    if logger is None:
        logger = g_logger
    db_type = db_env.getDatabaseType()
    if gc.get("init_super_admin") == True:
        if db_type == "sqlite":
            database_uri = "sqlite:///%s/%s.db" % (db_env.get("sqlite_dir"),
                                                   db_env.get("db_name"))
        # any non-sqlite type is treated as mysql
        else:
            database_uri = "mysql+pymysql://%s:%s@%s/%s" % (
                db_env.get("db_mysql_username"),
                db_env.get("db_mysql_password"),
                db_env.get("db_mysql_ip"),
                db_env.get("db_name"))
        gc.set("database_uri", database_uri)
        # let SQLAlchemy know the database URI
        app.config["SQLALCHEMY_DATABASE_URI"] = database_uri
        if not database_exists(database_uri):
            create_database(database_uri)
        db.create_all(app=app)
        db.session.commit()
    else:
        logger.warning(
            "Main database NOT initialized as starter configuration not finished yet."
        )
def upload_logo(uid, priv):
    """Save an uploaded logo image under a random name.

    Reads the ``file`` part of the multipart request, checks the
    extension, stores it in the uploads directory under an md5-derived
    random filename and returns that filename; error 500 otherwise.

    :param uid: requesting user's id (unused here, route signature).
    :param priv: requesting user's privilege (unused here).
    """
    def allowed_file(filename):
        # BUG FIX: compare the extension case-insensitively so that
        # ".PNG"/".Jpg" uploads are accepted too
        return '.' in filename and filename.rsplit(
            '.', 1)[1].lower() in ['png', 'jpeg', 'jpg']
    rtn = returnModel("string")
    gc = GlobalConfig.getInstance()
    try:
        if 'file' not in request.files:
            return rtn.error(500)
        # get file object
        file = request.files['file']
        if file.filename == "":
            return rtn.error(500)
        if file and allowed_file(file.filename):
            # random salt prevents collisions between identical filenames
            filename = md5(file.filename.encode() + os.urandom(8)).hexdigest()
            file.save(os.path.join(gc.get("uploads_dir"), filename))
            return rtn.success(filename)
        return rtn.error(500)
    except Exception as e:
        logger.error(traceback.format_exc())
        return rtn.error(500)
def __reboot_once():
    """Restart the process once if the restart lock is set, then report 200."""
    gc = GlobalConfig.getInstance()
    lock_set = (gc.get("_RESTART_LOCK") == True)
    if lock_set:
        # the lock value is stored as the string "False"
        gc.set("_RESTART_LOCK", "False")
        _restart_process()
    return rtn.success(200)
def starter_finish():
    """Complete the startup wizard.

    Persists the port configuration, records the chosen database backend
    (sqlite or mysql), marks initialization as done, initializes the main
    database and inserts the superadmin account data.

    Returns success(True), or error 500 (config write / unexpected
    failure), 409 (user-data insert or MySQL credential failure),
    402 (unknown db_env value).
    """
    try:
        F = request.json
        gc = GlobalConfig.getInstance()
        db = DatabaseEnv()
        db_env = F.get("db_env")
        usr_data = {
            "username": F.get("username"),
            "email": F.get("email"),
            "password": F.get("password")
        }
        port_config = {
            "app_port": F.get("app_port"),
            "ftp_port": F.get("ftp_port"),
            "msgQ_port": F.get("msgQ_port"),
            "redis_port": F.get("redis_port"),
            "pm_port": F.get("pm_port")
        }
        if not dump_yaml_config(port_config):
            return rtn.error(500)
        if db_env == "sqlite":
            db.setDatabaseType("sqlite")
            # set init flag = True (stored as strings in the config store)
            gc.set("init_super_admin", "True")
            gc.set("_RESTART_LOCK", "True")
            # then init database
            init_database()
            if init_db_data(usr_data):
                return rtn.success(True)
            else:
                return rtn.error(409)
        elif db_env == "mysql":
            db.setDatabaseType("mysql")
            _u = F.get("mysql_username")
            _p = F.get("mysql_password")
            # verify credentials before committing to mysql
            if db.testMySQLdb(_u, _p) == True:
                gc.set("init_super_admin", "True")
                gc.set("_RESTART_LOCK", "True")
                db.setMySQLinfo(_u, _p)
                init_database()
                if init_db_data(usr_data):
                    return rtn.success(True)
                else:
                    return rtn.error(409)
            else:
                return rtn.error(409)
        else:
            return rtn.error(402)
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed
        traceback.print_exc()
        return rtn.error(500)
def preview_server_logo(uid, priv, logo):
    """Serve a previously uploaded server logo file, or 404 when missing.

    NOTE(review): `logo` comes from the request and is joined into a
    filesystem path unchecked — confirm upstream sanitization against
    path traversal.
    """
    gc = GlobalConfig.getInstance()
    logo_path = os.path.join(gc.get("uploads_dir"), logo)
    if not os.path.exists(logo_path):
        abort(404)
    return send_file(logo_path)
def index():
    """Redirect to the startup wizard or the dashboard based on init state."""
    gc = GlobalConfig.getInstance()
    setup_flag = gc.get("init_super_admin")
    # membership test uses ==, matching the original None/False comparisons
    if setup_flag in (None, False):
        return redirect("/startup")
    return redirect("/server_inst/dashboard")
def _init_proc_pool(self):
    """Load every stored server instance into the process pool.

    :return: True when instances were loaded, None when the database has
        not been initialized yet (or the query result is unusable).
    """
    gc = GlobalConfig()
    # the instance tables only exist once the starter wizard has finished
    if gc.get("init_super_admin") != True:
        return None
    # import dependencies here to prevent circular import
    from app import db
    from app.model import ServerInstance, JavaBinary, ServerCORE
    rows = (db.session.query(ServerInstance)
            .join(JavaBinary)
            .join(ServerCORE)
            .all())
    if rows == None:
        return None
    for row in rows:
        self._add_instance_to_pool(row)
    return True
def __init__(self):
    """Wire up the FTP handler/authorizer pair and load accounts if ready."""
    self.handler = FTPHandler
    self.authorizer = MD5Authorizer()
    self.handler.authorizer = self.authorizer
    # server state, filled in later when the server actually starts
    self.server = None
    self.server_process = None
    self.listening_port = None
    self.login_msg = "Login Successful"
    self.quit_msg = "GoodBye"
    # read global config
    self._global_config = GlobalConfig.getInstance()
    # only load account data once the main database is initialized
    if self._get_initdb_status():
        self._update_account_data()
def init_directory():
    """Create every working directory the panel needs, if missing.

    Reads the directory paths from the global config and creates each
    one, including intermediate parents.
    """
    gc = GlobalConfig.getInstance()
    dirs = [
        gc.get("base_dir"),
        gc.get("uploads_dir"),
        gc.get("files_dir"),
        gc.get("servers_dir"),
        gc.get("lib_bin_dir"),
        gc.get("sqlite_dir"),
    ]
    for item in dirs:
        # exist_ok=True removes the isdir()/makedirs() TOCTOU race of the
        # old code; an existing non-directory still raises, as before
        os.makedirs(item, exist_ok=True)
def _inst_directory(inst_id):
    '''
    Return the filesystem directory reserved for instance *inst_id*.

    The name combines the owner's username with the instance id,
    e.g. ``<servers_dir>/alice_42``.

    NOTE(review): `uid` is a free variable here — presumably bound by an
    enclosing scope; confirm at the call site.
    '''
    gc = GlobalConfig.getInstance()
    servers_dir = gc.get("servers_dir")
    owner = db.session.query(Users).filter(Users.id == uid).first()
    dir_name = "%s_%s" % (owner.username, inst_id)
    logger.debug("[user_inst] dir_name = %s" % dir_name)
    return os.path.join(servers_dir, dir_name)
def _add_java_task(link, download_dir, binary_dir, version_pair):
    '''
    add task of downloading java, with hooks.

    Creates a download task for *link*, disables SSL verification, sets the
    Oracle license-acceptance cookie, forces single-threaded download, and
    attaches finish/error hooks before starting the task.

    NOTE(review): relies on names from an enclosing scope: `self`,
    `_send_dw_signal`, `_extract_file`, `_utils`, `logger` — and the inner
    callbacks close over `hash`, which is assigned below in this function.

    :return: (<instance>, <download_hash>)
    '''
    def _send_finish_event(download_result, filename):
        # download-finished hook: stop the progress scheduler and notify
        # close scheduler
        sch_job = self.tasks_pool.get(hash).get("sch_job")
        if sch_job != None:
            sch_job.remove()
        # send finish event
        self.tasks_pool.update(hash, status=_utils.FINISH)
        _send_dw_signal("_download_finish", hash, True)
    def _network_error(e):
        # network-error hook: stop the scheduler and mark the task failed
        # remove scheduler
        sch_job = self.tasks_pool.get(hash).get("sch_job")
        logger.debug("Network Error! sch:%s" % sch_job)
        if sch_job != None:
            sch_job.remove()
        self.tasks_pool.update(hash, status=_utils.FAIL)
        _send_dw_signal("_download_finish", hash, False)
    dp = DownloaderPool.getInstance()
    # `hash` assigned here is what the callbacks above close over
    inst, hash = dp.newTask(link, download_dir=download_dir)
    # add cookies to download java directly
    inst.disableSSLCert()
    inst.setHeaders(
        {"Cookie": "oraclelicense=accept-securebackup-cookie"})
    # Since multi thread is not stable here,
    # we decided to use only one thread to download it
    inst.set_force_singlethread(True)
    # global config
    gc = GlobalConfig.getInstance()
    # NOTE(review): root_dir is assigned but never used in this function
    root_dir = gc.get("lib_bin_dir")
    # add hook
    inst.addDownloadFinishHook(_send_finish_event)
    inst.addDownloadFinishHook(_extract_file)
    inst.addNetworkErrorHook(_network_error)
    dp.start(hash)
    return inst, hash
def _check_user(self, environment):
    """Resolve the requesting user's (privilege, uid) from the session cookie.

    :param environment: request environment containing ``headers_raw`` as
        a sequence of (key, value) header pairs.
    :return: (privilege, uid); (1, 0) while initialization is unfinished;
        (None, None) when no valid session token is present.
    """
    def _construct_cookie(headers_raw):
        '''
        Parse the Cookie header out of raw header pairs.

        format: ((<key>,<value>), .. )
        For cookies: ('Cookie', 'A=B; C=D')
        :param headers_raw: sequence of (name, value) header tuples
        :return: dict mapping cookie name -> value
        '''
        cookies = {}
        for _key, _val in headers_raw:
            if _key.lower() != "cookie":
                continue
            # BUG FIX: the old code split on ' ' and kept every value's
            # trailing ';' (e.g. 'A=B; C=D' yielded {'A': 'B;', ...}).
            # Split on ';' and strip whitespace instead; the name/value
            # separator is the FIRST '=' (cookie names cannot contain '=').
            for _cookie_item in _val.split(";"):
                name, sep, value = _cookie_item.strip().partition("=")
                if sep and name and value:
                    cookies[name] = value
            break
        return cookies
    gc = GlobalConfig.getInstance()
    # initialization not finished yet: return the fixed pair (1, 0)
    if gc.get("init_super_admin") == False:
        return (1, 0)
    # after initialization
    cookies = _construct_cookie(environment["headers_raw"])
    _token = cookies.get("session_token")
    if _token is None:
        return (None, None)
    user = db.session.query(UserToken).join(Users).filter(
        UserToken.token == _token).first()
    if user is None:
        return (None, None)
    else:
        priv = user.ob_user.privilege
        uid = user.uid
        return (priv, uid)
def get_my_ip(uid, priv):
    """Return this server's public IP address, caching it in global config.

    Queries whatismyip.akamai.com only when no cached value exists; the
    cache key is ``my_ip_address`` (empty string means "not cached").
    """
    gc = GlobalConfig()
    _url = "http://whatismyip.akamai.com/"
    cached = gc.get("my_ip_address")
    if cached != "":
        return rtn.success(cached)
    req = Request(url=_url)
    # BUG FIX: close the HTTP response; the old code leaked the socket
    with urlopen(req) as resp:
        ip_addr = resp.read().decode()
    # store ip address into cache
    gc.set("my_ip_address", ip_addr)
    return rtn.success(ip_addr)
def _set_inst_directory(self):
    '''
    Compute the directory for the instance about to be created.

    The next instance id is predicted as max(inst_id) + 1 and combined
    with the owner's username, e.g. ``<servers_dir>/alice_5``.
    :return: absolute path of the instance directory
    '''
    gc = GlobalConfig.getInstance()
    servers_dir = gc.get("servers_dir")
    owner = db.session.query(Users).filter(
        Users.id == self.owner_id).first()
    last_id = db.session.query(db.func.max(
        ServerInstance.inst_id)).scalar()
    # scalar() yields None on an empty table
    if last_id == None:
        last_id = 0
    dir_name = "%s_%s" % (owner.username, last_id + 1)
    logger.debug("[user_inst] dir_name = %s" % dir_name)
    return os.path.join(servers_dir, dir_name)
def detect_java_environment():
    """Collect system- and user-installed Java environments.

    :return: success payload ``{"system": [...], "user": [...]}`` where
        each entry is ``{"name": ..., "dir": "(<path>)"}``; error 500 on
        any failure.
    """
    rtn = returnModel("string")
    gc = GlobalConfig.getInstance()
    try:
        env = JavaEnv()
        java_envs = {"system": [], "user": []}
        __dir, __ver = env.findSystemJavaInfo()
        # idiom fix: identity comparison with None
        if __dir is not None:
            _model = {"name": "java", "dir": "(%s)" % __dir}
            java_envs.get("system").append(_model)
        _arr = env.findUserJavaInfo()
        for java_ver in _arr:
            _model = {
                "name": "JDK %s" % java_ver['version'],
                "dir": "(%s)" % java_ver["dir"]
            }
            java_envs.get("user").append(_model)
        return rtn.success(java_envs)
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed
        return rtn.error(500)
def add_download_java_task(self, download_link, binary_dir, version_pair, uid):
    '''
    when accessing this route, a new JDK starts downloading in the background.
    Due to the limitation of current technology, we only allow one file to
    download at the same time.

    request params: [POST]
    :major: <major version of java>
    :minor: <minor version of java>

    Orchestration: creates a download task with finish/error hooks, then a
    1-second scheduler job that reports progress over the websocket proxy.
    The nested callbacks close over `hash` / `root_dir` defined below, so
    statement order here is significant.
    '''
    gc = GlobalConfig()
    root_dir = gc.get("lib_bin_dir")
    major_ver, minor_ver = version_pair
    def _schedule_get_progress(self, hash):
        # scheduler callback: poll the downloader and broadcast progress
        # fetch and update data
        dp = DownloaderPool.getInstance()
        _t = dp.get(hash)
        if _t != None:
            inst = _t.dl
            _dw, _filesize = inst.getProgress()
            # update data on download_queue
            if _filesize > 0 and _dw != None and _filesize != None:
                self.tasks_pool.update(hash, progress=_dw / _filesize)
                _send_dw_signal("_get_progress", hash, (_dw, _filesize))
    def _send_dw_signal(event_name, hash, result):
        # push a download event to the websocket side via the MQ proxy
        values = {
            "event": event_name,
            "hash": hash,
            "result": result,
            "uid": uid
        }
        self.proxy.send("websocket.dw_response", values, WS_TAG.APP,
                        reply=False)
    def _extract_file(download_result, filename):
        # download-finished hook: untar the archive and record the JDK in DB
        # for abnormal input parameters(like empty filename), the only thing
        # is to terminate next steps!
        if download_result == False or filename == None:
            return None
        logger.debug("Download Result: %s" % download_result)
        logger.debug("Start Extracting File...")
        # send extract_start event
        self.tasks_pool.update(hash, status=_utils.EXTRACTING)
        _send_dw_signal("_extract_start", hash, True)
        # run tar command
        cmd = "tar -xzf %s -C %s" % (filename, root_dir)
        # open archive
        #archive = tarfile.open(filename)
        p = subprocess.Popen(cmd, shell=True)
        rc = p.wait()
        # If untar file error
        if rc != 0:
            self.tasks_pool.update(hash, status=_utils.EXTRACT_FAIL)
            # send extract_finish event (when extract failed)
            _send_dw_signal("_extract_finish", hash, False)
            return None
        logger.debug("extract dir: %s, finish!" % root_dir)
        try:
            # save the version info into the database
            version_data = JavaBinary(major_version=major_ver,
                                      minor_version=minor_ver,
                                      bin_directory=os.path.join(
                                          root_dir, binary_dir),
                                      install_time=datetime.now())
            db.session.add(version_data)
            db.session.commit()
        except:
            # writing database error
            logger.error(traceback.format_exc())
            self.tasks_pool.update(hash, status=_utils.FAIL)
            # delete scheduler
            sch_job = self.tasks_pool.get(hash).get("sch_job")
            if sch_job != None:
                sch_job.remove()
            _send_dw_signal("_download_finish", hash, False)
            return
        self.tasks_pool.update(hash, status=_utils.FINISH)
        _send_dw_signal("_extract_finish", hash, True)
    def _add_java_task(link, download_dir, binary_dir, version_pair):
        '''
        add task of downloading java, with hooks.
        :return: (<instance>, <download_hash>)
        '''
        def _send_finish_event(download_result, filename):
            # close scheduler
            sch_job = self.tasks_pool.get(hash).get("sch_job")
            if sch_job != None:
                sch_job.remove()
            # send finish event
            self.tasks_pool.update(hash, status=_utils.FINISH)
            _send_dw_signal("_download_finish", hash, True)
        def _network_error(e):
            # remove scheduler
            sch_job = self.tasks_pool.get(hash).get("sch_job")
            logger.debug("Network Error! sch:%s" % sch_job)
            if sch_job != None:
                sch_job.remove()
            self.tasks_pool.update(hash, status=_utils.FAIL)
            _send_dw_signal("_download_finish", hash, False)
        dp = DownloaderPool.getInstance()
        inst, hash = dp.newTask(link, download_dir=download_dir)
        # add cookies to download java directly
        inst.disableSSLCert()
        inst.setHeaders(
            {"Cookie": "oraclelicense=accept-securebackup-cookie"})
        # Since multi thread is not stable here,
        # we decided to use only one thread to download it
        inst.set_force_singlethread(True)
        # global config
        gc = GlobalConfig.getInstance()
        root_dir = gc.get("lib_bin_dir")
        # add hook
        inst.addDownloadFinishHook(_send_finish_event)
        inst.addDownloadFinishHook(_extract_file)
        inst.addNetworkErrorHook(_network_error)
        dp.start(hash)
        return inst, hash
    try:
        gc = GlobalConfig()
        files_dir = gc.get("files_dir")
        link = download_link
        # NOTE(review): the next two statements are no-op self-assignments
        # kept from the original code
        binary_dir = binary_dir
        # version_pair : (major_version, minor_version)
        # e.g.: (8, 102)
        version_pair = version_pair
        if link != None:
            # only one task per link may run at a time
            if self.tasks_pool.has_working_link(link):
                _send_dw_signal("_download_start", None, None)
                return
            # create new task and download
            inst, hash = _add_java_task(link, files_dir, binary_dir,
                                        version_pair)
            self.tasks_pool.add(hash, link)
            # start progress scheduler
            if not self.scheduler.running:
                self.scheduler.start()
            sch_job = self.scheduler.add_job(_schedule_get_progress,
                                             'interval',
                                             seconds=1,
                                             args=[self, hash])
            self.tasks_pool.update(hash, sch_job=sch_job)
            self.tasks_pool.update(hash, status=_utils.DOWNLOADING)
            _send_dw_signal("_download_start", hash, link)
        else:
            _send_dw_signal("_download_start", None, None)
    except:
        logger.error(traceback.format_exc())
def new_Minecraft_instance(uid, priv):
    '''
    create a new MC Server instance.

    So How to create a new instance?
    0. Name it.
    1. Select the Server Core File (or upload it by user?)
    2. Select Java Version
    3. Set server.properties
    4. Upload Mods & Plugins (If necessary)
    5. Go For It!

    Gathers the data needed by the "new instance" form: all installed Java
    versions (with the configured default pre-selected), all uploaded
    server cores, and a freshly generated, unique FTP account name.
    :return: success payload with java_versions / server_cores /
        FTP_account_name, or error 500.
    '''
    try:
        gc = GlobalConfig()
        # get all versions of java
        java_versions = []
        java_versions_obj = db.session.query(JavaBinary).all()
        default_java_binary_id = int(gc.get("default_java_binary_id"))
        for item in java_versions_obj:
            # display name follows the JDK convention, e.g. "1.8.0_102"
            _model = {
                "name": "1.%s.0_%s" % (item.major_version,
                                       item.minor_version),
                "index": item.id,
                "selected": ""
            }
            # mark the configured default so the UI pre-selects it
            if item.id == default_java_binary_id:
                _model['selected'] = "selected"
            java_versions.append(_model)
        # get all info of server core
        server_cores = []
        server_cores_obj = db.session.query(ServerCORE).all()
        for item in server_cores_obj:
            if item.core_version != None and item.core_version != "":
                _name = "%s-%s-%s" % (item.core_type, item.core_version,
                                      item.minecraft_version)
            else:
                _name = "%s-%s" % (item.core_type, item.minecraft_version)
            _model = {"name": _name, "index": item.core_id}
            server_cores.append(_model)
        # ...and generate an FTP account.
        user_name_obj = db.session.query(Users).filter(
            Users.id == uid).first()
        # try at most 30 random suffixes to find an unused FTP username
        _safe_index = 0
        while _safe_index < 30:
            _safe_index += 1
            ftp_user_name = "%s_%s" % (user_name_obj.username,
                                       generate_random_string(3))
            if db.session.query(FTPAccount).filter(
                    FTPAccount.username == ftp_user_name).first() == None:
                break
        rtn_model = {
            "java_versions": java_versions,
            "server_cores": server_cores,
            "FTP_account_name": ftp_user_name
        }
        return rtn.success(rtn_model)
    except:
        return rtn.error(500)
def submit_new_inst(uid, priv):
    '''
    Create a new server instance from the submitted JSON form.

    Reads instance settings from request.json, builds the instance via
    UserInstance (port, name, java binary, RAM, server core, properties),
    moves the uploaded logo into the instance directory, and creates the
    associated FTP account.  Returns the new instance id on success,
    error 500 on failure.
    '''
    def _inst_directory(inst_id):
        '''
        In order to create a new instance, we have to create an individual
        space to store files first.
        :return: the instance's directory path
        '''
        gc = GlobalConfig.getInstance()
        servers_dir = gc.get("servers_dir")
        owner = db.session.query(Users).filter(Users.id == uid).first()
        owner_name = owner.username
        dir_name = "%s_%s" % (owner_name, inst_id)
        logger.debug("[user_inst] dir_name = %s" % dir_name)
        return os.path.join(servers_dir, dir_name)
        pass
    rtn = returnModel("string")
    gc = GlobalConfig.getInstance()
    try:
        F = request.json
        inst_name = F.get("inst_name")
        core_file_id = F.get("core_file_id")
        java_bin_id = F.get("java_bin_id")
        listening_port = F.get("listening_port")
        auto_port_dispatch = F.get("auto_port_dispatch")
        # unit: GiB
        max_RAM = F.get("max_RAM")
        max_user = F.get("max_user")
        # json format
        server_properties = F.get("server_properties")
        # logo url
        logo_url = F.get("logo_url")
        # set encoded motd content
        motd = F.get("motd")
        # FTP account
        FTP_account_name = F.get("ftp_account")
        FTP_default_password = (F.get("ftp_default_password") == True)
        FTP_password = F.get("ftp_password")
        i = UserInstance(uid)
        try:
            # either let the system pick a port or use the submitted one
            if auto_port_dispatch:
                i.set_listening_port()
            else:
                i.set_listening_port(listening_port)
            i.set_instance_name(inst_name)
            i.set_java_bin(java_bin_id)
            # max_RAM arrives in GiB; UserInstance expects MiB
            i.set_allocate_RAM(int(max_RAM) * 1024)
            i.set_server_core(core_file_id)
            i.set_max_user(max_user)
            properties_json = json.loads(server_properties)
            properties_json["motd"] = motd
            # 'true' -> True , 'false' -> False
            for key in properties_json:
                if properties_json.get(key) == 'true':
                    properties_json[key] = True
                elif properties_json.get(key) == 'false':
                    properties_json[key] = False
            i.set_instance_properties(properties_json)
            inst_id = i.create_inst()
            # move logo
            if logo_url != None and logo_url != "":
                logo_file_name = os.path.join(gc.get("uploads_dir"),
                                              logo_url)
                if os.path.exists(logo_file_name):
                    shutil.move(
                        logo_file_name,
                        os.path.join(_inst_directory(inst_id),
                                     "server-icon.png"))
            # create FTP account
            ftp_controller = FTPController()
            if not FTP_default_password:
                _ftp_password = FTP_password
            else:
                _ftp_password = None
            ftp_controller.create_account(uid,
                                          FTP_account_name,
                                          inst_id,
                                          ftp_password=_ftp_password)
            return rtn.success(inst_id)
            # return redirect("/server_inst/dashboard/%s" % inst_id)
        except:
            logger.error(traceback.format_exc())
            return rtn.error(500)
    except Exception as e:
        logger.error(traceback.format_exc())
        return rtn.error(500)
def decorated_function(*args, **kwargs):
    """Block the wrapped view with 403 once the starter setup has completed."""
    gc = GlobalConfig.getInstance()
    # negated guard: only while setup is unfinished does the view run
    if gc.get("init_super_admin") != True:
        return fn(*args, **kwargs)
    return abort(403)
def upload_core_file(uid, priv):
    """Accept an uploaded server-core .jar and register it in the database.

    Reads version metadata from the form, stores the file (renamed with an
    ``x<n>-`` prefix on name collisions) and records a ServerCORE row.

    Returns success(200), error 404 (empty filename), 411 (bad extension),
    500 (unexpected failure).
    """
    def _allowed_file(filename):
        # BUG FIX: compare the extension case-insensitively (".JAR" too)
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ['jar']
    try:
        gc = GlobalConfig.getInstance()
        F = request.form
        upload_dir = gc.get("files_dir")
        mc_version = F.get("mc_version")
        file_version = F.get("file_version")
        description = F.get("description")
        core_type = F.get("core_type")
        file = request.files['files']
        if file.filename == '':
            return rtn.error(404)
        __counter = 0
        if file and _allowed_file(file.filename):
            _files = os.listdir(upload_dir)
            _filename = file.filename
            _ori_filename = _filename
            # avoid clobbering an existing upload: prefix "x<n>-" until
            # the name is unique in the upload directory
            while _filename in _files:
                __counter += 1
                _filename = "x%s-%s" % (__counter, _ori_filename)
            file.save(os.path.join(upload_dir, _filename))
            # add inst to database
            _file = os.path.join(upload_dir, _filename)
            inst = ServerCORE(
                file_name=_filename,
                file_size=os.path.getsize(_file),
                file_dir=upload_dir,
                create_time=datetime.now(),
                file_hash=get_file_hash(_file),
                core_type=core_type,
                core_version=file_version,
                minecraft_version=mc_version,
                file_uploader=uid,
                note=description
            )
            db.session.add(inst)
            db.session.commit()
            return rtn.success(200)
        else:
            return rtn.error(411)
    except Exception:
        logger.error(traceback.format_exc())
        return rtn.error(500)
def migrate_superadmin():
    '''
    This function aims to migrate superadmin's account data (including
    username, email, password hash) from temporal SQLite database to main
    database.

    Why exists?
    At the beginning, database setting has not been configured yet. Thus
    it's impossible to store superadmin's account data to user's database
    directly.

    How it works?
    Read superadmin's account data from the GlobalConfig database (in
    which the data is stored when step 1 is done) and run init_database()
    to ensure the SQLAlchemy API is available. Then insert the data and
    delete the original copy, since there's no reason to keep it.

    :return: True on success, False on any failure.
    '''
    if app.config.get("SQLALCHEMY_DATABASE_URI") is None:
        # ensure main database is initialized and SQLAlchemy available.
        init_database()
    # read data from GlobalConfig database
    gc = GlobalConfig()
    _username = gc.get("temp_superadmin_username")
    _email = gc.get("temp_superadmin_email")
    _hash = gc.get("temp_superadmin_hash")
    # for superadmin, privilege = 1
    try:
        super_admin_user = Users(username=_username,
                                 privilege=PRIVILEGES.ROOT_USER,
                                 email=_email,
                                 hash=_hash)
        try:
            super_admin_user.insert_byhash()
        except Exception:
            # BUG FIX: the old code swallowed insert failures and still
            # wiped the temporary credentials below, losing the superadmin
            # account permanently. Report failure and keep the temp data.
            g_logger.error(traceback.format_exc())
            return False
        # the insert succeeded, so it is now safe to delete the temporary
        # account data (empty value means "absent")
        gc.set("temp_superadmin_username", "")
        gc.set("temp_superadmin_email", "")
        gc.set("temp_superadmin_hash", "")
        return True
    except Exception:
        g_logger.error(traceback.format_exc())
        return False
def gc():
    """Return a GlobalConfig instance."""
    config = GlobalConfig()
    return config
from app.tools.mq_proxy import WS_TAG, MessageQueueProxy
from ob_logger import Logger

logger = Logger("APP", debug=True)

app = Flask(__name__)
# shut up, please. I don't wanna see your useless notice again !!
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = True
# close SQLalchemy debug mode
app.config["SQLALCHEMY_ECHO"] = False
app.config['SECRET_KEY'] = 'secret!'
app.config['REDIS_QUEUE_KEY'] = 'reboot_queue'

gc = GlobalConfig.getInstance()
# set sqlalchemy database uri
if gc.get("database_uri") != None:
    app.config["SQLALCHEMY_DATABASE_URI"] = gc.get("database_uri")
# init flask-SQLAlchemy
db = SQLAlchemy(app)

# read the broker port from config.yaml directly.
# FIX: the old code used yaml.load(open(...)) which both leaked the file
# handle and used the unsafe full loader (arbitrary object construction);
# safe_load inside a context manager is equivalent for plain config data.
with open("config.yaml", "r") as _cfg_file:
    zmq_port = int(yaml.safe_load(_cfg_file).get("broker").get("listen_port"))
proxy = MessageQueueProxy(WS_TAG.APP, router_port=zmq_port)

# import blueprints
# to prevent circular importing, this `import` statement should be put
# after database declared.
def __init__(self):
    """Initialize Java-environment configuration keys.

    Seeds empty values into the global-config store while the starter
    wizard has not completed yet.
    """
    GlobalConfig.__init__(self)
    self.__keys = ("sys_java_dir", "sys_java_version")
    # seed blank defaults only before initialization has finished
    if self.get("init_super_admin") == False:
        defaults = {"sys_java_dir": "", "sys_java_version": ""}
        self.gdb.init_data(defaults)