def get_my_ip(uid, priv):
    """Return this server's public IP address, caching it in GlobalConfig.

    The first call queries the external echo service and stores the result;
    subsequent calls are served from the cache.

    :param uid: id of the requesting user (unused; kept for route signature)
    :param priv: privilege of the requesting user (unused; kept for route signature)
    :return: rtn.success(<ip address string>)
    """
    gc = GlobalConfig()
    cached_ip = gc.get("my_ip_address")
    if cached_ip != "":
        # cache hit: skip the network round-trip
        return rtn.success(cached_ip)
    # cache miss: ask the external echo service for our public address.
    # Use a context manager so the HTTP response/socket is always closed
    # (the original leaked the connection).
    req = Request(url="http://whatismyip.akamai.com/")
    with urlopen(req) as resp:
        ip_addr = resp.read().decode()
    # store ip address into cache for subsequent calls
    gc.set("my_ip_address", ip_addr)
    return rtn.success(ip_addr)
def migrate_superadmin():
    '''
    Migrate the superadmin's account data (username, email, password hash)
    from the temporary GlobalConfig store into the main database.

    Why exists?
        At the beginning, the database settings have not been configured yet,
        so it is impossible to store the superadmin's account data in the
        users database directly; it is parked in the GlobalConfig store.

    How it works?
        Read the superadmin's account data back from the GlobalConfig
        database (stored there when step 1 was done), run init_database()
        to ensure the SQLAlchemy API is available, insert the data via that
        API, and finally blank out the temporary entries since there is no
        reason to keep them.

    :return: True when the migration path completed, False when the Users
             row could not even be constructed.
    '''
    if app.config.get("SQLALCHEMY_DATABASE_URI") is None:
        # ensure the main database is initialized and SQLAlchemy is available
        init_database()
    # read data from the GlobalConfig store
    gc = GlobalConfig()
    _username = gc.get("temp_superadmin_username")
    _email = gc.get("temp_superadmin_email")
    _hash = gc.get("temp_superadmin_hash")
    try:
        # for superadmin, privilege = ROOT_USER
        super_admin_user = Users(username=_username,
                                 privilege=PRIVILEGES.ROOT_USER,
                                 email=_email,
                                 hash=_hash)
        try:
            super_admin_user.insert_byhash()
        except Exception:
            # best-effort insert: log and fall through to cleanup, matching
            # the original behaviour of not aborting the migration here
            traceback.print_exc()
        # if everything works correctly (including the inserting operation
        # above), it is time to delete the temporary account data
        gc.set("temp_superadmin_username", "")
        gc.set("temp_superadmin_email", "")
        gc.set("temp_superadmin_hash", "")
        return True
    except Exception:
        g_logger.error(traceback.format_exc())
        return False
def login():
    """Render the startup page, flagging whether initial setup is complete."""
    config = GlobalConfig()
    # the login form is only meaningful once the superadmin account exists
    setup_done = config.get("init_super_admin") == True
    return render_template("/startup/index.html",
                           login_flag=1 if setup_done else 0,
                           version=version)
def _init_proc_pool(self):
    """Load every recorded server instance into the process pool.

    :return: True when instances were loaded, None when the database has
             not been initialised yet or when no instances exist.
    """
    gc = GlobalConfig()
    # first, we have to make sure that the database has been initialized;
    # before the superadmin setup step there is nothing to load.
    if gc.get("init_super_admin") != True:
        return None
    # import dependencies here to prevent circular import
    from app import db
    from app.model import ServerInstance, JavaBinary, ServerCORE
    # search all registered instances with their java/core rows joined
    _q = db.session.query(ServerInstance).join(JavaBinary).join(ServerCORE).all()
    # BUG FIX: Query.all() returns a list and never None, so the original
    # `_q == None` test was dead code; treat an empty result as "nothing
    # to load" instead.
    if not _q:
        return None
    for item in _q:
        self._add_instance_to_pool(item)
    return True
def add_download_java_task(self, download_link, binary_dir, version_pair, uid):
    # Download a JDK archive in the background, extract it into the library
    # bin directory, and record the version in the database, pushing progress
    # events to the user's websocket along the way.
    #
    # :param download_link: URL of the JDK archive (None triggers a bare
    #                       "_download_start" failure signal)
    # :param binary_dir: directory name of the extracted JDK inside root_dir
    # :param version_pair: (major_version, minor_version) tuple, e.g. (8, 102)
    # :param uid: user id, attached to every websocket event
    gc = GlobalConfig()
    root_dir = gc.get("lib_bin_dir")
    major_ver, minor_ver = version_pair
    '''
    when accessing this route, a new JDK starts downloading in the background.
    Due to the limitation of current technology, we only allow one file to download at the same time.

    request params:
    [POST]
    :major: <major version of java>
    :minor: <minor version of java>
    '''
    def _schedule_get_progress(self, hash):
        # Periodic scheduler callback: poll the downloader and broadcast
        # progress. `hash` identifies the download task.
        # fetch and update data
        dp = DownloaderPool.getInstance()
        _t = dp.get(hash)
        if _t != None:
            inst = _t.dl
            _dw, _filesize = inst.getProgress()
            # update data on download_queue (guard prevents div-by-zero)
            if _filesize > 0 and _dw != None and _filesize != None:
                self.tasks_pool.update(hash, progress=_dw / _filesize)
            # NOTE(review): source formatting was mangled; the progress event
            # is emitted whenever the task exists, even before the filesize
            # is known — confirm against upstream history.
            _send_dw_signal("_get_progress", hash, (_dw, _filesize))

    def _send_dw_signal(event_name, hash, result):
        # Push a download-related event to the websocket proxy; `self` and
        # `uid` are captured from the enclosing scope.
        values = {
            "event": event_name,
            "hash": hash,
            "result": result,
            "uid": uid
        }
        self.proxy.send("websocket.dw_response", values, WS_TAG.APP, reply=False)

    def _extract_file(download_result, filename):
        # Download-finish hook: untar the archive into root_dir and record
        # the java version in the database. `hash` resolves at call time to
        # the enclosing function's local assigned by `_add_java_task` below.
        # for abnormal input parameters(like empty filename), the only thing
        # is to terminate next steps!
        if download_result == False or filename == None:
            return None
        logger.debug("Download Result: %s" % download_result)
        logger.debug("Start Extracting File...")
        # send extract_start event
        self.tasks_pool.update(hash, status=_utils.EXTRACTING)
        _send_dw_signal("_extract_start", hash, True)
        # run tar command
        cmd = "tar -xzf %s -C %s" % (filename, root_dir)
        # open archive
        #archive = tarfile.open(filename)
        p = subprocess.Popen(cmd, shell=True)
        rc = p.wait()
        # If untar file error
        if rc != 0:
            self.tasks_pool.update(hash, status=_utils.EXTRACT_FAIL)
            # send extract_finish event (when extract failed)
            _send_dw_signal("_extract_finish", hash, False)
            return None
        logger.debug("extract dir: %s, finish!" % root_dir)
        try:
            # save the version info into the database
            version_data = JavaBinary(major_version=major_ver,
                                      minor_version=minor_ver,
                                      bin_directory=os.path.join(
                                          root_dir, binary_dir),
                                      install_time=datetime.now())
            db.session.add(version_data)
            db.session.commit()
        except:
            # writing database error: mark the task failed, stop the
            # progress scheduler, and notify the client
            logger.error(traceback.format_exc())
            self.tasks_pool.update(hash, status=_utils.FAIL)
            # delete scheduler
            sch_job = self.tasks_pool.get(hash).get("sch_job")
            if sch_job != None:
                sch_job.remove()
            _send_dw_signal("_download_finish", hash, False)
            return
        self.tasks_pool.update(hash, status=_utils.FINISH)
        _send_dw_signal("_extract_finish", hash, True)

    def _add_java_task(link, download_dir, binary_dir, version_pair):
        '''
        add task of downloading java, with hooks.
        :return: (<instance>, <download_hash>)
        '''
        def _send_finish_event(download_result, filename):
            # Download-finish hook: stop the progress scheduler and announce
            # completion. `hash` is the closure variable assigned below.
            # close scheduler
            sch_job = self.tasks_pool.get(hash).get("sch_job")
            if sch_job != None:
                sch_job.remove()
            # send finish event
            self.tasks_pool.update(hash, status=_utils.FINISH)
            _send_dw_signal("_download_finish", hash, True)

        def _network_error(e):
            # Network-error hook: stop the scheduler and report failure.
            # remove scheduler
            sch_job = self.tasks_pool.get(hash).get("sch_job")
            logger.debug("Network Error! sch:%s" % sch_job)
            if sch_job != None:
                sch_job.remove()
            self.tasks_pool.update(hash, status=_utils.FAIL)
            _send_dw_signal("_download_finish", hash, False)

        dp = DownloaderPool.getInstance()
        inst, hash = dp.newTask(link, download_dir=download_dir)
        # add cookies to download java directly
        inst.disableSSLCert()
        inst.setHeaders(
            {"Cookie": "oraclelicense=accept-securebackup-cookie"})
        # Since multi thread is not stable here,
        # we decided to use only one thread to download it
        inst.set_force_singlethread(True)
        # global config
        # NOTE(review): this local root_dir shadows the outer one and is
        # otherwise unused here; _extract_file reads the outer value.
        gc = GlobalConfig.getInstance()
        root_dir = gc.get("lib_bin_dir")
        # add hook
        inst.addDownloadFinishHook(_send_finish_event)
        inst.addDownloadFinishHook(_extract_file)
        inst.addNetworkErrorHook(_network_error)
        dp.start(hash)
        return inst, hash

    try:
        gc = GlobalConfig()
        files_dir = gc.get("files_dir")
        link = download_link
        binary_dir = binary_dir
        # version_pair : (major_version, minor_version)
        # e.g.: (8, 102)
        version_pair = version_pair
        if link != None:
            # only one download per link at a time; an in-flight link gets
            # an empty "_download_start" signal and we bail out
            if self.tasks_pool.has_working_link(link):
                _send_dw_signal("_download_start", None, None)
                return
            # create new task and download; this assignment binds the
            # `hash` name that the hooks above close over
            inst, hash = _add_java_task(link, files_dir, binary_dir,
                                        version_pair)
            self.tasks_pool.add(hash, link)
            # start progress scheduler (1 s interval)
            if not self.scheduler.running:
                self.scheduler.start()
            sch_job = self.scheduler.add_job(_schedule_get_progress,
                                             'interval', seconds=1,
                                             args=[self, hash])
            self.tasks_pool.update(hash, sch_job=sch_job)
            self.tasks_pool.update(hash, status=_utils.DOWNLOADING)
            _send_dw_signal("_download_start", hash, link)
        else:
            # no link supplied: signal a failed start
            _send_dw_signal("_download_start", None, None)
    except:
        logger.error(traceback.format_exc())
def new_Minecraft_instance(uid, priv):
    '''
    create a new MC Server instance.

    So How to create a new instance?
    0. Name it.
    1. Select the Server Core File (or upload it by user?)
    2. Select Java Version
    3. Set server.properties
    4. Upload Mods & Plugins (If necessary)
    5. Go For It!

    :param uid: id of the user creating the instance
    :param priv: privilege of that user (unused here)
    :return: rtn.success({...}) with java versions, server cores and a fresh
             FTP account name, or rtn.error(500) on any failure.
    '''
    try:
        gc = GlobalConfig()
        # get all versions of java, pre-selecting the configured default
        default_java_binary_id = int(gc.get("default_java_binary_id"))
        java_versions = []
        for item in db.session.query(JavaBinary).all():
            java_versions.append({
                # e.g. major=8, minor=102 -> "1.8.0_102"
                "name": "1.%s.0_%s" % (item.major_version, item.minor_version),
                "index": item.id,
                "selected": "selected" if item.id == default_java_binary_id else ""
            })
        # get all info of server cores; include the core version in the
        # display name only when one is recorded
        server_cores = []
        for item in db.session.query(ServerCORE).all():
            if item.core_version is not None and item.core_version != "":
                _name = "%s-%s-%s" % (item.core_type, item.core_version,
                                      item.minecraft_version)
            else:
                _name = "%s-%s" % (item.core_type, item.minecraft_version)
            server_cores.append({"name": _name, "index": item.core_id})
        # ...and generate an FTP account name. Bounded retries (30) prevent
        # an infinite loop if the random suffix keeps colliding.
        user_name_obj = db.session.query(Users).filter(Users.id == uid).first()
        for _ in range(30):
            ftp_user_name = "%s_%s" % (user_name_obj.username,
                                       generate_random_string(3))
            if db.session.query(FTPAccount).filter(
                    FTPAccount.username == ftp_user_name).first() is None:
                break
        rtn_model = {
            "java_versions": java_versions,
            "server_cores": server_cores,
            "FTP_account_name": ftp_user_name
        }
        return rtn.success(rtn_model)
    except Exception:
        # report a generic server error; details are intentionally not
        # exposed to the caller (matches original behaviour)
        return rtn.error(500)