def start():
    """Run the cron job scheduler loop for this process.

    Resets any jobs stuck in 'running'/'error' state back to 'waiting', then
    polls each configured database (re-checking every 60s, or sooner if a
    pending job is scheduled earlier) and dispatches due jobs to a worker pool.
    Never returns.
    """
    global _check_times
    print("Running jobs in process %s..." % os.getpid())
    dbname = config.get("database")
    schema = config.get("schema")
    if dbname:
        check_dbs = [dbname]
    else:
        # No single database configured: fall back to a comma-separated list.
        dbnames = config.get("databases", "")
        check_dbs = [x.strip() for x in dbnames.split(",")]
    print("check_dbs", check_dbs)
    manager = Manager()
    t = datetime.now()
    # Shared (cross-process) dict mapping dbname -> next scheduled check time.
    _check_times = manager.dict({db: t for db in check_dbs})
    for dbname in check_dbs:
        print("resetting jobs of db '%s'" % dbname)
        db = database.connect(dbname, schema)
        res = db.execute(
            "UPDATE cron_job SET state='waiting' WHERE state in ('running','error')"
        )
        db.commit()
    job_pool = Pool(processes=int(config.get("job_processes")))
    while 1:
        try:
            # print("_check_time",_check_times)
            t0 = datetime.now()
            t0_s = t0.strftime("%Y-%m-%d %H:%M:%S")
            for dbname, next_t in _check_times.items():
                if next_t > t0:
                    continue
                # Default to re-checking in 60s; may be pulled earlier below
                # when a waiting job is scheduled sooner.
                _check_times[dbname] = t0 + timedelta(seconds=60)
                print("Checking for scheduled jobs in database %s..." % dbname)
                db = database.connect(dbname, schema)
                db.begin()
                res = db.query(
                    "SELECT * FROM cron_job WHERE state='waiting' ORDER BY date"
                )
                db.commit()
                new_next_t = None
                for job in res:
                    if job.date <= t0_s:
                        # Job is due: hand it to a pool worker.
                        job_pool.apply_async(run_job, [dbname, dict(job)])
                    else:
                        # Rows are ordered by date, so the first future job
                        # determines the earliest next check time.
                        new_next_t = datetime.strptime(job.date, "%Y-%m-%d %H:%M:%S")
                        break
                if new_next_t and new_next_t < _check_times[dbname]:
                    _check_times[dbname] = new_next_t
        except Exception as e:
            import traceback
            traceback.print_exc()
            # Best-effort loop: log the failure and keep polling.
            print("WARNING: failed to check for jobs: %s" % e)
        time.sleep(1)
def get_show_dbs(self, context={}):
    """Return True when the login screen should show a database selector."""
    if config.get("database"):
        # A fixed database is configured: nothing to choose from.
        return False
    if config.get("database_from_domain"):
        # Database is derived from the subdomain; only the generic
        # subdomains still get a selector.
        host = context["request"].host
        subdomain = host.split(".", 1)[0]
        return subdomain in ("all", "clients")  # XXX
    if config.get("database_from_http_header"):
        return False
    return True
def get_databases(self, context={}):
    """Return selectable databases as a list of (value, label) pairs."""
    names = sorted(database.list_databases())
    if config.get("sub_server"):
        # On a sub-server the database name is derived from the request host.
        host = context["request"].host
        pos = host.find(".my.netforce.com")
        if pos == -1:
            raise Exception("Invalid host")
        names = [host[:pos].replace("-", "_")]
    elif config.get("database"):
        names = [config.get("database")]
    return [(n, n) for n in names]
def create_db(self, context={}):
    """Create and initialize a new database from the base (or demo) SQL dump.

    Requires the super admin password in context["data"]; sets the admin
    user's password after loading the dump. Returns a UI action dict.
    """
    import re
    data = context["data"]
    if data["super_password"] != config.get("super_password"):
        raise Exception("Invalid super admin password")
    db_name = data["db_name"]
    # db_name comes from request data and is interpolated into DDL below
    # (identifiers can not be bound as parameters), so restrict it to a
    # safe identifier to prevent SQL injection.
    if not re.match(r"^[A-Za-z_][A-Za-z0-9_]*$", db_name):
        raise Exception("Invalid database name")
    admin_password = data["admin_password"]
    use_demo = data.get("use_demo")
    if use_demo:
        base_sql = pkg_resources.resource_string(
            "netforce_general", "data/base_demo.sql").decode()
    else:
        base_sql = pkg_resources.resource_string(
            "netforce_general", "data/base.sql").decode()
    print("creating db...")
    db = database.connect("template1")
    # CREATE DATABASE can not run inside a transaction -> autocommit mode.
    db._db.set_isolation_level(0)
    db.execute("CREATE DATABASE %s" % db_name)
    db.close()
    print("initializing db...")
    db = database.connect(db_name)
    db.execute(base_sql)
    db.execute(
        "UPDATE base_user SET name='Admin',login='******',password=%s WHERE id=1",
        admin_password)
    db.commit()
    print("done!")
    return {
        "next": {
            "name": "manage_db"
        },
        "flash": "Database created successfully",
    }
def open(self):
    """Websocket open handler: register this connection as a notification listener.

    Resolves the database from config or the 'dbname' cookie, inserts a row
    into ws_listener, and schedules the shared notification poll loop if it
    is not already running in this process.
    """
    print("Listen.open", os.getpid())
    global sending_notifs
    t = time.strftime("%Y-%m-%d %H:%M:%S")
    dbname = config.get("database")
    if not dbname:
        # Fall back to the dbname cookie (presumably set at login -- verify).
        dbname = self.get_cookie("dbname", None)
    if not dbname:
        raise Exception("Can't open websocket, missing dbname")
    set_active_db(dbname)
    db = get_connection()
    user_id = self.get_cookie("user_id", None)
    if user_id:
        user_id = int(user_id)
    res = db.get(
        "INSERT INTO ws_listener (user_id,last_check_time) VALUES (%s,%s) RETURNING id",
        user_id, t)
    self.listener_id = res.id
    self.dbname = dbname
    # Register so notifications can be pushed to this websocket later.
    listen_handlers[self.listener_id] = self
    if not sending_notifs:
        # Start the shared poll loop once per process.
        io_loop = tornado.ioloop.IOLoop.instance()
        io_loop.add_timeout(
            time.time() + POLL_WAIT, send_notifs
        )  # XXX: should start this directly when process is started?
        sending_notifs = True
    db.commit()
def create_db(self, context={}):
    """Create and initialize a new database from the base (or demo) SQL dump.

    Requires the super admin password in context["data"]; sets the admin
    user's password after loading the dump. Returns a UI action dict.
    """
    import re
    data = context["data"]
    if data["super_password"] != config.get("super_password"):
        raise Exception("Invalid super admin password")
    db_name = data["db_name"]
    # db_name comes from request data and is interpolated into DDL below
    # (identifiers can not be bound as parameters), so restrict it to a
    # safe identifier to prevent SQL injection.
    if not re.match(r"^[A-Za-z_][A-Za-z0-9_]*$", db_name):
        raise Exception("Invalid database name")
    admin_password = data["admin_password"]
    use_demo = data.get("use_demo")
    if use_demo:
        base_sql = pkg_resources.resource_string(
            "netforce_general", "data/base_demo.sql").decode()
    else:
        base_sql = pkg_resources.resource_string(
            "netforce_general", "data/base.sql").decode()
    print("creating db...")
    db = database.connect("template1")
    # CREATE DATABASE can not run inside a transaction -> autocommit mode.
    db._db.set_isolation_level(0)
    db.execute("CREATE DATABASE %s" % db_name)
    db.close()
    print("initializing db...")
    db = database.connect(db_name)
    db.execute(base_sql)
    db.execute(
        "UPDATE base_user SET name='Admin',login='******',password=%s WHERE id=1",
        admin_password)
    db.commit()
    print("done!")
    return {
        "next": {
            "name": "manage_db"
        },
        "flash": "Database created successfully",
    }
def get_db_name(self, context={}):
    """Resolve the active database name from config or the current request.

    Returns None when no database can be determined.
    """
    fixed = config.get("database")
    if fixed:
        return fixed
    if config.get("database_from_domain"):
        # Derive the name from the subdomain, except for generic subdomains.
        host = context["request"].host
        subdomain = host.split(".", 1)[0]
        if subdomain not in ("all", "clients"):  # XXX
            return subdomain.replace("-", "_")
        return None
    if config.get("database_from_http_header"):
        return context["request"].headers.get("X-Database")
    return None
def copy_db(self, context={}):
    """Copy a database (and its uploaded files) to a new name.

    Requires the super admin password in context["data"]. Returns a UI
    action dict.
    """
    import re
    data = context["data"]
    if data["super_password"] != config.get("super_password"):
        raise Exception("Invalid super admin password")
    dbname = data["dbname"]
    new_dbname = data["new_dbname"]
    # Database names come from request data and are interpolated into DDL
    # below (identifiers can not be bound as parameters), so restrict them
    # to safe identifiers to prevent SQL injection.
    for name in (dbname, new_dbname):
        if not re.match(r"^[A-Za-z_][A-Za-z0-9_]*$", name):
            raise Exception("Invalid database name")
    print("copying db...")
    # database.close_connections(dbname);
    db = database.connect("template1")
    # CREATE DATABASE can not run inside a transaction -> autocommit mode.
    db._db.set_isolation_level(0)
    # Terminate open sessions so the source DB can be used as a template.
    db.execute(
        "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname=%s",
        dbname)
    db.execute("CREATE DATABASE %s WITH TEMPLATE %s" % (new_dbname, dbname))
    db.close()
    print("copying files...")
    fdir1 = os.path.join("static", "db", dbname, "files")
    if os.path.exists(fdir1):
        fdir2 = os.path.join("static", "db", new_dbname, "files")
        if not os.path.exists(fdir2):
            os.makedirs(fdir2)
        for f in os.listdir(fdir1):
            f1 = os.path.join(fdir1, f)
            f2 = os.path.join(fdir2, f)
            shutil.copy(f1, f2)
    return {
        "next": {
            "name": "login"
        },
        "flash": "Database copied successfully",
    }
def copy_db(self, context={}):
    """Copy a database (and its uploaded files) to a new name.

    Requires the super admin password in context["data"]. Returns a UI
    action dict.
    """
    import re
    data = context["data"]
    if data["super_password"] != config.get("super_password"):
        raise Exception("Invalid super admin password")
    dbname = data["dbname"]
    new_dbname = data["new_dbname"]
    # Database names come from request data and are interpolated into DDL
    # below (identifiers can not be bound as parameters), so restrict them
    # to safe identifiers to prevent SQL injection.
    for name in (dbname, new_dbname):
        if not re.match(r"^[A-Za-z_][A-Za-z0-9_]*$", name):
            raise Exception("Invalid database name")
    print("copying db...")
    # database.close_connections(dbname);
    db = database.connect("template1")
    # CREATE DATABASE can not run inside a transaction -> autocommit mode.
    db._db.set_isolation_level(0)
    # Terminate open sessions so the source DB can be used as a template.
    db.execute(
        "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname=%s",
        dbname)
    db.execute("CREATE DATABASE %s WITH TEMPLATE %s" % (new_dbname, dbname))
    db.close()
    print("copying files...")
    fdir1 = os.path.join("static", "db", dbname, "files")
    if os.path.exists(fdir1):
        fdir2 = os.path.join("static", "db", new_dbname, "files")
        if not os.path.exists(fdir2):
            os.makedirs(fdir2)
        for f in os.listdir(fdir1):
            f1 = os.path.join(fdir1, f)
            f2 = os.path.join(fdir2, f)
            shutil.copy(f1, f2)
    return {
        "next": {
            "name": "login"
        },
        "flash": "Database copied successfully",
    }
def start():
    """Run the cron job scheduler loop for this process.

    Resets any jobs stuck in 'running'/'error' state back to 'waiting', then
    polls each configured database (re-checking every 60s, or sooner if a
    pending job is scheduled earlier) and dispatches due jobs to a worker pool.
    Never returns.
    """
    global _check_times
    print("Running jobs in process %s..."%os.getpid())
    dbname = config.get("database")
    schema = config.get("schema")
    if dbname:
        check_dbs = [dbname]
    else:
        # No single database configured: fall back to a comma-separated list.
        dbnames = config.get("databases", "")
        check_dbs = [x.strip() for x in dbnames.split(",")]
    print("check_dbs", check_dbs)
    manager = Manager()
    t = datetime.now()
    # Shared (cross-process) dict mapping dbname -> next scheduled check time.
    _check_times = manager.dict({db: t for db in check_dbs})
    for dbname in check_dbs:
        print("resetting jobs of db '%s'"%dbname)
        db=database.connect(dbname,schema)
        res=db.execute("UPDATE cron_job SET state='waiting' WHERE state in ('running','error')")
        db.commit()
    job_pool = Pool(processes=int(config.get("job_processes")))
    while 1:
        try:
            # print("_check_time",_check_times)
            t0 = datetime.now()
            t0_s = t0.strftime("%Y-%m-%d %H:%M:%S")
            for dbname, next_t in _check_times.items():
                if next_t > t0:
                    continue
                # Default to re-checking in 60s; may be pulled earlier below
                # when a waiting job is scheduled sooner.
                _check_times[dbname] = t0 + timedelta(seconds=60)
                print("Checking for scheduled jobs in database %s..." % dbname)
                db = database.connect(dbname,schema)
                db.begin()
                res = db.query("SELECT * FROM cron_job WHERE state='waiting' ORDER BY date")
                db.commit()
                new_next_t = None
                for job in res:
                    if job.date <= t0_s:
                        # Job is due: hand it to a pool worker.
                        job_pool.apply_async(run_job, [dbname, dict(job)])
                    else:
                        # Rows are ordered by date, so the first future job
                        # determines the earliest next check time.
                        new_next_t = datetime.strptime(job.date, "%Y-%m-%d %H:%M:%S")
                        break
                if new_next_t and new_next_t < _check_times[dbname]:
                    _check_times[dbname] = new_next_t
        except Exception as e:
            import traceback; traceback.print_exc()
            # Best-effort loop: log the failure and keep polling.
            print("WARNING: failed to check for jobs: %s"%e)
        time.sleep(1)
def get_database(self, params={}, context={}):
    """Return the available databases and whether a fixed database is configured.

    Result: {"dbs": [{"name": ...}, ...], "a_db": bool}.
    """
    dbs = [{"name": name} for name in list_databases()]
    return {
        "dbs": dbs,
        "a_db": bool(config.get("database")),
    }
def get_databases(self, context={}):
    """Return selectable databases as (value, label) pairs, honoring config overrides."""
    if config.get("database"):
        # A fixed database is configured.
        name = config.get("database")
        return [(name, name)]
    if config.get("database_from_domain"):
        # Derive the name from the subdomain, except for generic subdomains.
        subdomain = context["request"].host.split(".", 1)[0]
        if subdomain not in ("all", "clients"):  # XXX
            name = subdomain.replace("-", "_")
            return [(name, name)]
    elif config.get("database_from_http_header"):
        name = context["request"].headers.get("X-Database")
        return [(name, name)]
    return [(n, n) for n in sorted(database.list_databases())]
def login(self, params={}, context={}):
    """Authenticate against the requested (or configured) database via the login model."""
    print('login.params ', params)
    dbname = params.get("database") or config.get("database")
    context['data'] = {
        'db_name': dbname,
        'login': params['login'],
        'password': params['password'],
    }
    res = get_model('login').login(context=context)
    separator = "=" * 80
    print(separator)
    print(res)
    print(separator)
    return res
def upgrade_db(self, context={}):
    """Upgrade a database's schema and apply pending migrations.

    Requires the super admin password in context["data"]. Returns a UI
    action dict.
    """
    data = context["data"]
    if data["super_password"] != config.get("super_password"):
        raise Exception("Invalid super admin password")
    dbname = data["dbname"]
    database.set_active_db(dbname)
    # Capture the version before updating so migrations start from it.
    from_version = get_db_version()
    update_db(force=True)
    apply_migrations(from_version=from_version)
    return {
        "next": {
            "name": "manage_db"
        },
        # Fixed user-facing message grammar ("upgrade" -> "upgraded").
        "flash": "Database upgraded successfully",
    }
def delete_db(self, context={}):
    """Drop a database after verifying the super admin password.

    Returns a UI action dict redirecting to the login screen.
    """
    import re
    data = context["data"]
    if data["super_password"] != config.get("super_password"):
        raise Exception("Invalid super admin password")
    dbname = data["dbname"]
    # dbname comes from request data and is interpolated into DROP DATABASE
    # (identifiers can not be bound as parameters), so restrict it to a
    # safe identifier to prevent SQL injection.
    if not re.match(r"^[A-Za-z_][A-Za-z0-9_]*$", dbname):
        raise Exception("Invalid database name")
    print("deleting db...")
    db = database.connect("template1")
    # DROP DATABASE can not run inside a transaction -> autocommit mode.
    db._db.set_isolation_level(0)
    # Terminate open sessions so the DROP does not fail with "in use".
    db.execute("SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname=%s", dbname)
    db.execute("DROP DATABASE %s" % dbname)
    db.close()
    return {
        "next": {
            "name": "login"
        },
        "flash": "Database deleted successfully",
    }
def delete_db(self, context={}):
    """Drop a database after verifying the super admin password.

    Returns a UI action dict redirecting to the login screen.
    """
    import re
    data = context["data"]
    if data["super_password"] != config.get("super_password"):
        raise Exception("Invalid super admin password")
    dbname = data["dbname"]
    # dbname comes from request data and is interpolated into DROP DATABASE
    # (identifiers can not be bound as parameters), so restrict it to a
    # safe identifier to prevent SQL injection.
    if not re.match(r"^[A-Za-z_][A-Za-z0-9_]*$", dbname):
        raise Exception("Invalid database name")
    print("deleting db...")
    db = database.connect("template1")
    # DROP DATABASE can not run inside a transaction -> autocommit mode.
    db._db.set_isolation_level(0)
    # Terminate open sessions so the DROP does not fail with "in use".
    db.execute(
        "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname=%s",
        dbname)
    db.execute("DROP DATABASE %s" % dbname)
    db.close()
    return {
        "next": {
            "name": "login"
        },
        "flash": "Database deleted successfully",
    }
def get(self):
    """Redirect the root URL: to the configured root URL if set, else to the
    active user's home action, else to the login screen.
    """
    url = None
    db = get_connection()
    try:
        if db:
            res = db.get("SELECT root_url FROM settings WHERE id=1")
            url = res.root_url or config.get("root_url")
            if url:
                self.redirect(url)
                return
        user_id = access.get_active_user()
        action = None
        if user_id:
            user = get_model("base.user").browse(user_id)
            profile = user.profile_id
            action = profile.home_action
        if action:
            self.redirect("/ui#name=%s" % action)
            return
        self.redirect("/ui#name=login")
    finally:
        # Bug fix: the original called db.commit() unconditionally, which
        # raised AttributeError when get_connection() returned no connection
        # (the try body already guards on "if db:").
        if db:
            db.commit()
def open(self):
    """Websocket open handler: register this connection as a notification listener.

    Resolves the database from config or the 'dbname' cookie, inserts a row
    into ws_listener, and schedules the shared notification poll loop if it
    is not already running in this process.
    """
    print("Listen.open",os.getpid())
    global sending_notifs
    t=time.strftime("%Y-%m-%d %H:%M:%S")
    dbname=config.get("database")
    if not dbname:
        # Fall back to the dbname cookie (presumably set at login -- verify).
        dbname=self.get_cookie("dbname",None)
    if not dbname:
        raise Exception("Can't open websocket, missing dbname")
    set_active_db(dbname)
    db=get_connection()
    user_id=self.get_cookie("user_id",None)
    if user_id:
        user_id=int(user_id)
    res=db.get("INSERT INTO ws_listener (user_id,last_check_time) VALUES (%s,%s) RETURNING id",user_id,t)
    self.listener_id=res.id
    self.dbname=dbname
    # Register so notifications can be pushed to this websocket later.
    listen_handlers[self.listener_id]=self
    if not sending_notifs:
        # Start the shared poll loop once per process.
        io_loop=tornado.ioloop.IOLoop.instance()
        io_loop.add_timeout(time.time()+POLL_WAIT,send_notifs) # XXX: should start this directly when process is started?
        sending_notifs=True
    db.commit()