def restoredb(config, restorefs):
    """Restore database tables from a gzipped JSON-lines backup file.

    Each line of the backup is a JSON pair ``[table_name, row_dict]``.
    A special header line ``["table_names", [...]]`` lists the tables
    that are truncated before rows are re-inserted.  Rows are buffered
    per table and bulk-inserted in batches to limit round trips.

    :param config: application config used to build the DB engine
    :param restorefs: path to a gzip backup produced by ``dumpdb``
    """
    if not os.path.exists(restorefs):
        print('backup file not exists')
        return

    batch_size = 500  # rows buffered per table before a bulk insert
    engine = get_engine(config)
    db = engine.connect()
    metadata = models.get_metadata(engine)
    try:
        with gzip.open(restorefs, 'rb') as rfs:
            cache_datas = {}
            for line in rfs:
                try:
                    tabname, rdata = json.loads(line)
                    if tabname == 'table_names' and rdata:
                        for table_name in rdata:
                            print("clean table %s" % table_name)
                            # NOTE(review): the table name comes straight from
                            # the backup file and is interpolated into raw SQL;
                            # only restore backups from a trusted source.
                            db.execute("delete from %s;" % table_name)
                        continue
                    cache_datas.setdefault(tabname, []).append(rdata)
                    if len(cache_datas[tabname]) >= batch_size:
                        print('insert datas<%s> into %s' % (len(cache_datas[tabname]), tabname))
                        db.execute(metadata.tables[tabname].insert().values(cache_datas[tabname]))
                        del cache_datas[tabname]
                except Exception:
                    # best-effort restore: report the bad line and keep going
                    # (was a bare except, which also trapped SystemExit)
                    print('error data %s ...' % line)
                    import traceback
                    traceback.print_exc()
            print("insert last data")
            # flush any remaining partial batches
            for tname, tdata in cache_datas.items():
                try:
                    print('insert datas<%s> into %s' % (len(tdata), tname))
                    db.execute(metadata.tables[tname].insert().values(tdata))
                except Exception:
                    print('error data %s ...' % tdata)
                    import traceback
                    traceback.print_exc()
            cache_datas.clear()
    finally:
        # original leaked the connection when an exception escaped
        db.close()
def dumpdb(config, dumpfs):
    """Dump all database tables to a gzipped JSON-lines backup file.

    The first line is a header ``["table_names", [...]]`` naming every
    table in the schema; each subsequent line is ``[table_name, row_dict]``.
    Tables listed in the module-level ``excludes`` are skipped from the
    data section (they still appear in the header, so the restore side
    truncates them without refilling -- TODO confirm that is intended).

    :param config: application config used to build the DB engine
    :param dumpfs: destination path; parent directories are created
    """
    _dir = os.path.split(dumpfs)[0]
    if not os.path.exists(_dir):
        os.makedirs(_dir)
    engine = get_engine(config)
    db = engine.connect()
    metadata = models.get_metadata(engine)
    try:
        # do not shadow the ``dumpfs`` path argument with the file handle
        with gzip.open(dumpfs, 'wb') as out:
            table_names = list(metadata.tables.keys())
            table_headers = ('table_names', table_names)
            out.write(json.dumps(table_headers, ensure_ascii=False).encode('utf-8'))
            out.write(b'\n')  # bytes: the gzip stream is opened in binary mode
            for _name, _table in metadata.tables.items():
                if _name in excludes:
                    continue
                # BUGFIX: original used ``for rows in rows``, shadowing the
                # result proxy with each row; use a distinct name per row.
                for row in db.execute(select([_table])):
                    obj = (_name, dict(row.items()))
                    out.write(json.dumps(obj, ensure_ascii=False).encode('utf-8'))
                    out.write(b'\n')
    finally:
        # original leaked the connection when an exception escaped
        db.close()
def run_initdb(config):
    """Initialize/upgrade the database schema for the configured engine."""
    engine = get_engine(config)
    init_db.update(engine)
def __init__(self, config=None, **kwargs):
    """Assemble the cyclone web application.

    Wires up logging, the memcached client, process timezone, session
    management, the beaker cache, mako template lookup, the SQLAlchemy
    session factory, the auth/accounting agents, the AES cipher and the
    URL routes, then delegates to ``cyclone.web.Application.__init__``.

    :param config: application settings object (memcached hosts, tz,
        debug flag, secrets, backup path, ...)
    :param kwargs: accepted for signature compatibility; not used here
    """
    self.config = config
    self.syslog = logger.Logger(config)
    # "host:port,host:port" -> [(host, int(port)), ...]
    hosts = [h.split(":") for h in config.memcached.hosts.split(",")]
    hosts = [(h, int(p)) for h, p in hosts]
    self.mcache = YamClient(hosts)
    try:
        # apply the configured timezone to this process; best effort
        # (time.tzset is unavailable on some platforms, e.g. Windows)
        if 'TZ' not in os.environ:
            os.environ["TZ"] = self.config.defaults.tz
        time.tzset()
    except Exception:
        # was a bare ``except:``; narrowed so KeyboardInterrupt/SystemExit
        # are no longer swallowed, while keeping the best-effort behavior
        pass
    settings = dict(
        cookie_secret="12oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
        session_secret="12oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
        login_url="/admin/login",
        template_path=os.path.join(os.path.dirname(__file__), "views"),
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        xsrf_cookies=True,
        config=config,
        debug=self.config.defaults.debug,
        xheaders=True,
    )
    # memcached-backed sessions (600 s TTL) and data cache
    self.session_manager = session.SessionManager(settings["session_secret"], hosts, 600)
    self.cache = CacheManager(**parse_cache_config_options({
        'cache.type': 'ext:memcached',
        'cache.url': self.config.memcached.hosts,
    }))
    self.tp_lookup = TemplateLookup(
        directories=[settings['template_path']],
        default_filters=['decode.utf8'],
        input_encoding='utf-8',
        output_encoding='utf-8',
        encoding_errors='replace',
        module_directory="/tmp/admin",
    )
    self.db_engine = get_engine(config)
    self.db = scoped_session(sessionmaker(bind=self.db_engine,
                                          autocommit=False, autoflush=False))
    self.zauth_agent = authorize.ZAuthAgent(self)
    self.zacct_agent = acctounting.ZAcctAgent(self)
    self.aes = utils.AESCipher(key=self.config.defaults.secret)
    # static route for downloading database backups (label is user-facing)
    permit.add_route(cyclone.web.StaticFileHandler,
                     r"/backup/download/(.*)",
                     u"下载数据",
                     MenuSys,
                     handle_params={"path": self.config.database.backup_path},
                     order=1.0405)
    self.init_route()
    cyclone.web.Application.__init__(self, permit.all_handlers, **settings)