Example #1
0
    def __init__(self, config=None, dbengine=None, **kwargs):
        """Build the ToughWlan management web application.

        Wires up the template lookup, file/redis caches, the database
        session, the backup helper, the AES cipher and the HTTP route
        table, then delegates to cyclone's Application constructor.
        """
        self.config = config

        views_dir = os.path.join(os.path.dirname(toughwlan.__file__), "views")
        assets_dir = os.path.join(os.path.dirname(toughwlan.__file__), "static")

        app_settings = {
            'cookie_secret': "12oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
            'login_url': "/login",
            'template_path': views_dir,
            'static_path': assets_dir,
            'xsrf_cookies': True,
            'config': self.config,
            'debug': self.config.system.debug,
            'xheaders': True,
        }

        # File-backed beaker cache under /tmp.
        cache_opts = parse_cache_config_options({
            'cache.type': 'file',
            'cache.data_dir': '/tmp/cache/data',
            'cache.lock_dir': '/tmp/cache/lock'
        })
        self.cache = CacheManager(**cache_opts)

        # Mako template lookup rooted at the package views directory.
        self.tp_lookup = TemplateLookup(
            directories=[app_settings['template_path']],
            default_filters=['decode.utf8'],
            input_encoding='utf-8',
            output_encoding='utf-8',
            encoding_errors='replace',
            module_directory="/tmp/toughwlan")

        self.db_engine = dbengine
        session_factory = sessionmaker(bind=self.db_engine,
                                       autocommit=False, autoflush=False)
        self.db = scoped_session(session_factory)

        # Redis-backed web sessions (600s lifetime) and a per-process cache.
        redisconf = redis_conf(config)
        self.session_manager = redis_session.SessionManager(
            redisconf, app_settings["cookie_secret"], 600)
        self.mcache = redis_cache.CacheManager(
            redisconf, cache_name='ToughWlanWeb-%s' % os.getpid())

        # Backup helper; transient tables are skipped.
        self.db_backup = DBBackup(models.get_metadata(self.db_engine), excludes=[
            'trw_online', 'system_session', 'system_cache'])

        self.aes = utils.AESCipher(key=self.config.system.secret)

        # Static download route for database backup files.
        permit.add_route(cyclone.web.StaticFileHandler,
                         r"/backup/download/(.*)",
                         u"下载数据",
                         u"系统管理",
                         handle_params={"path": self.config.database.backup_path},
                         order=1.0405)

        # Register every handler module under toughwlan/manage.
        handler_path = os.path.join(
            os.path.abspath(os.path.dirname(toughwlan.__file__)), "manage")
        load_handlers(handler_path=handler_path,
                      pkg_prefix="toughwlan.manage",
                      excludes=['views', 'httpd', 'ddns_task'])

        cyclone.web.Application.__init__(self, permit.all_handlers, **app_settings)
Example #2
0
def update(config, force=False):
    """Drop and recreate every table, then seed initial data.

    Runs only when the ``DB_INIT`` environment variable is "1" (the
    default) or *force* is true.  This is destructive: all existing
    data is lost.  Errors are printed, never raised.

    :param config: database configuration passed to ``get_engine``
    :param force: run even when ``DB_INIT`` is not "1"
    """
    # print() is valid on both Python 2 and 3; `print "..."` is 2-only.
    print("starting update database...")
    try:
        if int(os.environ.get("DB_INIT", 1)) == 1 or force:
            db_engine = get_engine(config)
            metadata = models.get_metadata(db_engine)
            metadata.drop_all(db_engine)   # destructive: wipes existing data
            metadata.create_all(db_engine)
            print("update database done")
            db = scoped_session(sessionmaker(bind=db_engine,
                                             autocommit=False,
                                             autoflush=True))()
            init_db(db)
    except Exception:
        # Best-effort, matching the original bare except -- but no longer
        # swallowing SystemExit/KeyboardInterrupt.
        import traceback
        traceback.print_exc()
Example #3
0
def update(config, force=False):
    """Drop and recreate every table, then seed initial data.

    Runs only when the ``DB_INIT`` environment variable is "1" (the
    default) or *force* is true.  This is destructive: all existing
    data is lost.  Errors are printed, never raised.

    :param config: database configuration passed to ``get_engine``
    :param force: run even when ``DB_INIT`` is not "1"
    """
    # print() works on Python 2 and 3; the print statement is 2-only.
    print('starting update database...')
    try:
        if int(os.environ.get("DB_INIT", 1)) == 1 or force:
            db_engine = get_engine(config)
            metadata = models.get_metadata(db_engine)
            metadata.drop_all(db_engine)   # destructive: wipes existing data
            metadata.create_all(db_engine)
            print('update database done')
            db = scoped_session(
                sessionmaker(bind=db_engine, autocommit=False,
                             autoflush=True))()
            init_db(db)
    except Exception:
        # Best-effort, matching the original bare except -- but no longer
        # swallowing SystemExit/KeyboardInterrupt.
        import traceback
        traceback.print_exc()
Example #4
0
def restoredb(config, restorefs):
    """Restore a gzip'd JSON-lines backup (as produced by dumpdb).

    Each line is ``[table_name, row_dict]``; the special first record
    ``["table_names", [...]]`` lists tables to clear before rows are
    re-inserted in batches of 500.  Per-record errors are printed and
    skipped so one bad line does not abort the whole restore.

    :param config: database configuration passed to ``get_engine``
    :param restorefs: path to the gzip backup file
    """
    if not os.path.exists(restorefs):
        print('backup file not exists')
        return

    batch_size = 500
    engine = get_engine(config)
    db = engine.connect()
    metadata = models.get_metadata(engine)
    try:
        with gzip.open(restorefs, 'rb') as rfs:
            cache_datas = {}
            for line in rfs:
                try:
                    tabname, rdata = json.loads(line)

                    if tabname == 'table_names' and rdata:
                        for table_name in rdata:
                            print("clean table %s" % table_name)
                            # NOTE(review): table_name is interpolated straight
                            # into SQL -- only restore backups from a trusted
                            # source.
                            db.execute("delete from %s;" % table_name)
                        continue

                    cache_datas.setdefault(tabname, []).append(rdata)

                    # Flush a table's pending rows once the batch is full.
                    if len(cache_datas[tabname]) >= batch_size:
                        print('insert datas<%s> into %s' % (len(cache_datas[tabname]), tabname))
                        db.execute(metadata.tables[tabname].insert().values(cache_datas[tabname]))
                        del cache_datas[tabname]

                except Exception:
                    print('error data %s ...' % line)
                    import traceback
                    traceback.print_exc()

            # Flush whatever is left in each partial batch.
            print("insert last data")
            for tname, tdata in cache_datas.items():
                try:
                    print('insert datas<%s> into %s' % (len(tdata), tname))
                    db.execute(metadata.tables[tname].insert().values(tdata))
                except Exception:
                    print('error data %s ...' % tdata)
                    import traceback
                    traceback.print_exc()

            cache_datas.clear()
    finally:
        # Original leaked the connection if an error escaped the loop.
        db.close()
Example #5
0
def update(config):
    """Drop and recreate every table, then seed initial data.

    Retries forever with a 5-second backoff until the database is
    reachable.  The original recursed into itself on failure, which
    grows the call stack on every retry; a loop keeps the same
    retry-forever behaviour without that risk.  Destructive: all
    existing data is lost.

    :param config: database configuration passed to ``get_engine``
    """
    while True:
        print('starting update database...')
        try:
            db_engine = get_engine(config)
            metadata = models.get_metadata(db_engine)
            metadata.drop_all(db_engine)   # destructive: wipes existing data
            metadata.create_all(db_engine)
            print('update database done')
            db = scoped_session(sessionmaker(bind=db_engine, autocommit=False, autoflush=True))()
            init_db(db)
            return
        except Exception:
            print('initdb error, retry wait 5 second')
            time.sleep(5.0)



        
Example #6
0
def dumpdb(config, dumpfs):
    """Dump every non-excluded table to *dumpfs* as a gzip'd JSON-lines
    file: first a ``["table_names", [...]]`` header record, then one
    ``[table_name, row_dict]`` record per row.

    :param config: database configuration passed to ``get_engine``
    :param dumpfs: destination path; parent directory is created if missing
    """
    backup_dir = os.path.split(dumpfs)[0]
    if not os.path.exists(backup_dir):
        os.makedirs(backup_dir)

    engine = get_engine(config)
    db = engine.connect()
    metadata = models.get_metadata(engine)
    try:
        # Original rebound the `dumpfs` parameter to the file handle;
        # use a distinct name for the handle.
        with gzip.open(dumpfs, 'wb') as outfs:
            # The header lists *all* tables, including the excluded
            # transient ones -- presumably so a restore clears them too.
            table_names = list(metadata.tables.keys())
            table_headers = ('table_names', table_names)
            outfs.write(json.dumps(table_headers, ensure_ascii=False).encode('utf-8'))
            # bytes separator: the gzip stream is binary (str '\n' breaks on Py3)
            outfs.write(b'\n')

            for _name, _table in metadata.tables.items():
                if _name in excludes:
                    continue
                rows = db.execute(select([_table]))
                # Original wrote `for rows in rows`, shadowing the iterable.
                for row in rows:
                    obj = (_name, dict(row.items()))
                    outfs.write(json.dumps(obj, ensure_ascii=False).encode('utf-8'))
                    outfs.write(b'\n')
    finally:
        # Original leaked the connection if an error escaped the dump.
        db.close()