Example #1
0
 def __init__(self, taskd, **kwargs):
     """Initialize the task and build a DBBackup helper.

     Volatile session/statistics tables are excluded from dumps.
     """
     TaseBasic.__init__(self, taskd, **kwargs)
     # Tables whose contents need not survive a restore.
     skip_tables = [
         'tr_online', 'system_session',
         'system_cache', 'tr_ticket',
         'tr_billing', 'tr_online_stat',
         'tr_flow_stat',
     ]
     metadata = models.get_metadata(taskd.db_engine)
     self.db_backup = DBBackup(metadata, excludes=skip_tables)
Example #2
0
class BackupDataTask(TaseBasic):
    """Scheduled task that dumps the database to a gzip'd JSON file
    once a day and prunes stale backup archives."""

    __name__ = 'db-backup'

    def __init__(self, taskd, **kwargs):
        TaseBasic.__init__(self, taskd, **kwargs)
        # Volatile session/statistics tables are left out of the dump.
        excluded = [
            'tr_online', 'system_session',
            'system_cache', 'tr_ticket',
            'tr_billing', 'tr_online_stat',
            'tr_flow_stat',
        ]
        self.db_backup = DBBackup(models.get_metadata(taskd.db_engine),
                                  excludes=excluded)

    def get_notify_interval(self):
        # Seconds until the next daily 02:00 run.
        return utils.get_cron_interval('02:00')

    def first_delay(self):
        return self.get_notify_interval()

    def _prune_old_backups(self, backup_path):
        """Best-effort cleanup: once more than 7 cron backups exist,
        delete those whose creation time is over 14 days old."""
        try:
            bak_list = [
                name for name in os.listdir(backup_path)
                if 'trdb_cron_backup' in name
            ]
            if len(bak_list) > 7:
                logger.info("find expire backup file and remove")
                _count = 0
                for name in bak_list:
                    fpath = os.path.join(backup_path, name)
                    age_days = (time.time() - os.path.getctime(fpath)) / (3600 * 24)
                    if age_days > 14:
                        os.remove(fpath)
                        _count += 1
                        logger.debug("remove expire backup file %s" % fpath)
                logger.info("remove expire backup file total %s" % _count,
                            trace="task")
        except Exception as err:
            logger.exception(err)

    def process(self, *args, **kwargs):
        """Run one backup cycle, prune old files, and return the number
        of seconds until the next scheduled run."""
        self.logtimes()
        next_interval = self.get_notify_interval()
        backup_path = self.config.database.backup_path
        backup_file = "trdb_cron_backup_%s.json.gz" % utils.gen_backep_id()
        try:
            self.db_backup.dumpdb(os.path.join(backup_path, backup_file))
            logger.info(u"数据备份完成,下次执行还需等待 %s" %
                        (self.format_time(next_interval)),
                        trace="task")
        except Exception as err:
            logger.info(u"数据备份失败,%s, 下次执行还需等待 %s" %
                        (repr(err), self.format_time(next_interval)),
                        trace="task")
            logger.exception(err)

        self._prune_old_backups(backup_path)
        return next_interval
Example #3
0
class BackupDataTask(TaseBasic):
    """Daily database-backup task: dumps the schema's tables to a
    compressed JSON file and removes archives older than two weeks."""

    __name__ = 'db-backup'

    def __init__(self, taskd, **kwargs):
        TaseBasic.__init__(self, taskd, **kwargs)
        # Skip volatile tables whose contents need not be restored.
        self.db_backup = DBBackup(
            models.get_metadata(taskd.db_engine),
            excludes=['tr_online', 'system_session', 'system_cache',
                      'tr_ticket', 'tr_billing', 'tr_online_stat',
                      'tr_flow_stat'])

    def get_notify_interval(self):
        # Delay until the next 02:00 execution.
        return utils.get_cron_interval('02:00')

    def first_delay(self):
        return self.get_notify_interval()

    def process(self, *args, **kwargs):
        """Perform a backup, then prune stale archives; returns the
        delay (seconds) before the next run."""
        self.logtimes()
        next_interval = self.get_notify_interval()
        backup_path = self.config.database.backup_path
        backup_file = "trdb_cron_backup_%s.json.gz" % utils.gen_backep_id()
        try:
            self.db_backup.dumpdb(os.path.join(backup_path, backup_file))
            logger.info(u"数据备份完成,下次执行还需等待 %s" % (
                self.format_time(next_interval)), trace="task")
        except Exception as err:
            logger.info(u"数据备份失败,%s, 下次执行还需等待 %s" % (
                repr(err), self.format_time(next_interval)), trace="task")
            logger.exception(err)

        # Cleanup is best-effort: failures are logged, never raised.
        try:
            backups = [f for f in os.listdir(backup_path)
                       if 'trdb_cron_backup' in f]
            if len(backups) > 7:
                logger.info("find expire backup file and remove")
                removed = 0
                day_seconds = 3600 * 24
                for f in backups:
                    fpath = os.path.join(backup_path, f)
                    if (time.time() - os.path.getctime(fpath)) / day_seconds > 14:
                        os.remove(fpath)
                        removed += 1
                        logger.debug("remove expire backup file %s" % fpath)
                logger.info("remove expire backup file total %s" % removed,
                            trace="task")
        except Exception as err:
            logger.exception(err)

        return next_interval
Example #4
0
    def __init__(self, config=None, dbengine=None, **kwargs):
        """Build the cyclone admin web application.

        :param config: global configuration object (``system`` and
            ``database`` sections are read here)
        :param dbengine: optional pre-built SQLAlchemy engine; created
            from ``config`` via ``get_engine`` when not supplied
        """
        self.config = config

        settings = dict(
            cookie_secret="12oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
            login_url="/admin/login",
            template_path=os.path.join(os.path.dirname(toughradius.__file__), "views"),
            static_path=os.path.join(os.path.dirname(toughradius.__file__), "static"),
            xsrf_cookies=True,
            config=self.config,
            debug=self.config.system.debug,
            xheaders=True,
        )

        self.tp_lookup = TemplateLookup(
            directories=[settings['template_path']],
            default_filters=['decode.utf8'],
            input_encoding='utf-8',
            output_encoding='utf-8',
            encoding_errors='ignore',
            module_directory="/tmp/toughradius_admin"
        )

        self.db_engine = dbengine or get_engine(config)
        self.db = scoped_session(sessionmaker(bind=self.db_engine, autocommit=False, autoflush=False))

        # Sessions and cache are redis-backed.  BUGFIX: the original code
        # first constructed a DB-backed session.SessionManager here and then
        # immediately overwrote it with the redis one below; that dead
        # assignment (and wasted construction) has been removed.
        redisconf = redis_conf(config)
        self.session_manager = redis_session.SessionManager(redisconf, settings["cookie_secret"], 600)
        self.mcache = redis_cache.CacheManager(redisconf, cache_name='RadiusManageCache-%s' % os.getpid())
        self.mcache.print_hit_stat(60)

        # Exclude volatile session/stat tables from admin-triggered backups.
        self.db_backup = DBBackup(models.get_metadata(self.db_engine), excludes=[
            'tr_online', 'system_session', 'system_cache', 'tr_ticket'])

        self.aes = utils.AESCipher(key=self.config.system.secret)

        # cache event init
        dispatch.register(self.mcache)

        # app init_route
        load_handlers(handler_path=os.path.join(os.path.abspath(os.path.dirname(__file__))),
            pkg_prefix="toughradius.manage", excludes=['views', 'webserver', 'radius'])

        # app event init
        event_params = dict(dbengine=self.db_engine, mcache=self.mcache, aes=self.aes)
        dispatch.load_events(os.path.join(os.path.abspath(os.path.dirname(toughradius.manage.events.__file__))),
            "toughradius.manage.events", event_params=event_params)

        # Serve backup files for download from the configured backup path.
        permit.add_route(cyclone.web.StaticFileHandler,
                         r"/admin/backup/download/(.*)",
                         u"下载数据", MenuSys,
                         handle_params={"path": self.config.database.backup_path},
                         order=5.0005)
        cyclone.web.Application.__init__(self, permit.all_handlers, **settings)
Example #5
0
    def __init__(self, config=None, log=None, **kwargs):
        """Assemble the admin web application: templates, database,
        DB-backed sessions, cache, backup helper, routes, and the
        RADIUS auth/accounting agents."""
        self.config = config
        self.syslog = log or logger.Logger(config)

        settings = dict(
            cookie_secret="12oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
            login_url="/admin/login",
            template_path=os.path.join(os.path.dirname(__file__), "views"),
            static_path=os.path.join(
                os.path.dirname(toughradius.__file__), "static"),
            xsrf_cookies=True,
            config=config,
            debug=self.config.system.debug,
            xheaders=True,
        )

        # Mako template lookup; undecodable bytes are replaced rather
        # than raising.
        self.tp_lookup = TemplateLookup(
            directories=[settings['template_path']],
            default_filters=['decode.utf8'],
            input_encoding='utf-8',
            output_encoding='utf-8',
            encoding_errors='replace',
            module_directory="/tmp/admin")

        self.db_engine = get_engine(config)
        self.db = scoped_session(sessionmaker(
            bind=self.db_engine, autocommit=False, autoflush=False))
        self.session_manager = session.SessionManager(
            settings["cookie_secret"], self.db_engine, 600)
        self.mcache = cache.CacheManager(self.db_engine)

        # Volatile session/online tables are left out of backups.
        self.db_backup = DBBackup(
            models.get_metadata(self.db_engine),
            excludes=['tr_online', 'system_session',
                      'system_cache', 'tr_ticket'])

        self.aes = utils.AESCipher(key=self.config.system.secret)

        # Expose the backup directory for download via a static handler.
        permit.add_route(cyclone.web.StaticFileHandler,
                         r"/backup/download/(.*)",
                         u"下载数据",
                         MenuSys,
                         handle_params={"path": self.config.database.backup_path},
                         order=1.0405)

        self.init_route()

        self.zauth_agent = authorize.ZAuthAgent(self)
        self.zacct_agent = acctounting.ZAcctAgent(self)

        cyclone.web.Application.__init__(self, permit.all_handlers, **settings)
Example #6
0
    def __init__(self, config=None, dbengine=None, **kwargs):
        """Build the ToughWLAN management web application: file cache,
        templates, database session, redis-backed web sessions/cache,
        backup helper, and route registration."""
        self.config = config

        settings = dict(
            cookie_secret="12oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
            login_url="/login",
            template_path=os.path.join(os.path.dirname(toughwlan.__file__), "views"),
            static_path=os.path.join(os.path.dirname(toughwlan.__file__), "static"),
            xsrf_cookies=True,
            config=self.config,
            debug=self.config.system.debug,
            xheaders=True,
        )

        # Beaker file-based cache under /tmp.
        cache_options = parse_cache_config_options({
            'cache.type': 'file',
            'cache.data_dir': '/tmp/cache/data',
            'cache.lock_dir': '/tmp/cache/lock'
        })
        self.cache = CacheManager(**cache_options)

        self.tp_lookup = TemplateLookup(
            directories=[settings['template_path']],
            default_filters=['decode.utf8'],
            input_encoding='utf-8',
            output_encoding='utf-8',
            encoding_errors='replace',
            module_directory="/tmp/toughwlan")

        self.db_engine = dbengine
        self.db = scoped_session(sessionmaker(
            bind=self.db_engine, autocommit=False, autoflush=False))

        # Web sessions and memory cache live in redis.
        redisconf = redis_conf(config)
        self.session_manager = redis_session.SessionManager(
            redisconf, settings["cookie_secret"], 600)
        self.mcache = redis_cache.CacheManager(
            redisconf, cache_name='ToughWlanWeb-%s' % os.getpid())

        # Skip volatile session/cache tables when backing up.
        self.db_backup = DBBackup(models.get_metadata(self.db_engine), excludes=[
            'trw_online', 'system_session', 'system_cache'])

        self.aes = utils.AESCipher(key=self.config.system.secret)

        # Serve backup files for download from the configured path.
        permit.add_route(cyclone.web.StaticFileHandler,
                         r"/backup/download/(.*)",
                         u"下载数据",
                         u"系统管理",
                         handle_params={"path": self.config.database.backup_path},
                         order=1.0405)

        # Discover and register all management handlers.
        handler_path = os.path.join(
            os.path.abspath(os.path.dirname(toughwlan.__file__)), "manage")
        load_handlers(handler_path=handler_path,
                      pkg_prefix="toughwlan.manage",
                      excludes=['views', 'httpd', 'ddns_task'])

        cyclone.web.Application.__init__(self, permit.all_handlers, **settings)
Example #7
0
 def __init__(self, taskd, **kwargs):
     """Set up the base task and a DBBackup helper that skips
     volatile session/statistics tables."""
     TaseBasic.__init__(self, taskd, **kwargs)
     metadata = models.get_metadata(taskd.db_engine)
     self.db_backup = DBBackup(metadata, excludes=[
         'tr_online', 'system_session', 'system_cache', 'tr_ticket',
         'tr_billing', 'tr_online_stat', 'tr_flow_stat'])