Example 1
 def __init__(self, config=None, dbengine=None, **kwargs):
     self.config = config
     self.db_engine = dbengine or get_engine(config, pool_size=20)
     self.aes = kwargs.pop('aes', None)
     redisconf = redis_conf(config)
     self.cache = kwargs.pop(
         'cache',
         CacheManager(redisconf,
                      cache_name='RadiusTaskCache-%s' % os.getpid()))
     self.db = scoped_session(
         sessionmaker(bind=self.db_engine,
                      autocommit=False,
                      autoflush=False))
     self.load_tasks()
     if not kwargs.get('standalone'):
         logger.info('start register taskd events')
         dispatch.register(log_trace.LogTrace(redis_conf(config)),
                           check_exists=True)
         if 'elasticsearch' in config:
             dispatch.register(eslogapi.EslogApi(config.elasticsearch))
         event_params = dict(dbengine=self.db_engine,
                             mcache=self.cache,
                             aes=self.aes)
         event_path = os.path.abspath(os.path.dirname(events.__file__))
         dispatch.load_events(event_path,
                              'taurusxradius.modules.events',
                              event_params=event_params)
     return
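This constructor shows the setup pattern repeated throughout these examples: reuse an injected engine or build one with get_engine, then bind a scoped (thread-local) session to it. Below is a minimal, self-contained sketch of that pattern, assuming SQLAlchemy 1.x to match this Python 2 codebase; create_engine and db_url are stand-ins for the project-specific get_engine(config) helper.

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

def make_db_session(db_url, dbengine=None, pool_size=20):
    # Reuse an injected engine if one is given, otherwise create a new one
    # (create_engine stands in for get_engine(config, pool_size=...)).
    engine = dbengine or create_engine(db_url, pool_size=pool_size)
    # scoped_session hands each thread its own Session bound to this engine.
    factory = sessionmaker(bind=engine, autocommit=False, autoflush=False)
    return engine, scoped_session(factory)

# Hypothetical usage:
# engine, db = make_db_session('mysql://user:password@127.0.0.1/taurusxr')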
Example 2
 def __init__(self, config, dbengine, **kwargs):
     self.config = config
     self.que = deque()
     self.db_engine = dbengine or get_engine(config, pool_size=30)
     self.cache = CacheManager(redis_conf(config),
                               cache_name='SyncdCache-%s' % os.getpid())
     self.metadata = models.get_metadata(self.db_engine)
     self.tables = {
         _name: _table
         for _name, _table in self.metadata.tables.items()
     }
     self.master_bind = ZmqREPConnection(
         ZmqFactory(), ZmqEndpoint('bind', config.ha.master))
     self.master_bind.gotMessage = self.dataReceived
     self.sync_task = HaSyncTask(config, self.db_engine, self.cache)
     self.ops = {
         'add': self.do_add,
         'update': self.do_update,
         'delete': self.do_delete
     }
     self.process()
     logger.info(u'Starting HA sync service: [Master] {} ~ [Slave] {}'.format(
         self.config.ha.master, self.config.ha.slave))
     if not kwargs.get('standalone'):
         self.logtrace = log_trace.LogTrace(redis_conf(config))
         if 'elasticsearch' in config:
             dispatch.register(eslogapi.EslogApi(config.elasticsearch))
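The HA master above binds a ZeroMQ REP socket and routes each incoming request through dataReceived. A stripped-down sketch of that wiring with txZMQ follows; the endpoint address and the echo handler are hypothetical stand-ins for config.ha.master and the real sync logic.

from twisted.internet import reactor
from txzmq import ZmqEndpoint, ZmqFactory, ZmqREPConnection

def start_responder(address='tcp://127.0.0.1:5555'):  # hypothetical endpoint
    rep = ZmqREPConnection(ZmqFactory(), ZmqEndpoint('bind', address))

    def data_received(message_id, *frames):
        # gotMessage receives a message id plus the request frames;
        # reply() sends the answer back to the matching REQ client.
        rep.reply(message_id, b'ack', *frames)

    rep.gotMessage = data_received
    return rep

if __name__ == '__main__':
    start_responder()
    reactor.run()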
Example 3
def upgrade(config):
    """ 数据库升级,保留日志数据
    """
    try:
        db_engine = get_engine(config)
        metadata = models.get_metadata(db_engine)
        batchsize = 32 if config.database.dbtype == 'sqlite' else 500
        db_backup = DBBackup(metadata, excludes=['tr_online',
                                                 'system_session',
                                                 'system_cache',
                                                 'tr_ticket',
                                                 'tr_billing'], batchsize=batchsize)
        backup_path = config.database.backup_path
        backup_file = 'taurusxr_upgrade_%s.json.gz' % utils.get_currdate()
        backupfs = os.path.join(backup_path, backup_file)
        if not os.path.exists(backupfs):
            raise RuntimeError('please backup old database first!')
        print 'starting upgrade database...'
        tables = [ v for k, v in metadata.tables.iteritems() if k not in ('tr_ticket', 'tr_billing') ]
        metadata.drop_all(db_engine, tables=tables)
        metadata.create_all(db_engine, tables=tables)
        print 'upgrade database done'
        print 'start restore database from %s...' % backupfs
        db_backup.restoredb(backupfs)
        print 'restore database done'
    except:
        import traceback
        traceback.print_exc()
Example 4
 def __init__(self, config=None, dbengine=None, **kwargs):
     self.config = config
     settings = dict(cookie_secret='12oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=', login_url='/admin/login', template_path=os.path.join(os.path.dirname(taurusxradius.__file__), 'views'), static_path=os.path.join(os.path.dirname(taurusxradius.__file__), 'static'), xsrf_cookies=True, config=self.config, debug=self.config.system.debug, xheaders=True)
     self.tp_lookup = TemplateLookup(directories=[settings['template_path']], default_filters=['decode.utf8', 'h'], input_encoding='utf-8', output_encoding='utf-8', encoding_errors='ignore', module_directory='/var/taurusxr/free_module_manage')
     self.license = storage.Storage(dict(sid=tools.get_sys_uuid(), type='taurusxee', create_at='2017-01-01', expire='3000-12-30'))
     os.environ['LICENSE_TYPE'] = 'taurusxee'
     self.db_engine = dbengine or get_engine(config)
     self.db = scoped_session(sessionmaker(bind=self.db_engine, autocommit=False, autoflush=False))
     redisconf = redis_conf(config)
     self.logtrace = log_trace.LogTrace(redisconf)
     self.session_manager = redis_session.SessionManager(redisconf, settings['cookie_secret'], 3600)
     self.mcache = kwargs.get('cache', CacheManager(redisconf, cache_name='RadiusManageCache-%s' % os.getpid()))
     batchsize = 32 if self.config.database.dbtype == 'sqlite' else 500
     self.db_backup = DBBackup(models.get_metadata(self.db_engine),
                               excludes=['tr_online',
                                         'system_session',
                                         'system_cache',
                                         'tr_ticket',
                                         'tr_billing'],
                               batchsize=batchsize)
     self.aes = kwargs.get('aes', utils.AESCipher(key=self.config.system.secret))
     self.init_superrpc()
     dispatch.register(self.mcache)
     dispatch.register(self.logtrace)
     if 'elasticsearch' in self.config:
         dispatch.register(eslogapi.EslogApi(self.config.elasticsearch))
     self.mpsapi = None
     self.wechat = None
     self.init_handlers()
     self.init_events()
     cyclone.web.Application.__init__(self, permit.all_handlers, **settings)
     return
Example 5
 def init(self):
     _dir = os.path.dirname(__file__)
     self.config = iconfig.find_config(os.path.join(_dir, 'test.json'))
     logger.Logger(self.config, 'taurusxee-test')
     self.dbengine = get_engine(self.config)
     self.db = scoped_session(
         sessionmaker(bind=self.dbengine, autocommit=False, autoflush=True))
     self.aes = utils.AESCipher(key=self.config.system.secret)
Example 6
def add_column(config, tablename, column, ctype='VARCHAR', defval=''):
    try:
        db_engine = get_engine(config)
        sqlstr = u"ALTER TABLE {0} ADD COLUMN {1} {2} DEFAULT '{3}';"
        sqlstr = sqlstr.format(tablename, column, ctype, defval)
        with db_engine.begin() as conn:
            conn.execute(_sql(sqlstr))
    except Exception as e:
        import traceback
        traceback.print_exc()
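add_column builds the ALTER TABLE statement by string formatting, since identifiers cannot be passed as bound parameters, and runs it inside engine.begin(), which commits when the block exits cleanly. A minimal sketch of the same idea with plain SQLAlchemy, assuming _sql is a wrapper around sqlalchemy.text:

from sqlalchemy import create_engine, text

def add_varchar_column(engine, tablename, column, defval=''):
    # Identifiers are formatted into the DDL rather than bound; they are
    # expected to come from trusted configuration, not user input.
    ddl = "ALTER TABLE {0} ADD COLUMN {1} VARCHAR DEFAULT '{2}'".format(
        tablename, column, defval)
    with engine.begin() as conn:  # transaction commits when the block exits cleanly
        conn.execute(text(ddl))

# Hypothetical usage:
# add_varchar_column(create_engine('sqlite:///taurusxr.db'), 'tr_account', 'remark')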
Example 7
def show_tables(config):
    try:
        db_engine = get_engine(config)
        metadata = models.get_metadata(db_engine)
        for tname, table in metadata.tables.items():
            print tname

    except Exception as e:
        import traceback
        traceback.print_exc()
Example 8
def build_tables(config):
    try:
        db_engine = get_engine(config)
        print 'starting build tables...'
        metadata = models.get_metadata(db_engine)
        metadata.drop_all(db_engine)
        metadata.create_all(db_engine)
        print 'build table done'
    except:
        import traceback
        traceback.print_exc()
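build_tables rebuilds the whole schema from the model metadata; drop_all and create_all order the tables by their foreign-key dependencies. A self-contained sketch with one hypothetical declarative model standing in for models.get_metadata:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class DemoAccount(Base):
    # Hypothetical model standing in for the real tr_* tables.
    __tablename__ = 'tr_demo_account'
    id = Column(Integer, primary_key=True)
    account_number = Column(String(32), index=True)

def rebuild_tables(db_url):
    engine = create_engine(db_url)
    Base.metadata.drop_all(engine)    # drop existing tables (all data is lost)
    Base.metadata.create_all(engine)  # recreate them from the model definitions
    return engine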
Example 9
def drop_table(config, table_name):
    try:
        print 'starting drop table %s' % table_name
        db_engine = get_engine(config)
        metadata = models.get_metadata(db_engine)
        for tname, table in metadata.tables.items():
            if tname == table_name:
                table.drop(db_engine)
                print 'drop table %s done' % table_name
                break

    except Exception as e:
        import traceback
        traceback.print_exc()
Example 10
def update(config, force=False):
    try:
        db_engine = get_engine(config)
        if int(os.environ.get('DB_INIT', 1)) == 1 or force:
            print 'starting update database...'
            metadata = models.get_metadata(db_engine)
            metadata.drop_all(db_engine)
            metadata.create_all(db_engine)
            print 'update database done'
            db = scoped_session(sessionmaker(bind=db_engine, autocommit=False, autoflush=True))()
            init_db(db)
    except:
        import traceback
        traceback.print_exc()
Example 11
 def __init__(self, config=None, dbengine=None, **kwargs):
     self.config = config
     os.environ['LICENSE_TYPE'] = 'taurusxee'
     settings = dict(
         cookie_secret='12oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=',
         login_url='/usrportal/login',
         template_path=os.path.join(os.path.dirname(taurusxradius.__file__),
                                    'views'),
         static_path=os.path.join(os.path.dirname(taurusxradius.__file__),
                                  'static'),
         xsrf_cookies=True,
         config=self.config,
         debug=self.config.system.debug,
         xheaders=True)
     self.tp_lookup = TemplateLookup(
         directories=[settings['template_path']],
         default_filters=['decode.utf8', 'h'],
         input_encoding='utf-8',
         output_encoding='utf-8',
         encoding_errors='ignore',
         module_directory='/var/taurusxr/module_usrportal')
     self.db_engine = dbengine or get_engine(config)
     self.db = scoped_session(
         sessionmaker(bind=self.db_engine,
                      autocommit=False,
                      autoflush=False))
     redisconf = redis_conf(config)
     self.logtrace = log_trace.LogTrace(redisconf)
     self.session_manager = redis_session.SessionManager(
         redisconf, settings['cookie_secret'], 3600)
     self.mcache = kwargs.get(
         'cache',
         CacheManager(redisconf, cache_name='UsrpdCache-%s' % os.getpid()))
     self.paycache = CacheManager(redisconf,
                                  cache_name='UsrpdPayCache-%s' %
                                  os.getpid(),
                                  db=9)
     self.aes = kwargs.get('aes',
                           utils.AESCipher(key=self.config.system.secret))
     logger.info('start register usrportal events')
     dispatch.register(self.mcache)
     dispatch.register(self.logtrace)
     if 'elasticsearch' in config:
         dispatch.register(eslogapi.EslogApi(config.elasticsearch))
     load_handlers(handler_path=os.path.join(
         os.path.abspath(os.path.dirname(__file__)), 'usrportal'),
                   pkg_prefix='taurusxradius.modules.usrportal',
                   excludes=['webserver', 'radius'])
     cyclone.web.Application.__init__(self, permit.all_handlers, **settings)
Example 12
 def __init__(self, config, dbengine, radcache=None):
     self.config = config
     self.load_plugins(load_types=['radius_auth_req', 'radius_accept'])
     self.dict = dictionary.Dictionary(os.path.join(os.path.dirname(taurusxradius.__file__), 'dictionarys/dictionary'))
     self.db_engine = dbengine or get_engine(config)
     self.aes = utils.AESCipher(key=self.config.system.secret)
     self.mcache = radcache
     self.reject_debug = int(self.get_param_value('radius_reject_debug', 0)) == 1
     self.pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', config.mqproxy['auth_result']))
     self.stat_pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', config.mqproxy['task_connect']))
     self.puller = ZmqPullConnection(ZmqFactory(), ZmqEndpoint('connect', config.mqproxy['auth_message']))
     self.puller.onPull = self.process
     logger.info('radius auth worker %s start' % os.getpid())
     logger.info('init auth worker pusher : %s ' % self.pusher)
     logger.info('init auth worker puller : %s ' % self.puller)
     logger.info('init auth stat pusher : %s ' % self.stat_pusher)
     self.license_ulimit = 50000
Example 13
 def __init__(self, config, dbengine, radcache=None):
     self.config = config
     self.load_plugins(load_types=['radius_auth_req', 'radius_accept'])
     self.dict = dictionary.Dictionary(os.path.join(os.path.dirname(taurusxradius.__file__), 'dictionarys/dictionary'))
     self.db_engine = dbengine or get_engine(config)
     self.aes = utils.AESCipher(key=self.config.system.secret)
     self.mcache = radcache
     self.stat_pusher = ZmqPushConnection(ZmqFactory())
     self.zmqrep = ZmqREPConnection(ZmqFactory())
     self.stat_pusher.tcpKeepalive = 1
     self.zmqrep.tcpKeepalive = 1
     self.stat_pusher.addEndpoints([ZmqEndpoint('connect', config.mqproxy.task_connect)])
     self.zmqrep.addEndpoints([ZmqEndpoint('connect', config.mqproxy.auth_connect)])
     self.zmqrep.gotMessage = self.process
     self.reject_debug = int(self.get_param_value('radius_reject_debug', 0)) == 1
     logger.info('radius auth worker %s start' % os.getpid())
     logger.info('init auth worker : %s ' % self.zmqrep)
     logger.info('init auth stat pusher : %s ' % self.stat_pusher)
Example 14
def create_table(config, table_name):
    try:
        print 'starting create table %s' % table_name
        db_engine = get_engine(config)
        metadata = models.get_metadata(db_engine)
        is_define = False
        for tname, table in metadata.tables.items():
            if tname == table_name:
                is_define = True
                table.create(db_engine)
                print 'create table %s done' % table_name
                break

        if not is_define:
            print 'table %s not defined in taurusxradius.modules.models' % table_name
    except Exception as e:
        import traceback
        traceback.print_exc()
Example 15
def backup(config):
    try:
        db_engine = get_engine(config)
        metadata = models.get_metadata(db_engine)
        batchsize = 32 if config.database.dbtype == 'sqlite' else 500
        db_backup = DBBackup(metadata, excludes=['tr_online',
                                                 'system_session',
                                                 'system_cache',
                                                 'tr_ticket',
                                                 'tr_billing'], batchsize=batchsize)
        print 'start backup database...'
        backup_path = config.database.backup_path
        backup_file = 'taurusxr_upgrade_%s.json.gz' % utils.get_currdate()
        backupfs = os.path.join(backup_path, backup_file)
        db_backup.dumpdb(backupfs)
        print 'backup database %s done' % backupfs
    except:
        import traceback
        traceback.print_exc()
Example 16
 def __init__(self, config, dbengine, radcache=None):
     self.config = config
     self.load_plugins(load_types=['radius_acct_req'])
     self.db_engine = dbengine or get_engine(config)
     self.mcache = radcache
     self.dict = dictionary.Dictionary(os.path.join(os.path.dirname(taurusxradius.__file__), 'dictionarys/dictionary'))
     self.stat_pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', config.mqproxy['task_connect']))
     self.pusher = ZmqPushConnection(ZmqFactory(), ZmqEndpoint('connect', config.mqproxy['acct_result']))
     self.puller = ZmqPullConnection(ZmqFactory(), ZmqEndpoint('connect', config.mqproxy['acct_message']))
     self.puller.onPull = self.process
     self.acct_class = {
         STATUS_TYPE_START: RadiusAcctStart,
         STATUS_TYPE_STOP: RadiusAcctStop,
         STATUS_TYPE_UPDATE: RadiusAcctUpdate,
         STATUS_TYPE_ACCT_ON: RadiusAcctOnoff,
         STATUS_TYPE_ACCT_OFF: RadiusAcctOnoff
     }
     logger.info('radius acct worker %s start' % os.getpid())
     logger.info('init acct worker pusher : %s ' % self.pusher)
     logger.info('init acct worker puller : %s ' % self.puller)
     logger.info('init acct stat pusher : %s ' % self.stat_pusher)
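The auth and acct workers (Examples 12, 13 and 16) share one messaging shape: pull requests from the mqproxy, process them, and push results and statistics back out. A stripped-down sketch of that pull/push loop with txZMQ, using hypothetical endpoint addresses in place of config.mqproxy:

from twisted.internet import reactor
from txzmq import ZmqEndpoint, ZmqFactory, ZmqPullConnection, ZmqPushConnection

def start_worker(pull_addr='tcp://127.0.0.1:6000',   # hypothetical endpoints
                 push_addr='tcp://127.0.0.1:6001'):
    factory = ZmqFactory()
    puller = ZmqPullConnection(factory, ZmqEndpoint('connect', pull_addr))
    pusher = ZmqPushConnection(factory, ZmqEndpoint('connect', push_addr))

    def process(frames):
        # onPull is called with the list of message parts pulled from the queue;
        # after (hypothetical) processing, each frame is pushed to the result socket.
        for frame in frames:
            pusher.push(frame)

    puller.onPull = process
    return puller, pusher

if __name__ == '__main__':
    start_worker()
    reactor.run()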