def main(): try: get_opt () print "mq dict ", gconf.mq_dict print "conf dict" , gconf.conf_dict g_logger.init_logger('CAS_watcher', gconf.conf_dict['log_level'], gconf.conf_dict['log_file'], SysLogHandler.LOG_LOCAL2) g_logger_info.init_logger('CAS_watcher', gconf.conf_dict['log_level'], gconf.conf_dict['log_file'], SysLogHandler.LOG_LOCAL1) watcher = task_warden() watcher.start() dp_th = dbpc.dbpc (gconf.dbpc_dict['host'], gconf.dbpc_dict['port'], gconf.dbpc_dict['service'], gconf.dbpc_dict['component']+"-watcher", gconf.dbpc_dict['interval']) dp_th.start () watcher.join() dp_th.join() except Exception, msg: raise Exception('start service failed [%s]' %(msg)) import traceback traceback.print_exc() sys.exit (-1)
def get_global_vars(cfg):
    """Populate the gv module namespace for the result-push worker.

    Reads the parsed config dict *cfg* and stores MQ endpoints, dbpc
    heartbeat settings, thunder server host and statsd endpoint on gv.
    """
    mq = cfg['pushresultmq']
    gv.pushresult_url = mq['url']
    gv.pushresult_queue = mq['queue']
    gv.pushresult_exchange = mq['exchange']
    gv.pushresult_routing_key = mq['routing_key']

    hb = cfg['dbpc']
    gv.dbpc_host = hb['host']
    gv.dbpc_port = hb['port']
    gv.dppc_service = hb['service']
    gv.interval = hb['interval']
    # heartbeat reporter object (constructed here, started elsewhere)
    gv.dp = dbpc.dbpc(gv.dbpc_host, int(gv.dbpc_port), gv.dppc_service,
                      "query_broker.qb_resultpush", int(gv.interval))

    gv.thunder_server = cfg['thunderserver']['host']

    sd = cfg['statsdserver']
    gv.statsdhost = sd['host']
    gv.statsdport = sd['port']

    gv.is_push = cfg['is_push']
def main(): try: get_opt () print "mq dict ", gconf.mq_dict print "conf dict" , gconf.conf_dict cas_utils.g_logger.init_logger('CAS_dicpatcher', gconf.conf_dict['log_level'], gconf.conf_dict['log_file'], SysLogHandler.LOG_LOCAL2) cas_utils.g_logger_info.init_logger('CAS_dispatcher', gconf.conf_dict['log_level'], gconf.conf_dict['log_file'], SysLogHandler.LOG_LOCAL1) cas_utils.g_statsd = statsd_operator(gconf.conf_dict['statsd']['host'], gconf.conf_dict['statsd']['port']) thread_tasker = fetch_jobs() thread_tasker.start() dp_th = dbpc.dbpc (gconf.dbpc_dict['host'], gconf.dbpc_dict['port'], gconf.dbpc_dict['service'], gconf.dbpc_dict['component']+"-dispatcher", gconf.dbpc_dict['interval']) dp_th.start () #join threads thread_tasker.join() dp_th.join() except Exception, msg: raise Exception('start service failed [%s]' %(msg)) import traceback traceback.print_exc() sys.exit (-1)
def main(): try: get_opt() print "mq dict ", gconf.mq_dict print "conf dict", gconf.conf_dict cas_utils.g_logger.init_logger('CAS_dicpatcher', gconf.conf_dict['log_level'], gconf.conf_dict['log_file'], SysLogHandler.LOG_LOCAL2) cas_utils.g_logger_info.init_logger('CAS_dispatcher', gconf.conf_dict['log_level'], gconf.conf_dict['log_file'], SysLogHandler.LOG_LOCAL1) cas_utils.g_statsd = statsd_operator(gconf.conf_dict['statsd']['host'], gconf.conf_dict['statsd']['port']) thread_tasker = fetch_jobs() thread_tasker.start() dp_th = dbpc.dbpc(gconf.dbpc_dict['host'], gconf.dbpc_dict['port'], gconf.dbpc_dict['service'], gconf.dbpc_dict['component'] + "-dispatcher", gconf.dbpc_dict['interval']) dp_th.start() #join threads thread_tasker.join() dp_th.join() except Exception, msg: raise Exception('start service failed [%s]' % (msg)) import traceback traceback.print_exc() sys.exit(-1)
def start_dbpc(config, module_name):
    """Construct a dbpc heartbeat object for *module_name* and start it
    on a new thread."""
    component = config['dbpc_component_prefix'] + module_name
    sender = dbpc(config['dbpc_server'],
                  int(config['dbpc_port']),
                  config['dbpc_service'],
                  component,
                  int(config['dbpc_report_interval']))
    sender.start()
def start_dbpc(config, module_name):
    """Spawn the dbpc heartbeat thread for the component named by
    the configured prefix plus *module_name*."""
    server = config['dbpc_server']
    port = int(config['dbpc_port'])
    service = config['dbpc_service']
    interval = int(config['dbpc_report_interval'])
    sender = dbpc(server, port, service,
                  config['dbpc_component_prefix'] + module_name, interval)
    sender.start()
def get_global_vars(cfg):
    """Populate the gv module namespace for the rating worker.

    Stores CAS and task-priority MQ endpoints, dbpc heartbeat settings,
    mysql databases, content filters and statsd endpoint on gv.
    """
    mq = cfg['casmq']
    gv.cas_url = mq['url']
    gv.cas_queue = mq['queue']
    gv.cas_exchange = mq['exchange']
    gv.cas_routing_key = mq['routing_key']

    hb = cfg['dbpc']
    gv.dbpc_host = hb['host']
    gv.dbpc_port = hb['port']
    gv.dppc_service = hb['service']
    gv.interval = hb['interval']
    # heartbeat reporter object (constructed here, started elsewhere)
    gv.dp = dbpc.dbpc(gv.dbpc_host, int(gv.dbpc_port), gv.dppc_service,
                      "query_broker.qb_rating", int(gv.interval))

    pri = cfg['taskprioritymq']
    gv.taskpriorit_url = pri['url']
    gv.taskpriorit_queue = pri['queue']
    gv.taskpriorit_exchange = pri['exchange']
    gv.taskpriorit_routing_key = pri['routing_key']

    gv.databases = cfg['mysql']

    flt = cfg['filter']
    gv.file_ext_list = flt['file_ext']
    gv.min_file_size = flt['minfilesize']
    gv.max_file_size = flt['maxfilesize']
    gv.suspicious_mime_types = flt['suspicious_mime_types']

    sd = cfg['statsdserver']
    gv.statsdhost = sd['host']
    gv.statsdport = sd['port']

    gv.score = flt['score']
    gv.video_rating_url = cfg['video_rating']
def get_global_vars(cfg):
    """Populate the gv module namespace for the priority worker.

    Stores low/high CAS MQ endpoints, dbpc heartbeat settings, mysql
    databases, content filters, statsd endpoint and redis keys on gv.
    """
    low = cfg['casmq']
    gv.cas_url = low['url']
    gv.cas_queue = low['queue']
    gv.cas_exchange = low['exchange']
    gv.cas_routing_key = low['routing_key']

    high = cfg['cashighmq']
    gv.cashigh_url = high['url']
    gv.cashigh_queue = high['queue']
    gv.cashigh_exchange = high['exchange']
    gv.cashigh_routing_key = high['routing_key']

    hb = cfg['dbpc']
    gv.dbpc_host = hb['host']
    gv.dbpc_port = hb['port']
    gv.dppc_service = hb['service']
    gv.interval = hb['interval']
    # heartbeat reporter object (constructed here, started elsewhere)
    gv.dp = dbpc.dbpc(gv.dbpc_host, int(gv.dbpc_port), gv.dppc_service,
                      "query_broker.qb_priority", int(gv.interval))

    pri = cfg['taskprioritymq']
    gv.taskpriorit_url = pri['url']
    gv.taskpriorit_queue = pri['queue']
    gv.taskpriorit_exchange = pri['exchange']
    gv.taskpriorit_routing_key = pri['routing_key']

    gv.databases = cfg['mysql']

    flt = cfg['filter']
    gv.file_ext_list = flt['file_ext']
    gv.min_file_size = flt['minfilesize']
    gv.max_file_size = flt['maxfilesize']
    gv.suspicious_mime_types = flt['suspicious_mime_types']

    sd = cfg['statsdserver']
    gv.statsdhost = sd['host']
    gv.statsdport = sd['port']

    rds = cfg['redis']
    gv.rds_url_hot = rds['url_hot']
    gv.rds_cas_high = rds['cas_high']
    gv.rds_cas_low = rds['cas_low']
    gv.rds_cas_black = rds['cas_black']
    gv.ttl = rds['ttl']

    # optional filter key; defaults to an empty list when absent
    gv.special_char = flt.get('special_char', [])
def get_global_vars(cfg):
    """Populate the gv module namespace for the push worker.

    Stores qb/pushresult/taskpriority MQ endpoints, dbpc heartbeat
    settings, the vddbasync backend credentials and statsd endpoint.
    """
    qb = cfg['qbmq']
    gv.qb_url = qb['url']
    gv.qb_queue = qb['queue']
    gv.qb_exchange = qb['exchange']
    gv.qb_routing_key = qb['routing_key']

    hb = cfg['dbpc']
    gv.dbpc_host = hb['host']
    gv.dbpc_port = hb['port']
    gv.interval = hb['interval']
    gv.dppc_service = hb['service']
    # heartbeat reporter object (constructed here, started elsewhere)
    gv.dp = dbpc.dbpc(gv.dbpc_host, int(gv.dbpc_port), gv.dppc_service,
                      "query_broker.qb_push", int(gv.interval))

    mw = cfg['vddbasync']
    gv.mysystem_url = mw['url']
    gv.mysystem_host = mw['host']
    gv.mysystem_user = mw['user']
    gv.mysystem_passwd = mw['passwd']

    res = cfg['pushresultmq']
    gv.pushresult_url = res['url']
    gv.pushresult_queue = res['queue']
    gv.pushresult_exchange = res['exchange']
    gv.pushresult_routing_key = res['routing_key']

    pri = cfg['taskprioritymq']
    gv.taskpriorit_url = pri['url']
    gv.taskpriorit_queue = pri['queue']
    gv.taskpriorit_exchange = pri['exchange']
    gv.taskpriorit_routing_key = pri['routing_key']

    sd = cfg['statsdserver']
    gv.statsdhost = sd['host']
    gv.statsdport = sd['port']
logger.info("load config ok!") conn = connect(db_url=db_url, charset='utf8', use_unicode=False) Event.set_dbconnection(conn) # start dbpc thread if mwconf and 'dbpc' in mwconf: try: from dbpc import dbpc except: logger.warn("load dbpc module failed, ignore it!") else: dbpc_conf = mwconf.dbpc dbpc_sender = dbpc(dbpc_conf.host, int(dbpc_conf.port), dbpc_conf.service, dbpc_conf.component_prefix + MODULE_NAME, logger, int(dbpc_conf.heartbeat_interval)) dbpc_sender.start() logger.info('dbpc thread started. host is %s, port is %s' % (dbpc_conf.host, dbpc_conf.port)) while True: try: m_events = Event.loadfromdb( type='moment', alarm_time='0000-00-00 00:00:00', where='UNIX_TIMESTAMP(start_time) > {}'.format( int(now() - conf.scan_range))) logger.debug("load moment events: %s" % str(m_events)) l_events = Event.loadfromdb(alarm_time='0000-00-00 00:00:00',
master_thr = threading.Thread(target=master, args=(dao, kev)) master_thr.setDaemon(True) master_thr.start() logger.info("Kingship thread running......") time.sleep(0.5) # create dbpc thread dbpc_thr = None try: from dbpc import dbpc dbpc_cfg = config.dbpc if dbpc_cfg: dbpc_thr = dbpc(dbpc_cfg['host'], dbpc_cfg['port'], dbpc_cfg['service'], dbpc_cfg['component_prefix'] + MODULE, dbpc_cfg['heartbeat_interval']) dbpc_thr.start() logger.info("dbpc thread running......") except (ImportError, AttributeError): # ignore dbpc logger.info("Ignore dbpc......") def do_push(customer_id): return dao.pushall(customer_id, PUSH_TIMEOUT) def log_result(request, result): logger.info("customer:{} push {} results".format( str(request.args[0]), result)) def exception_alarm(request, exc_info):
def get_global_vars(cfg):
    """Populate the gv module namespace for the pull worker.

    Stores finish/vddb-query/vddb-result/cas/pushresult MQ endpoints,
    dbpc heartbeat settings, the vddbasync backend credentials, temp
    dir, company id, statsd endpoint and redis keys on gv.
    """
    fin = cfg['finshmq']
    gv.finsh_url = fin['url']
    gv.finsh_queue = fin['queue']
    gv.finsh_exchange = fin['exchange']
    gv.finsh_routing_key = fin['routing_key']

    vq = cfg['vddbquerymq']
    gv.vddb_queryurl = vq['url']
    gv.vddb_queryqueue = vq['queue']
    gv.vddb_queryexchange = vq['exchange']
    gv.vddb_queryrouting_key = vq['routing_key']

    vr = cfg['vddbresultmq']
    gv.vddb_resulturl = vr['url']
    gv.vddb_resultqueue = vr['queue']
    gv.vddb_resultexchange = vr['exchange']
    gv.vddb_resultrouting_key = vr['routing_key']

    hb = cfg['dbpc']
    gv.dbpc_host = hb['host']
    gv.dbpc_port = hb['port']
    gv.dppc_service = hb['service']
    gv.interval = hb['interval']
    # heartbeat reporter object (constructed here, started elsewhere)
    gv.dp = dbpc.dbpc(gv.dbpc_host, int(gv.dbpc_port), gv.dppc_service,
                      "query_broker.qb_pull", int(gv.interval))

    mw = cfg['vddbasync']
    gv.mysystem_url = mw['url']
    gv.mysystem_host = mw['host']
    gv.mysystem_user = mw['user']
    gv.mysystem_passwd = mw['passwd']
    gv.mysystem_port = mw['port']

    gv.file_tmpdir = cfg['tmpdir']

    cas = cfg['casmq']
    gv.cas_url = cas['url']
    gv.cas_queue = cas['queue']
    gv.cas_exchange = cas['exchange']
    gv.cas_routing_key = cas['routing_key']

    res = cfg['pushresultmq']
    gv.pushresult_url = res['url']
    gv.pushresult_queue = res['queue']
    gv.pushresult_exchange = res['exchange']
    gv.pushresult_routing_key = res['routing_key']

    gv.company = cfg['company']

    sd = cfg['statsdserver']
    gv.statsdhost = sd['host']
    gv.statsdport = sd['port']

    rds = cfg['redis']
    gv.rds_url_hot = rds['url_hot']
    gv.rds_cas_high = rds['cas_high']
    gv.rds_cas_low = rds['cas_low']
    gv.rds_cas_black = rds['cas_black']
master_thr = threading.Thread(target=master, args=(dao, kev)) master_thr.setDaemon(True) master_thr.start() logger.info("Kingship thread running......") time.sleep(0.5) # create dbpc thread dbpc_thr = None try: from dbpc import dbpc dbpc_cfg = config.dbpc if dbpc_cfg: dbpc_thr = dbpc(dbpc_cfg['host'], dbpc_cfg['port'], dbpc_cfg['service'], dbpc_cfg['component_prefix'] + MODULE, dbpc_cfg['heartbeat_interval']) dbpc_thr.start() logger.info("dbpc thread running......") except (ImportError, AttributeError): # ignore dbpc logger.info("Ignore dbpc......") def do_push(customer_id): return dao.pushall(customer_id, PUSH_TIMEOUT) def log_result(request, result): logger.info("customer:{} push {} results".format(str(request.args[0]), result)) def exception_alarm(request, exc_info):
logger.info("load config ok!") conn = connect(db_url=db_url, charset='utf8', use_unicode=False) Event.set_dbconnection(conn) # start dbpc thread if mwconf and 'dbpc' in mwconf: try: from dbpc import dbpc except: logger.warn("load dbpc module failed, ignore it!") else: dbpc_conf = mwconf.dbpc dbpc_sender = dbpc(dbpc_conf.host, int(dbpc_conf.port), dbpc_conf.service, dbpc_conf.component_prefix + MODULE_NAME, logger, int(dbpc_conf.heartbeat_interval)) dbpc_sender.start() logger.info( 'dbpc thread started. host is %s, port is %s' % (dbpc_conf.host, dbpc_conf.port)) while True: try: m_events = Event.loadfromdb(type='moment', alarm_time='0000-00-00 00:00:00', where='UNIX_TIMESTAMP(start_time) > {}'.format( int(now() - conf.scan_range))) logger.debug("load moment events: %s" % str(m_events)) l_events = Event.loadfromdb(alarm_time='0000-00-00 00:00:00',
# Web interface exposing the /<pattern>/matches endpoint via web.py.
import os
import sys
import web
import logging.config

# Resolve the directory holding this script and its parent, and make the
# script directory importable before pulling in the local modules below.
bin_dir = os.path.dirname(os.path.realpath(__file__))
work_dir = os.path.dirname(bin_dir)
sys.path.insert(0, bin_dir)

from matches import matches
from dbpc import dbpc
from web_config import config

#logging.config.fileConfig("/".join([work_dir, 'etc', 'log.conf']),
#                          disable_existing_loggers=False)

# URL routing: any path ending in /matches is handled by the matches class.
urls = (
    '/*.*/matches', 'matches'
)
app = web.application(urls, globals())

if __name__ == '__main__':
    # Run the built-in development server when executed directly.
    app.run()
else:
    # Imported by a WSGI container: start the dbpc heartbeat thread and
    # expose the WSGI callable.
    d = dbpc(config['dbpc']['server'],
             int(config['dbpc']['port']),
             'thunder',
             config['dbpc']['component']+"matches_interface",
             int(config['dbpc']['report_interval']))
    d.start()
    app = app.wsgifunc()
#! /usr/bin/python import os import sys import web import logging.config bin_dir = os.path.dirname(os.path.realpath(__file__)) work_dir = os.path.dirname(bin_dir) sys.path.insert(0, bin_dir) from matches import matches from dbpc import dbpc from web_config import config #logging.config.fileConfig("/".join([work_dir, 'etc', 'log.conf']), # disable_existing_loggers=False) urls = ('/*.*/matches', 'matches') app = web.application(urls, globals()) if __name__ == '__main__': app.run() else: d = dbpc(config['dbpc']['server'], int(config['dbpc']['port']), 'thunder', config['dbpc']['component'] + "matches_interface", int(config['dbpc']['report_interval'])) d.start() app = app.wsgifunc()
def ProcessDBPC(config):
    """Build and return a dbpc reporter from *config*.

    Missing keys fall back to the hard-coded defaults below.
    """
    host = config.get('host', '192.168.1.146')
    port = config.get('port', 5800)
    service = config.get('service', 'xhs')
    component = config.get('component', 'querier')
    interval = config.get('interval', 120)
    return dbpc(host, port, service, component, interval)