def publish_test(test_id):
    """Publish a test-run job to the RabbitMQ topic exchange.

    test_id -- identifier of the test to run; sent as the JSON body
    {"id": test_id} on exchange 'test_run_jobs'.
    """
    config_path = current_dir + "/config.yaml"
    rbmq_username = get_config("RABBITMQ_SERVER_DETAILS", "USERNAME", config_path)
    rbmq_password = get_config("RABBITMQ_SERVER_DETAILS", "PASSWORD", config_path)
    rbmq_ip = get_config("RABBITMQ_SERVER_DETAILS", "SERVER_IP", config_path)
    try:
        credentials = pika.PlainCredentials(rbmq_username, rbmq_password)
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=rbmq_ip, credentials=credentials))
    except gaierror as e:
        # The original printed an empty string and fell through, crashing
        # with a NameError on the undefined `connection`. Report and bail.
        print("could not resolve RabbitMQ host {}: {}".format(rbmq_ip, e))
        return
    channel = connection.channel()
    message = {"id": test_id}
    # Durable queue + persistent delivery so jobs survive a broker restart.
    channel.queue_declare(queue='test_run.jobs.queue', durable=True)
    channel.basic_qos(prefetch_count=1)
    channel.exchange_declare(exchange='test_run_jobs', exchange_type='topic')
    channel.queue_bind(exchange="test_run_jobs",
                       queue="test_run.jobs.queue",
                       routing_key="hello.#")
    channel.basic_publish(
        exchange='test_run_jobs',
        routing_key='hello.hi.how',
        body=json.dumps(message),
        properties=pika.BasicProperties(
            delivery_mode=2,  # make message persistent
        ))
    print(" [x] Sent {}".format(json.dumps(message)))
    connection.close()
def __init__(self, host, port, user=func.get_config("monitor_mysql", "user"), password=func.get_config("monitor_mysql", "passwd"), db='mysql'):
    """Open a connection to the target MySQL instance.

    host/port -- server address; port may be a string (cast to int below).
    user/password -- credentials; NOTE: these defaults are evaluated once,
    at import time, not on each call.
    db -- schema to connect to, defaults to 'mysql'.

    On failure self.connect stays None and the traceback is printed.
    """
    self.ip = host
    self.port = port
    self.user = user
    self.password = password
    self.db = db
    self.connect = None
    try:
        self.connect = MySQLdb.connect(
            host=self.ip,
            user=self.user,
            passwd=self.password,
            db=self.db,
            port=int(self.port),
            charset="utf8",
            cursorclass=MySQLdb.cursors.DictCursor,
            connect_timeout=2)
    except MySQLdb.Error as e:
        # `except MySQLdb.Error, e` was Python-2-only syntax; `as` works on
        # both 2.6+ and 3. The redundant `return None` was dropped:
        # __init__ always returns None.
        self.connect = None
        traceback.print_exc()
def main():
    """Purge binlogs on every eligible MySQL server, one worker process
    each, with a fixed 60-second budget before workers are terminated."""
    user = func.get_config('mysql_db', 'username')
    passwd = func.get_config('mysql_db', 'password')
    servers = func.mysql_query("select host,port,binlog_store_days from db_cfg_mysql where is_delete=0 and monitor=1 and binlog_auto_purge=1;")
    if not servers:
        return
    print("%s: admin mysql purge binlog controller started." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),))
    workers = [
        Process(target=admin_mysql_purge_binlog,
                args=(host, port, user, passwd, store_days))
        for host, port, store_days in servers
    ]
    for worker in workers:
        worker.start()
    time.sleep(60)  # shared time budget for all workers
    for worker in workers:
        worker.terminate()
    for worker in workers:
        worker.join()
    print("%s: admin mysql purge binlog controller finished." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),))
def main():
    """Run the big-table size check against every monitored master MySQL
    server, serially."""
    # masters only: join db_servers_mysql against mysql_status on role="master"
    servers = func.mysql_query('select m.id,m.host,m.port,m.tags,m.bigtable_size from db_servers_mysql as m , mysql_status as s where m.is_delete=0 and m.monitor=1 and m.bigtable_monitor=1 and m.host=s.host and m.port=s.port and s.role="master" order by host;')
    cnfKey = "monitor_mysql"
    username = func.get_config(cnfKey, 'user')
    password = func.get_config(cnfKey, 'passwd')
    if servers:
        print("%s: check mysql bigtable controller started." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),))
        no = 1
        for row in servers:
            (server_id, host, port, tags, bigtable_size) = row
            # Checks run serially; the commented-out multiprocessing variant
            # and its unused exeTimeout/plist scaffolding were removed.
            check_mysql_bigtable(no, host, port, username, password, server_id, tags, bigtable_size)
            no += 1
        print("%s: check mysql bigtable controller finished." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),))
def main():
    """RabbitMQ consumer: configure logging, connect to the broker, and
    consume test-run jobs until interrupted."""
    connection = None  # sentinel so cleanup is safe on every exit path
    try:
        # Configuration parameters (config lookups currently hard-wired).
        log_level = "INFO"  # get_config(LOG_KEY, "level")
        log_file = "/var/log/test_run.log"  # get_config(LOG_KEY, "filename")
        log_file_max_bytes = 5242880  # get_config(LOG_KEY, "maxbytes")
        log_file_count = 2  # get_config(LOG_KEY, "backupcount")
        # If invalid log level given then bydefault Default will be taken.
        numeric_level = getattr(logging, log_level.upper(), 10)
        global logger
        logging.Formatter.converter = time.gmtime
        logger = logging.getLogger(__name__)
        LOG_LEVEL = numeric_level
        logging.basicConfig(
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%d-%b-%y %H:%M:%S',
            filename=log_file,
            level=logging.INFO)
        # Rabbitmq details.
        config_path = current_dir + "/config.yaml"
        rbmq_username = get_config("RABBITMQ_SERVER_DETAILS", "USERNAME", config_path)
        rbmq_password = get_config("RABBITMQ_SERVER_DETAILS", "PASSWORD", config_path)
        rbmq_ip = get_config("RABBITMQ_SERVER_DETAILS", "SERVER_IP", config_path)
        TEST_RUN_QUEUE_NAME = "test_run.jobs.queue"
        credentials = pika.PlainCredentials(rbmq_username, rbmq_password)
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=rbmq_ip, credentials=credentials))
        test_run_ch = connection.channel()
        test_run_ch.basic_consume(process_test_run_request,
                                  queue=TEST_RUN_QUEUE_NAME)
        logger.info(' [*] Waiting for messages.')
        print(' [*] Waiting for messages.')
        # Start the consumer (blocks until interrupted).
        test_run_ch.start_consuming()
    except KeyError as e:
        logging.exception("missing key : {}".format(e))
    except Exception as e:
        logging.exception("exception occured : {}".format(e))
    finally:
        # Replaces the fragile `"connection" in locals()` check; the
        # connection is now closed on every exit path.
        if connection is not None:
            connection.close()
def __init__(self):
    """Build a SQLAlchemy engine/session bound to the test_db Postgres
    database, using credentials from config.yaml."""
    print(current_dir)
    config_path = current_dir + "/config.yaml"
    self.db_username = get_config("POSTGRES_DB_DETAILS", "USERNAME", config_path)
    self.db_password = get_config("POSTGRES_DB_DETAILS", "PASSWORD", config_path)
    server_ip = get_config("POSTGRES_DB_DETAILS", "SERVER_IP", config_path)
    server_port = get_config("POSTGRES_DB_DETAILS", "PORT", config_path)
    self.db_ip_port = server_ip + ":" + server_port
    dsn = 'postgresql+psycopg2://{}:{}@{}/test_db'.format(
        self.db_username, self.db_password, self.db_ip_port)
    self.engine = create_engine(dsn)
    self.session = sessionmaker(bind=self.engine)()
    # psycopg2 isolation level 1 = READ COMMITTED
    self.session.connection().connection.set_isolation_level(1)
def login(self, args):
    """Sign a user in via Weibo OAuth: exchange the code for a token,
    then forward it to the hackathon login API."""
    code = args.get('code')
    # get access_token
    log.debug(get_config('login.weibo.access_token_url') + code)
    token_resp = post_to_remote(get_config('login.weibo.access_token_url') + code, {})
    log.debug("get token from Weibo:" + str(token_resp))
    access_token = token_resp['access_token']
    uid = token_resp['uid']
    log.debug("get token info from weibo")
    login_payload = {"provider": "weibo",
                     "code": code,
                     "access_token": access_token,
                     "uid": uid,
                     "hackathon_name": hackathon_name}
    return post_to_remote('%s/api/user/login' % hackathon_api_url, login_payload)
def start(parse):
    """Advance the round immediately, then every 10 seconds.

    parse -- unused; kept for interface compatibility with the other
    command handlers.
    """
    config = functions.get_config(db)
    current_round = Round(db, config)  # renamed: `round` shadowed the builtin
    current_round.next()
    functions.set_interval(current_round.next, 10)
def load_user(id):
    """Fetch a user by id from the hackathon API; returns None on any
    failure (network, parse, or missing field)."""
    try:
        raw = get_remote("%s/api/user?uid=%d" % (get_config("hackathon-api.endpoint"), int(id)))
        info = json.loads(raw)
        return LoginUser(id=info["id"],
                         name=info["name"],
                         nickname=info["nickname"],
                         avatar_url=info["avatar_url"])
    except Exception as e:
        log.error(e)
        return None
def logout():
    """Log the user out remotely (best effort) and locally, then redirect.

    A failed remote logout must not block the local one — same policy as
    the other logout handler in this file.
    """
    url = "%s/api/user/login?uid=%d" % (get_config("hackathon-api.endpoint"), g.user.id)
    try:
        delete_remote(url, {"token": session["token"]})
    except Exception as e:
        log.error(e)
    session.pop("token", None)  # default avoids KeyError if token already gone
    logout_user()
    return redirect("index")
def main():
    """Group zabbix OS items by server, build one zabbix client per
    sector, and fan host checks out across processes (one slice per CPU)."""
    # get os servers list
    zbItems = func.mysql_query("SELECT item_type, stat_item_name, zabbix_item_name, zabbix_item_value_unit, zabbix_server,last_stat_time FROM zabbix_item where item_type='os';")
    zbSrvItems = {}
    for zbItem in zbItems:
        (item_type, stat_item_name, zabbix_item_name, zabbix_item_value_unit, zabbix_server, last_stat_time) = zbItem
        # setdefault replaces the deprecated dict.has_key() check
        zbSrvItems.setdefault(zabbix_server, []).append(zbItem)
    zbSectors = ["zabbix_dc", "zabbix_dba"]
    zbApis = {}
    for zbSector in zbSectors:
        zbKey = func.get_config(zbSector, 'key')
        zbHost = func.get_config(zbSector, 'host')
        zbUser = func.get_config(zbSector, 'user')
        zbPasswd = func.get_config(zbSector, 'passwd')
        zbApis[zbKey] = zbCli(zbHost, zbUser, zbPasswd)
    logger.info("check os controller started.")
    servers = func.mysql_query("select id, host,tags, create_time from db_servers_os where is_delete=0 and monitor=1;")
    if servers:
        plist = []
        # floor division behaves the same on Python 2 and 3 (was `/`)
        proHostsNum = len(servers) // cpus + 1
        logger.info("check os sum:%s, cpus:%s, percpu:%s" % (len(servers), cpus, proHostsNum))
        for i in range(0, len(servers), proHostsNum):
            proSrvs = servers[i:i + proHostsNum]
            p = Process(target=check_hosts, args=(i, proSrvs, zbSrvItems, zbApis))
            plist.append(p)
        # (removed the unused `intervals` computation left over from the
        # commented-out staggered-start sleep)
        for p in plist:
            p.start()
            time.sleep(1)
        for p in plist:
            p.join(timeout=10)
    else:
        logger.warning("check os: not found any servers")
    # mark servers not refreshed within min_interval as unreachable
    func.mysql_exec('update os_status set zabbix=0,create_time=now() where create_time<date_sub(now(), interval %s second)' % (min_interval))
    logger.info("check os controller finished.")
def logout():
    """Tear down the remote session (best effort), then log out locally
    and return to the site root."""
    endpoint = get_config("hackathon-api.endpoint")
    url = "%s/api/user/login?uid=%d" % (endpoint, g.user.id)
    try:
        delete_remote(url, {"token": session["token"]})
    except Exception as e:
        log.debug("logout remote failed")
        log.error(e)
    logout_user()
    return redirect("/")
def login(self, args):
    """Weibo OAuth login: trade the authorization code for a token and
    sign the user in through the hackathon API."""
    code = args.get('code')
    # get access_token
    log.debug(get_config('login.weibo.access_token_url') + code)
    token_resp = post_to_remote(
        get_config('login.weibo.access_token_url') + code, {})
    log.debug("get token from Weibo:" + str(token_resp))
    access_token = token_resp['access_token']
    uid = token_resp['uid']
    log.debug("get token info from weibo")
    return post_to_remote('%s/api/user/login' % hackathon_api_url, {
        "provider": "weibo",
        "code": code,
        "access_token": access_token,
        "uid": uid,
        "hackathon_name": hackathon_name,
    })
def setup_db():
    """Create the test_db Postgres database if needed, then create all
    mapped tables."""
    config_path = current_dir + "/config.yaml"
    db_username = get_config("POSTGRES_DB_DETAILS", "USERNAME", config_path)
    db_password = get_config("POSTGRES_DB_DETAILS", "PASSWORD", config_path)
    db_ip_port = get_config("POSTGRES_DB_DETAILS", "SERVER_IP", config_path) + ":" + get_config("POSTGRES_DB_DETAILS", "PORT", config_path)
    # Connect without a database, in autocommit (isolation level 0), so
    # CREATE DATABASE can run outside a transaction.
    admin_engine = create_engine('postgresql+psycopg2://{}:{}@{}'.format(
        db_username, db_password, db_ip_port))
    admin_session = sessionmaker(bind=admin_engine)()
    admin_session.connection().connection.set_isolation_level(0)
    try:
        admin_session.execute("CREATE DATABASE test_db")  # create db
    except DatabaseError as e:
        print("database already exists")
    # Reconnect, now targeting test_db, at isolation level 1.
    engine = create_engine('postgresql+psycopg2://{}:{}@{}/test_db'.format(
        db_username, db_password, db_ip_port))
    session = sessionmaker(bind=engine)()
    session.connection().connection.set_isolation_level(1)
    # create tables
    Base.metadata.create_all(engine)
def main():
    """Launch one binlog-purge process per eligible server; after a hard
    60-second budget every worker is terminated and reaped."""
    user = func.get_config('mysql_db', 'username')
    passwd = func.get_config('mysql_db', 'password')
    servers = func.mysql_query("select host,port,binlog_store_days from db_servers_mysql where is_delete=0 and monitor=1 and binlog_auto_purge=1;")
    if servers:
        print("%s: admin mysql purge binlog controller started." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),))
        procs = []
        for (host, port, store_days) in servers:
            procs.append(Process(target=admin_mysql_purge_binlog,
                                 args=(host, port, user, passwd, store_days)))
        for proc in procs:
            proc.start()
        time.sleep(60)
        for proc in procs:
            proc.terminate()
        for proc in procs:
            proc.join()
        print("%s: admin mysql purge binlog controller finished." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),))
def login(self, args):
    """Trade a GitCafe OAuth code for an access token, then log in via
    the hackathon API."""
    code = args.get('code')
    url = get_config('login.gitcafe.access_token_url') + code
    opener = urllib2.build_opener(urllib2.HTTPHandler)
    request = urllib2.Request(url, "")  # empty body forces a POST
    resp = opener.open(request)
    token_resp = json.loads(resp.read())
    payload = {"provider": "gitcafe",
               "access_token": token_resp['access_token'],
               "hackathon_name": hackathon_name}
    return post_to_remote('%s/api/user/login' % hackathon_api_url, payload)
def stat_mysql_tablespace():
    """Measure table sizes on every monitored master MySQL server,
    skipping the 172.30.x.x network."""
    print("%s: check mysql bigtable controller started." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), ))
    # get mysql servers list
    servers = func.mysql_query('select m.id,m.host,m.port,m.tags,m.bigtable_size from db_servers_mysql as m , mysql_status as s where m.is_delete=0 and m.monitor=1 and m.bigtable_monitor=1 and m.host=s.host and m.port=s.port and s.role="master" order by host;')
    cnfKey = "monitor_mysql"
    username = func.get_config(cnfKey, 'user')
    password = func.get_config(cnfKey, 'passwd')
    if servers:
        no = 1
        for row in servers:
            (server_id, host, port, tags, bigtable_size) = row
            # escaped the dot: the old "^172.30" also matched e.g. "172130..."
            if re.search(r"^172\.30", host):
                continue
            # parenthesized print works identically under Python 2 and 3;
            # the old bare `print "..."` was Python-2-only
            print("%s/%s %s, %s" % (no, len(servers), host, port))
            check_mysql_tablespace(no, host, port, username, password, server_id, tags, bigtable_size)
            no += 1
    print("%s: check mysql bigtable controller finished." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), ))
def login(self, args):
    """GitHub OAuth: exchange the code for a token, then sign in via the
    hackathon API."""
    code = args.get('code')
    # get access_token
    token_resp = get_remote(get_config('login.github.access_token_url') + code)
    log.debug("get token from github:" + token_resp)
    # response is form-encoded: access_token=<value>&...; take first value
    access_token = token_resp[token_resp.index('=') + 1:token_resp.index('&')]
    log.debug("get token info from github")
    payload = {"provider": "github",
               "code": code,
               "access_token": access_token,
               "hackathon_name": hackathon_name}
    return post_to_remote('%s/api/user/login' % hackathon_api_url, payload)
def post_test_log():
    """Push parsed JUnit results to the qTest auto-test-logs endpoint.

    sys.argv[1] -- literal dict: class name -> list of test method names.
    sys.argv[3] -- 'update' reuses the existing test cycle, else a new one
    is created.
    Returns -1 on failure.
    """
    qtest_config = get_config()
    api_token = qtest_config["qtest_api_token"]
    qTestUrl = qtest_config["qtest_url"]
    projectId = os.environ["PROJECT_ID"]
    dictionary = ast.literal_eval(sys.argv[1])
    for className in dictionary:
        for methodName in dictionary[className]:
            parse_junit_results(className, methodName)
    baseUrl = '{}/api/v3/projects/{}/auto-test-logs'
    testLogUrl = baseUrl.format(qTestUrl, projectId)
    try:
        update = sys.argv[3]
        if update == 'update':
            testCycle = get_test_cycle()
        else:
            testCycle = post_test_cycle()
    except Exception:
        # was a bare `except:` (also swallowed KeyboardInterrupt/SystemExit)
        print("Error: Enter valid argument (true or false) to update existing test cycle or not")
        return -1
    payload = {
        'skipCreatingAutomationModule': False,
        'test_logs': testbody,
        'execution_date': time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime()),
        'test_cycle': testCycle
    }
    key = '{}'.format(api_token)
    headers = {'Content-Type': 'application/json', "Authorization": key}
    params = {'type': 'automation'}
    try:
        r = requests.post(testLogUrl, params=params,
                          data=json.dumps(payload), headers=headers)
        print(r.text)
    except Exception:
        # narrowed from a bare `except:`; still reports and signals failure
        print("Error: Unable to post data to qTest Manager API.")
        return -1
def login(self, args):
    """QQ OAuth: exchange the code for a token, then sign in via the
    hackathon API."""
    code = args.get('code')
    state = "openhackathon"
    # if state != QQ_OAUTH_STATE:
    #     log.warn("STATE match fail. Potentially CSFR.")
    #     return "UnAuthorized", 401
    # get access token
    token_resp = get_remote(get_config("login.qq.access_token_url") + code + '&state=' + state)
    log.debug("get token from qq:" + token_resp)
    start = token_resp.index('=')
    end = token_resp.index('&')
    access_token = token_resp[start + 1:end]
    # get user info
    # `hackathon_name` was missing here, unlike the other QQ handler and
    # every other provider's login(); added for consistency.
    data = {"provider": "qq",
            "access_token": access_token,
            "hackathon_name": hackathon_name}
    # url = get_config("login/qq/user_info_url") % (access_token, client_id, openid)
    return post_to_remote('%s/api/user/login' % hackathon_api_url, data)
def login(self, args):
    """Log in through GitCafe OAuth."""
    code = args.get('code')
    token_url = get_config('login.gitcafe.access_token_url') + code
    opener = urllib2.build_opener(urllib2.HTTPHandler)
    token_request = urllib2.Request(token_url, "")  # empty body => POST
    token = json.loads(opener.open(token_request).read())
    return post_to_remote('%s/api/user/login' % hackathon_api_url, {
        "provider": "gitcafe",
        "access_token": token['access_token'],
        "hackathon_name": hackathon_name,
    })
def login(self, args):
    """Log in through GitHub OAuth."""
    code = args.get('code')
    # get access_token
    token_resp = get_remote(
        get_config('login.github.access_token_url') + code)
    log.debug("get token from github:" + token_resp)
    # token_resp is form-encoded: access_token=<value>&scope=...
    first_eq = token_resp.index('=')
    first_amp = token_resp.index('&')
    access_token = token_resp[first_eq + 1:first_amp]
    log.debug("get token info from github")
    return post_to_remote('%s/api/user/login' % hackathon_api_url, {
        "provider": "github",
        "code": code,
        "access_token": access_token,
        "hackathon_name": hackathon_name,
    })
def login(self, args):
    """Log in through QQ OAuth."""
    code = args.get('code')
    state = "openhackathon"
    # if state != QQ_OAUTH_STATE:
    #     log.warn("STATE match fail. Potentially CSFR.")
    #     return "UnAuthorized", 401
    # get access token
    token_resp = get_remote(
        get_config("login.qq.access_token_url") + code + '&state=' + state)
    log.debug("get token from qq:" + token_resp)
    token_start = token_resp.index('=') + 1
    token_end = token_resp.index('&')
    access_token = token_resp[token_start:token_end]
    # get user info
    payload = {
        "provider": "qq",
        "access_token": access_token,
        "hackathon_name": hackathon_name,
    }
    # url = get_config("login/qq/user_info_url") % (access_token, client_id, openid)
    return post_to_remote('%s/api/user/login' % hackathon_api_url, payload)
def main(hosts=None):
    """Check every monitored MySQL server serially, then prune status rows
    for servers that are no longer monitored or have gone stale.

    hosts -- optional comma-separated whitelist; only matching hosts are
    checked when given.
    """
    dohosts = None
    if hosts is not None:  # was `!= None`
        dohosts = hosts.split(",")
    # get mysql servers list (random order spreads load across runs)
    servers = func.mysql_query('select id,host,port,tags from db_servers_mysql where is_delete=0 and monitor=1 order by rand();')
    logger.info("check mysql controller started.")
    cnfKey = "monitor_mysql"
    username = func.get_config(cnfKey, 'user')
    password = func.get_config(cnfKey, 'passwd')
    min_interval = func.get_option('min_interval')
    if servers:
        for row in servers:
            (server_id, host, port, tags) = row
            if dohosts is not None and dohosts.count(host) <= 0:
                continue
            check_mysql(host, port, username, password, server_id, tags)
        # (an abandoned per-server subprocess variant sat here behind an
        # unconditional `continue`; the unreachable code and its unused
        # exeTimeout/plist scaffolding were removed)
    else:
        logger.warning("check mysql: not found any servers")
    # drop replication/status rows belonging to servers no longer monitored
    func.mysql_exec('DELETE ds FROM mysql_replication AS ds, (SELECT s.id,d.host FROM mysql_replication AS s LEFT JOIN db_servers_mysql AS d ON d.is_delete=0 AND d.monitor=1 AND s.host=d.host AND s.port=d.port HAVING d.`host` IS NULL) AS t WHERE ds.id=t.id')
    func.mysql_exec('DELETE ds FROM mysql_status AS ds, (SELECT s.id,d.host FROM mysql_status AS s LEFT JOIN db_servers_mysql AS d ON d.is_delete=0 AND d.monitor=1 AND s.host=d.host AND s.port=d.port HAVING d.`host` IS NULL) AS t WHERE ds.id=t.id')
    func.mysql_exec('DELETE ds FROM db_status AS ds, (SELECT s.id,d.host FROM db_status AS s LEFT JOIN db_servers_mysql AS d ON d.is_delete=0 AND d.monitor=1 AND s.host=d.host AND s.port=d.port WHERE db_type="mysql" HAVING d.`host` IS NULL) AS t WHERE ds.id=t.id')
    # mark servers not refreshed within min_interval as disconnected
    func.mysql_exec('update mysql_status set connect=0,create_time=now() where create_time<date_sub(now(), interval %s second)' % (min_interval))
    func.mysql_exec('update mysql_replication set slave_io_run="No",slave_sql_run="No",create_time=now() where create_time<date_sub(now(), interval %s second)' % (min_interval))
    logger.info("check mysql controller finished.")
# -*- coding:utf8 -*-
# encoding = utf-8
__author__ = 'root'

from functions import get_remote, get_config, post_to_remote
import urllib2
from log import log
import json

# Shared endpoints, resolved once at import time.
hackathon_api_url = get_config("hackathon-api.endpoint")
hackathon_name = get_config("javascript.hackathon.name")


class LoginUser:
    """Minimal user object holding id/name/nickname/avatar_url plus the
    Flask-Login-style status predicates."""

    def __init__(self, **kwargs):
        # Required keys: id, name, nickname, avatar_url (KeyError if absent).
        self.id = kwargs["id"]
        self.name = kwargs["name"]
        self.nickname = kwargs["nickname"]
        self.avatar_url = kwargs["avatar_url"]

    def is_authenticated(self):
        # Every constructed LoginUser counts as authenticated.
        return True

    def is_active(self):
        return True

    def is_anonymous(self):
        return False

    def get_id(self):
        # Python 2 codebase: id is returned as unicode.
        return unicode(self.id)
def js_config():
    """Serve the client-side configuration as a JS global assignment."""
    body = "var CONFIG=%s" % json.dumps(get_config("javascript"))
    return Response(response=body,
                    status=200,
                    mimetype="application/javascript")
with torch.no_grad(): for step, batch in enumerate(self.test_loader): data, labels = batch[0].to(DEVICE), batch[1] logits_list, cam_1, cam_2, mask_1, mask_2 = self.model( data, None) probs = F.softmax(logits_list[-1], dim=-1).cpu().detach().numpy() labels = labels.cpu().detach().numpy() self.mean_accuracy.update(probs, labels) acc = self.mean_accuracy.compute() confusion = self.mean_accuracy.confusion() return acc, confusion if __name__ == '__main__': my_args = get_config() get_folders(my_args) if my_args.dataset_name == 'SEED' and my_args.dataset_size == 'large' and my_args.people_num == 45: raise RuntimeError('处理 SEED large 数据之前,请先将 people_num 改为 15!') run_select = int(input('选择要跑的人群(1-up, 2-bad, 3-middle, 4-good, 5-all):')) run_dic = {'1': '有提升空间的', '2': '较差', '3': '中等', '4': '较好', '5': '全部'} if my_args.dataset_name == 'MPED': raise RuntimeError("目前不支持 MPED!") # SEED elif my_args.dataset_name == 'SEED': up = [ 1, 2, 4, 5, 11, 12, 13, 14, 15, 18, 19, 20, 23, 24, 26, 27, 28, 29,
def oauth_meta_content():
    """Return the site-verification meta contents for weibo and qq."""
    weibo_content = get_config('login.weibo.meta_content')
    qq_content = get_config('login.qq.meta_content')
    return {'weibo': weibo_content, 'qq': qq_content}
from functions import get_config

# Print the configured local repository path from the qTest configuration.
qtest_config = get_config()
repo = qtest_config["local_repository"]
print(repo)
def get_config(self, config):
    """Look up the requested config entry from Helix for this
    host/cluster pair."""
    requested = functions.get_config(self.host, self.cluster, config)
    return requested
def flags(parse):
    """Load the configuration and start the flag service.

    parse -- CLI arguments; unused here but part of the command signature.
    """
    cfg = functions.get_config(db)
    Flags(db, cfg).start()
def convert_to_mks(x, y=None):
    """Convert pixel coordinates to MKS units via the configured
    pixels-per-metre ratio.

    Returns a (x, y) tuple when y is given, otherwise a single value.
    """
    ppm = get_config("running", "PPM")
    if y is not None:  # idiomatic form of `not y is None`
        return x / ppm, y / ppm
    else:
        return x / ppm
def convert_from_mks(x, y=None):
    """Convert MKS units back to pixel coordinates via the configured
    pixels-per-metre ratio.

    Returns a (x, y) tuple when y is given, otherwise a single value.
    """
    ppm = get_config("running", "PPM")
    if y is not None:  # idiomatic form of `not y is None`
        return x * ppm, y * ppm
    else:
        return x * ppm
import sys
import string
import time
import datetime
import MySQLdb
import logging
import logging.config

logging.config.fileConfig("etc/logger.ini")
logger = logging.getLogger("lepus")

# Make the bundled helper package importable.
path = './include'
sys.path.insert(0, path)
import functions as func
import thread
from multiprocessing import Process

# Monitor-database connection settings, read once at import time.
dbhost = func.get_config('monitor_server', 'host')
dbport = func.get_config('monitor_server', 'port')
dbuser = func.get_config('monitor_server', 'user')
dbpasswd = func.get_config('monitor_server', 'passwd')
dbname = func.get_config('monitor_server', 'dbname')


def check_os(ip, community, filter_os_disk, tags):
    # Archive the current disk / diskio rows for `ip` into their *_history
    # tables, stamped with a YYYYMMDDHHMM key derived from create_time.
    # NOTE(review): os_status archiving is commented out here, unlike the
    # longer check_os variant elsewhere in this file — confirm intent.
    #func.mysql_exec("insert into os_status_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_status where ip='%s';" %(ip),'')
    func.mysql_exec(
        "insert into os_disk_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_disk where ip='%s';"
        % (ip), '')
    func.mysql_exec(
        "insert into os_diskio_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_diskio where ip='%s';"
        % (ip), '')
def index():
    """Render the admin login page with OAuth site-verification
    meta-content for each provider."""
    meta = {
        'weibo': get_config('login.weibo.meta_content'),
        "qq": get_config('login.qq.meta_content'),
    }
    return render_template('/admin/login.html', meta_content=meta)
def test_get_config(self):
    """get_config returns the USERNAME stored in the dummy config file."""
    username = functions.get_config("RABBITMQ_SERVER_DETAILS",
                                    "USERNAME",
                                    "tests/dummy_config.yaml")
    self.assertEqual(username, "sid")
def main():
    """Generate and parse an AWR report for every monitored primary
    Oracle database, then raise one alarm per failed server."""
    #host = "192.168.1.175"
    doHosts = ["172.21.100.40", "172.21.100.121", "172.21.100.122"]
    # NOTE(review): the list above is immediately overwritten — confirm intent.
    doHosts = ["172.21.100.88"]
    qSql = '''
    SELECT d.id,d.host,d.port,d.dsn,d.tags,s.id
    FROM db_servers_oracle d
    LEFT JOIN oracle_status s ON d.`host`=s.`host`AND d.`port`=s.`port` AND s.`database_role`='PRIMARY'
    WHERE is_delete=0 AND monitor=1 AND awrreport=1
    HAVING s.id IS NOT NULL
    '''
    servers = func.mysql_query(qSql)
    logger.info("check oracle awsreport started.")
    cnfKey = "monitor_oracle"
    username = func.get_config(cnfKey, 'user')
    password = func.get_config(cnfKey, 'passwd')
    errSrvs = []  # servers whose report generation failed
    if servers:
        for row in servers:
            (server_id, host, port, dsn, tags, stid) = row
            if doHosts.count(host) <= 0:
                # whitelist check disabled: all servers are processed
                #continue
                pass
            logger.info("AwrReport:%s,%s,%s" % (host, port, dsn))
            (isOk, reports) = createReport(host, port, dsn, username, password)
            if not isOk:
                logger.error("Err createReport:%s,%s,%s" % (host, port, dsn))
                errSrvs.append([server_id, host, port, tags])
                continue
            for report in reports:
                (host, port, instance_num, statTime, textReportFile, htmlReportFile) = report
                sectors = parseAwrReport(textReportFile)
                ## LoadProfile
                lpVals = parseLoadProfile(sectors[SECTOR_LoadProfile])
                logger.info("parseLoadProfile OK")
                # save db
                saveLoadProfile(statTime, server_id, host, port, tags, instance_num, lpVals)
                ## TOP SQL
                topSqls = parseTopsql(sectors)
                logger.info("parseTopsql OK")
                # save db
                saveTopsql(statTime, server_id, host, port, tags, instance_num, topSqls)
    # doTopsqlSummary()
    # check err: raise one warning alarm per server that failed createReport
    db_type = "oracle"
    create_time = datetime.datetime.now()
    alarm_item = "oracle AwrReport"
    alarm_value = "Fail"
    level = "warning"
    message = ""
    for errSrv in errSrvs:
        (server_id, db_host, db_port, tags) = errSrv
        func.add_alarm(server_id, tags, db_host, db_port, create_time, db_type, alarm_item, alarm_value, level, message)
__author__ = 'root' # -*- coding:utf8 -*- # encoding = utf-8 from functions import get_remote, get_config, post_to_remote import urllib2 from log import log import json from flask import redirect, url_for hackathon_api_url = get_config("hackathon-api.endpoint") hackathon_name = get_config("javascript.hackathon.name") class LoginUser: def __init__(self, **kwargs): self.id = kwargs["id"] self.name = kwargs["name"] self.nickname = kwargs["nickname"] self.avatar_url = kwargs["avatar_url"] def is_authenticated(self): return True def is_active(self): return True def is_anonymous(self): return False
import sys
import string
import time
import datetime
import MySQLdb
import logging
import logging.config

logging.config.fileConfig("etc/logger.ini")
logger = logging.getLogger("lepus")

# Make the bundled helper package importable.
path='./include'
sys.path.insert(0,path)
import functions as func
import thread
from multiprocessing import Process;

# Monitor-database connection settings, read once at import time.
dbhost = func.get_config('monitor_server','host')
dbport = func.get_config('monitor_server','port')
dbuser = func.get_config('monitor_server','user')
dbpasswd = func.get_config('monitor_server','passwd')
dbname = func.get_config('monitor_server','dbname')


def check_os(ip,community,filter_os_disk,tags):
    """Archive the current OS metric rows for `ip` (status, disk, diskio,
    net) into their *_history tables — stamped with a YYYYMMDDHHMM key
    derived from create_time — then clear the live rows."""
    func.mysql_exec("insert into os_status_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_status where ip='%s';" %(ip),'')
    func.mysql_exec("insert into os_disk_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_disk where ip='%s';" %(ip),'')
    func.mysql_exec("insert into os_diskio_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_diskio where ip='%s';" %(ip),'')
    func.mysql_exec("insert into os_net_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_net where ip='%s';" %(ip),'')
    # live tables are cleared after archiving
    func.mysql_exec("delete from os_status where ip='%s'" %(ip),'')
    func.mysql_exec("delete from os_disk where ip='%s'" %(ip),'')
    func.mysql_exec("delete from os_diskio where ip='%s'" %(ip),'')
    func.mysql_exec("delete from os_net where ip='%s'" %(ip),'')
import functions as fun
import os
import re

# Parse the volleyball attendance list (volleyball.txt) into ledger
# operation strings.
config = fun.get_config("config.json")
if config is not None:
    path = "volleyball.txt"
    if os.path.isfile(path):
        with open(path, 'r') as f:
            lines = f.readlines()
        operations = []
        for line in lines:
            # A player entry looks like "<n>. <First> <Last> ... <flags>\n".
            player = re.findall('\\d. .*\\n', line)
            if len(player) > 0:
                split = re.findall('\\S*', player[0])
                # NOTE(review): indices 2/4/6 assume a fixed column layout
                # of the source line — confirm against the file format.
                tag = "#" + split[2].lower() + split[4].lower()
                # "x" in the flags column marks the player as having paid
                paid = "x" in split[6].lower()
                wallet = ""
                if "p" in split[6].lower():
                    wallet = "#przelew"
                elif "z" in split[6].lower():
                    wallet = "#zgorki"
                elif "g" in split[6].lower():
                    wallet = "#gotowka"
                # hall-rental charge for this player
                operations.append("$najemSali #czecha " + tag + " -10,25 PLN")
                if paid:
                    # matching payment entry through the detected wallet
                    operations.append("$najemSali #czecha " + tag + " " + wallet + " +11 PLN")
        if len(operations) > 0:
            login = config['volleyball_login']
def test_get_config_value_is_not_set(self):
    """get_config raises when the requested key has no value set."""
    dummy_config = "tests/dummy_config.yaml"
    with self.assertRaises(Exception) as context:
        functions.get_config("RABBITMQ_SERVER_DETAILS",
                             "SERVER_IP",
                             dummy_config)
    self.assertTrue("An exception occured" in str(context.exception))
def index():
    """Render the admin login page with provider meta-content injected."""
    weibo_meta = get_config('login.weibo.meta_content')
    qq_meta = get_config('login.qq.meta_content')
    return render_template('/admin/login.html',
                           meta_content={'weibo': weibo_meta, "qq": qq_meta})
from functions import safe_get_config, get_config
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
from . import app
import os
from log import log


def scheduler_listener(event):
    """Log the outcome of each finished scheduler job."""
    if event.code == EVENT_JOB_ERROR:
        print('The job crashed :(')
        log.warn("The schedule job crashed because of %s" % repr(event.exception))
    else:
        print('The job executed :)')
        log.debug("The schedule job %s executed and return value is '%s'" % (event.job_id, event.retval))


# Start the scheduler only outside debug mode or in the reloader child,
# so Werkzeug's auto-reload does not start it twice.
if not app.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
    scheduler = BackgroundScheduler()
    # job store
    if safe_get_config("scheduler.job_store", "memory") == "mysql":
        # This module used BackgroundScheduler/EVENT_JOB_*/safe_get_config/
        # get_config without importing them (NameError at import time); the
        # import set now matches the complete sibling version of this module.
        scheduler.add_jobstore('sqlalchemy', url=get_config("scheduler.job_store_url"))
    # listener
    # do we need listen EVENT_JOB_MISSED?
    scheduler.add_listener(scheduler_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
    scheduler.start()
from functions import safe_get_config, get_config
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
from . import app
import os
from log import log


def scheduler_listener(event):
    """Log the outcome of each finished scheduler job (error or success)."""
    if event.code == EVENT_JOB_ERROR:
        print('The job crashed :(')
        log.warn("The schedule job crashed because of %s" % repr(event.exception))
    else:
        print('The job executed :)')
        log.debug("The schedule job %s executed and return value is '%s'" % (event.job_id, event.retval))


# Start the scheduler only outside debug mode or in the reloader child
# process, so Werkzeug's auto-reload does not start it twice.
if not app.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
    scheduler = BackgroundScheduler()
    # job store
    if safe_get_config("scheduler.job_store", "memory") == "mysql":
        scheduler.add_jobstore('sqlalchemy', url=get_config("scheduler.job_store_url"))
    # listener
    # do we need listen EVENT_JOB_MISSED?
    scheduler.add_listener(scheduler_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
    scheduler.start()