def querySlowSQL(DBClusterId):
    """Return the list of MySQL users currently running slow SELECT queries.

    Runs SHOW PROCESSLIST against the PolarDB cluster *DBClusterId* and
    collects the distinct user names (column 1) of rows whose COMMAND is
    'Query', whose running time (column 5) is at least
    CONFIG.SLOWSQL_QUERY_TIME seconds, and whose statement (column 7)
    starts with SELECT.

    Returns:
        list: the matching user names (possibly empty), or None when the
        database could not be queried (preserved from the original contract).
    """
    SQL = 'SHOW PROCESSLIST;'
    logging.info('Function:querySlowSQL(), DBClusterId: ['+DBClusterId+']')
    print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())+' Function:querySlowSQL(), DBClusterId: ['+DBClusterId+']')
    try:
        dbconn = db.mysqlClient(
            polardb.getDBClusterEndpoints(DBClusterId),
            config.get('CONFIG.DB_USERNAME'),
            config.get('CONFIG.DB_PASSWORD'),
            'mysql')
        dbconn.dbconnect()
        dbconn.dbcursor()
        processlist_results = dbconn.dbquery(SQL)
    except Exception:
        logging.error('Function: querySlowSQL(), MySQLException.', exc_info = True)
        print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())+' Function: querySlowSQL(), MySQLException.')
    else:
        MysqlUserList = []
        # Hoisted out of the loop: the threshold is loop-invariant.
        slow_threshold = int(config.get('CONFIG.SLOWSQL_QUERY_TIME'))
        # SHOW PROCESSLIST columns: Id, User, Host, db, Command, Time, State, Info.
        # BUG FIX: the original scanned *every* column for the value 'Query'
        # (any column containing 'Query' would trigger the check); test the
        # Command column (index 4) directly instead.
        for row in processlist_results:
            # Slow query = COMMAND 'Query', running at least the configured
            # number of seconds, and a SELECT statement.
            # BUG FIX: the statement check is now case-insensitive; the
            # original only matched lowercase 'select'.
            if (row[4] == 'Query'
                    and row[5] >= slow_threshold
                    and str(row[7]).lower().startswith('select')):
                if row[1] not in MysqlUserList:
                    MysqlUserList.append(row[1])
        dbconn.dbclose()
        # BUG FIX: these two log lines were placed *after* the return
        # statement in the original and never executed.
        logging.info('Function:querySlowSQL(), MysqlUserList: ['+str(MysqlUserList)+']')
        print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())+' Function:querySlowSQL(), MysqlUserList: ['+str(MysqlUserList)+']')
        return MysqlUserList
def update_config():
    """Updates the local configuration from the config service."""
    config_set = Configuration.cached().config_set
    # Fetch both config files pinned to the same revision.
    revision, template_config = config.get(
        config_set,
        'templates.cfg',
        dest_type=config_pb2.InstanceTemplateConfig,
    )
    _, manager_config = config.get(
        config_set,
        'managers.cfg',
        dest_type=config_pb2.InstanceGroupManagerConfig,
        revision=revision,
    )

    # Validate each file; abort without storing anything on the first error.
    template_ctx = config.validation_context.Context.logging()
    validate_template_config(template_config, template_ctx)
    if template_ctx.result().has_errors:
        logging.error('Not updating configuration due to errors in templates.cfg')
        return

    manager_ctx = config.validation_context.Context.logging()
    validate_manager_config(manager_config, manager_ctx)
    if manager_ctx.result().has_errors:
        logging.error('Not updating configuration due to errors in managers.cfg')
        return

    stored_config = Configuration.fetch()
    if stored_config.revision == revision:
        # Already at this revision; nothing to write.
        return
    logging.info('Updating configuration to %s', revision)
    stored_config.modify(
        manager_config=protobuf.text_format.MessageToString(manager_config),
        revision=revision,
        template_config=protobuf.text_format.MessageToString(template_config),
    )
def onPolardbInspection():
    """Entry point for a PolarDB inspection run.

    NOTE(review): only the API-client construction is visible in this chunk;
    the rest of the inspection logic presumably follows — confirm against the
    full file.
    """
    # Build the Aliyun PolarDB API client from credentials/region held in config.
    aliClient = aliyun_polardb_api.polardbClient(
        config.get('CONFIG.accessKeyId'),
        config.get('CONFIG.accessSecret'),
        config.get('CONFIG.regionId'))
def sendMsg(message):
    """Send *message* as a text notification to a WeCom (WeChat Work) group robot.

    Mentions the users listed in CONFIG.WXWORK_USER_LIST and logs the
    webhook response body.
    """
    # BUG FIX: the original built a *set* literal
    # {'Content-Type: application/json;charset=utf-8'} instead of a dict.
    headers = {'Content-Type': 'application/json;charset=utf-8'}
    wxwork_robot_url = "https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key="+config.get('CONFIG.WXWORK_ROBOT_KEY')
    wxwork_text_json = {
        "msgtype": "text",
        "text": {
            "content": message,
            "mentioned_list": json.loads(config.get('CONFIG.WXWORK_USER_LIST')),
        }
    }
    # BUG FIX: headers was passed as the third *positional* argument, which
    # requests.post() binds to `json=`, not `headers=` (assumes `req` is the
    # requests module — confirm against the file's imports).
    reqContent = str(req.post(wxwork_robot_url, json.dumps(wxwork_text_json), headers=headers).content)
    logging.debug('Function sendMsg():'+reqContent)
    print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())+' Function sendMsg():'+reqContent)
def update_config():
    """Updates the local configuration from the config service."""
    def _passes_validation(validator, cfg, error_message):
        # Run one validator in a fresh logging context; log and report
        # failure when the config file has errors.
        ctx = config.validation_context.Context.logging()
        validator(cfg, ctx)
        if ctx.result().has_errors:
            logging.error(error_message)
            return False
        return True

    config_set = Configuration.cached().config_set
    revision, template_config = config.get(
        config_set, 'templates.cfg',
        dest_type=config_pb2.InstanceTemplateConfig)
    _, manager_config = config.get(
        config_set, 'managers.cfg',
        dest_type=config_pb2.InstanceGroupManagerConfig,
        revision=revision)

    if not _passes_validation(
            validate_template_config, template_config,
            'Not updating configuration due to errors in templates.cfg'):
        return
    if not _passes_validation(
            validate_manager_config, manager_config,
            'Not updating configuration due to errors in managers.cfg'):
        return

    stored_config = Configuration.fetch()
    if stored_config.revision != revision:
        logging.info('Updating configuration to %s', revision)
        stored_config.modify(
            manager_config=protobuf.text_format.MessageToString(manager_config),
            revision=revision,
            template_config=protobuf.text_format.MessageToString(template_config))
def upload_pdf(self):
    """Upload the generated PDF report to OSS.

    On success, records the download URL and marks the task finished
    (status 2). The local PDF file is removed afterwards.

    Raises:
        Exception: when the OSS upload fails (chained to the original error).
    """
    oss_file_url = None
    try:
        # BUG FIX: the original leaked the file handle; use a context manager.
        with open(self.pdf_path, 'rb') as pdf_file:
            oss_file_url = OSSFileProxy().upload(
                pdf_file,
                root=config.get('OSS.root'),
                file_name=f"dashboard_pdf/{self.pdf_name}")
        self.info("上传报告pdf到oss:" + oss_file_url)
    except Exception as ex:
        # BUG FIX: was `except BaseException`, which also swallowed
        # KeyboardInterrupt/SystemExit; chain the cause for debuggability.
        raise Exception('上传oss失败,错误内容:' + str(ex)) from ex
    finally:
        if oss_file_url:
            self.update_task_data({
                "download_url": oss_file_url,
                "status": 2
            })
        # NOTE(review): source indentation is ambiguous here — this cleanup
        # is treated as unconditional; confirm against the original file.
        if os.path.exists(self.pdf_path):
            os.remove(self.pdf_path)
async def run(self):
    """Render self.url in headless Chromium, export it as a PDF, and upload it.

    Marks the task failed (status 3) and re-raises on error; always closes
    the browser and kills the crawler process.
    """
    import asyncio  # local import: used for non-blocking sleeps below

    path = config.get('Chromium.path', '')
    browser = None
    try:
        browser = await launch(headless=True,
                               executablePath=path,
                               args=['--disable-infobars', '--no-sandbox'])
        page = await browser.newPage()
        # Authenticate the page before navigating.
        await page.setCookie(self.gen_cookie())
        await page.goto(self.url, {
            'timeout': 20 * 1000,
            'waitUntil': ['load', 'networkidle0']
        })
        # Size the viewport to the rendered content.
        width, height = await self.get_width_height(page)
        await page.setViewport({'width': width, 'height': height})
        # Give the page a moment to reflow after the viewport change.
        # BUG FIX: time.sleep() blocks the event loop inside a coroutine;
        # use await asyncio.sleep() instead.
        await asyncio.sleep(1)
        # Export the PDF using screen (not print) CSS.
        await page.emulateMedia('screen')
        await page.pdf(path=self.pdf_path, options={
            'width': width,
            'height': height,
            'printBackground': True
        })
        # BUG FIX: the original closed the browser here *and* again in
        # `finally`, causing a double-close; `finally` is the single owner.
        self.upload_pdf()
    except Exception as ex:
        print(traceback.format_exc())
        self.update_task_data({"status": 3})
        raise Exception('导出pdf失败,错误内容:' + str(ex)) from ex
    finally:
        if browser:
            await browser.close()
        # BUG FIX: non-blocking sleep, as above.
        await asyncio.sleep(1)
        self.kill_crawler()
        # NOTE(review): this prints even on failure (it sits in `finally`),
        # preserved from the original — confirm whether that is intended.
        print("成功,更新数据。")
def test_cannot_load_config(self):
    """config.get() wraps provider failures in CannotLoadConfigError."""
    # Make the underlying provider blow up on any fetch.
    self.provider.get_async.side_effect = ValueError
    expected_error = config.CannotLoadConfigError
    with self.assertRaises(expected_error):
        config.get('services/foo', 'bar.cfg')
def test_get(self):
    """config.get() returns the revision and the parsed proto payload."""
    result = config.get('services/foo', 'bar.cfg', test_config_pb2.Config)
    revision, cfg = result
    self.assertEqual(revision, 'deadbeef')
    self.assertEqual(cfg.param, 'value')
# coding=utf-8 from components import config import redis, json # Redis Connection Pool POOL = redis.ConnectionPool(host=config.get('CONFIG.REDIS_HOST'), port=config.get('CONFIG.REDIS_PORT'), password=config.get('CONFIG.REDIS_PASS'), max_connections=10, db=0) CONN = redis.StrictRedis(connection_pool=POOL, decode_responses=True) DBClusterInfoAllJson = json.loads(CONN.get("DBClusterInfoAll")) def getDBClusterIdList(): DBClusterIdList = [] for i in range(len(DBClusterInfoAllJson)): DBClusterIdList.append(DBClusterInfoAllJson[i]['DBClusterId']) return DBClusterIdList def getDBNodesIdList(DBClusterId): for i in range(len(DBClusterInfoAllJson)): if DBClusterInfoAllJson[i]['DBClusterId'] == DBClusterId: return DBClusterInfoAllJson[i]['DBNodes'] break def getDBClusterEndpoints(DBClusterId): for i in range(len(DBClusterInfoAllJson)):
# coding = utf-8
import redis
import json, ast
from components import aliyun_polardb_api
from components import config

# SECURITY FIX: the original printed CONFIG.REDIS_PASS to stdout,
# leaking the Redis password into logs; that print is removed.

conn = redis.StrictRedis(host=config.get('CONFIG.REDIS_HOST'),
                         port=config.get('CONFIG.REDIS_PORT'),
                         password=config.get('CONFIG.REDIS_PASS'),
                         db=0, decode_responses=True)

# Build the PolarDB API client and snapshot the full cluster inventory
# into Redis for the other modules to read.
aliClient = aliyun_polardb_api.polardbClient(
    config.get('CONFIG.accessKeyId'),
    config.get('CONFIG.accessSecret'),
    config.get('CONFIG.regionId'))
DBClusterInfoAllList = aliClient.getDBClusterInfoAllList()
conn.set("DBClusterInfoAll", json.dumps(DBClusterInfoAllList))

# SYNTAX FIX: the original dict literal ended with `......`, which is not
# valid Python. Fill in the real per-user credentials before running.
UsersDict = {'USERNAME1': 'PASSWORD1',
             'USERNAME2': 'PASSWORD2'}
conn.set("UsersDict", json.dumps(UsersDict))