def __init__(self):
    try:
        self.sys_config = SysConfig().sys_config
        self.inception_host = self.sys_config.get('inception_host')
        if self.sys_config.get('inception_port'):
            self.inception_port = int(self.sys_config.get('inception_port'))
        else:
            self.inception_port = 6669
        self.inception_remote_backup_host = self.sys_config.get('inception_remote_backup_host')
        if self.sys_config.get('inception_remote_backup_port'):
            self.inception_remote_backup_port = int(self.sys_config.get('inception_remote_backup_port'))
        else:
            self.inception_remote_backup_port = 3306
        self.inception_remote_backup_user = self.sys_config.get('inception_remote_backup_user')
        self.inception_remote_backup_password = self.sys_config.get('inception_remote_backup_password')
        self.prpCryptor = Prpcrypt()
    except AttributeError as a:
        print("Error: %s" % a)
    except ValueError as v:
        print("Error: %s" % v)

def process_status(request):
    cluster_name = request.POST.get('cluster_name')
    command_type = request.POST.get('command_type')
    base_sql = "select id, user, host, db, command, time, state, ifnull(info,'') as info from information_schema.processlist"

    # Aliyun RDS instance or a regular instance?
    if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
        result = aliyun_process_status(request)
    else:
        master_info = master_config.objects.get(cluster_name=cluster_name)
        if command_type == 'All':
            sql = base_sql + ";"
        elif command_type == 'Not Sleep':
            sql = "{} where command<>'Sleep';".format(base_sql)
        else:
            sql = "{} where command= '{}';".format(base_sql, command_type)
        processlist = dao.mysql_query(master_info.master_host, master_info.master_port,
                                      master_info.master_user,
                                      prpCryptor.decrypt(master_info.master_password),
                                      'information_schema', sql)
        column_list = processlist['column_list']
        rows = []
        for row in processlist['rows']:
            row_info = {}
            for row_index, row_item in enumerate(row):
                row_info[column_list[row_index]] = row_item
            rows.append(row_info)
        result = {'status': 0, 'msg': 'ok', 'data': rows}

    # Return the query result
    return HttpResponse(json.dumps(result), content_type='application/json')

def create_kill_session(request):
    cluster_name = request.POST.get('cluster_name')
    ThreadIDs = request.POST.get('ThreadIDs')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    # Aliyun RDS instance or a regular instance?
    if len(AliyunRdsConfig.objects.filter(cluster_name=cluster_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            result = aliyun_create_kill_session(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        master_info = MasterConfig.objects.get(cluster_name=cluster_name)
        ThreadIDs = ThreadIDs.replace('[', '').replace(']', '')
        sql = "select concat('kill ', id, ';') from information_schema.processlist where id in ({});".format(ThreadIDs)
        all_kill_sql = dao.mysql_query(master_info.master_host, master_info.master_port,
                                       master_info.master_user,
                                       prpCryptor.decrypt(master_info.master_password),
                                       'information_schema', sql)
        kill_sql = ''
        for row in all_kill_sql['rows']:
            kill_sql = kill_sql + row[0]
        result['data'] = kill_sql

    # Return the query result
    return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
                        content_type='application/json')

def process(request):
    instance_name = request.POST.get('instance_name')
    command_type = request.POST.get('command_type')
    base_sql = "select id, user, host, db, command, time, state, ifnull(info,'') as info from information_schema.processlist"

    # Aliyun RDS instance or a regular instance?
    if len(AliyunRdsConfig.objects.filter(instance_name=instance_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            result = aliyun_process_status(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        if command_type == 'All':
            sql = base_sql + ";"
        elif command_type == 'Not Sleep':
            sql = "{} where command<>'Sleep';".format(base_sql)
        else:
            sql = "{} where command= '{}';".format(base_sql, command_type)
        processlist = Dao(instance_name=instance_name).mysql_query('information_schema', sql)
        column_list = processlist['column_list']
        rows = []
        for row in processlist['rows']:
            row_info = {}
            for row_index, row_item in enumerate(row):
                row_info[column_list[row_index]] = row_item
            rows.append(row_info)
        result = {'status': 0, 'msg': 'ok', 'data': rows}

    # Return the query result
    return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
                        content_type='application/json')

def __init__(self):
    sys_config = SysConfig().sys_config
    self.MAIL_REVIEW_SMTP_SERVER = sys_config.get('mail_smtp_server')
    if sys_config.get('mail_smtp_port'):
        self.MAIL_REVIEW_SMTP_PORT = int(sys_config.get('mail_smtp_port'))
    else:
        self.MAIL_REVIEW_SMTP_PORT = 25
    self.MAIL_REVIEW_FROM_ADDR = sys_config.get('mail_smtp_user')
    self.MAIL_REVIEW_FROM_PASSWORD = sys_config.get('mail_smtp_password')

def log_mail_record(login_failed_message):
    mail_title = 'login archer'
    logger.warning(login_failed_message)
    dbaAddr = [email['email'] for email in users.objects.filter(role='DBA').values('email')]
    if SysConfig().sys_config.get('mail') == 'true':
        MailSender().sendEmail(mail_title, login_failed_message, dbaAddr)

def loginAuthenticate(username, password):
    """Login authentication with a failure counter: an account that fails 5 times
    within 5 minutes is locked for 5 minutes."""
    sys_config = SysConfig().sys_config
    if sys_config.get('lock_cnt_threshold'):
        lockCntThreshold = int(sys_config.get('lock_cnt_threshold'))
    else:
        lockCntThreshold = 5
    if sys_config.get('lock_time_threshold'):
        lockTimeThreshold = int(sys_config.get('lock_time_threshold'))
    else:
        lockTimeThreshold = 300

    # Server-side validation of the submitted credentials
    if username == "" or password == "" or username is None or password is None:
        result = {'status': 2, 'msg': '登录用户名或密码为空,请重新输入!', 'data': ''}
    elif username in login_failure_counter \
            and login_failure_counter[username]["cnt"] >= lockCntThreshold \
            and (datetime.datetime.now() - login_failure_counter[username]["last_failure_time"]).seconds <= lockTimeThreshold:
        log_mail_record('user:{},login failed, account locking...'.format(username))
        result = {'status': 3, 'msg': '登录失败超过5次,该账号已被锁定5分钟!', 'data': ''}
    else:
        # Authenticate against Django's auth backend
        user = authenticate(username=username, password=password)
        # Login succeeded
        if user:
            # Clear this user's entry from the failure counter, if any
            if username in login_failure_counter:
                login_failure_counter.pop(username)
            result = {'status': 0, 'msg': 'ok', 'data': user}
        # Login failed
        else:
            if username not in login_failure_counter:
                # First failure: create a counter entry for this user
                login_failure_counter[username] = {
                    "cnt": 1,
                    "last_failure_time": datetime.datetime.now()
                }
            else:
                if (datetime.datetime.now() - login_failure_counter[username]["last_failure_time"]).seconds <= lockTimeThreshold:
                    login_failure_counter[username]["cnt"] += 1
                else:
                    # The previous failure was more than 5 minutes ago, so restart the count;
                    # this is what unlocks the account automatically after 5 minutes.
                    login_failure_counter[username]["cnt"] = 1
                login_failure_counter[username]["last_failure_time"] = datetime.datetime.now()
            log_mail_record('user:{},login failed, fail count:{}'.format(
                username, login_failure_counter[username]["cnt"]))
            result = {'status': 1, 'msg': '用户名或密码错误,请重新输入!', 'data': ''}
    return result

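# A minimal usage sketch, not part of the original module: a login view might call
# loginAuthenticate() and branch on the returned status code. The view name and the
# use of django.contrib.auth.login() are assumptions for illustration only.
from django.contrib.auth import login

def authenticate_entry(request):
    username = request.POST.get('username')
    password = request.POST.get('password')
    result = loginAuthenticate(username, password)
    if result['status'] == 0:
        # result['data'] carries the authenticated User object on success
        login(request, result['data'])
    return HttpResponse(json.dumps({'status': result['status'], 'msg': result['msg']}),
                        content_type='application/json')
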
def is_autoreview(workflowid):
    workflowDetail = SqlWorkflow.objects.get(id=workflowid)
    sql_content = workflowDetail.sql_content
    cluster_name = workflowDetail.cluster_name
    db_name = workflowDetail.db_name
    is_manual = workflowDetail.is_manual

    # Strip comment statements (see the illustration after this function)
    sql_content = ''.join(
        map(lambda x: re.compile(r'(^--\s+.*|^/\*.*\*/;\s*$)').sub('', x, count=1),
            sql_content.splitlines(1))).strip()

    # Fetch the regex that marks statements requiring manual review
    auto_review_regex = SysConfig().sys_config.get(
        'auto_review_regex', '^alter|^create|^drop|^truncate|^rename|^delete')
    p = re.compile(auto_review_regex)

    # If any statement matches the regex, manual review is required
    is_autoreview = True
    for row in sql_content.strip(';').split(';'):
        if p.match(row.strip().lower()):
            is_autoreview = False
            break

    if is_autoreview:
        # Additional check on affected rows: a single UPDATE touching more rows than the
        # configured threshold also requires manual review
        inception_review = InceptionDao().sqlautoReview(sql_content, cluster_name, db_name)
        for review_result in inception_review:
            SQL = review_result[5]
            Affected_rows = review_result[6]
            if re.match(r"^update", SQL.strip().lower()):
                if int(Affected_rows) > int(SysConfig().sys_config.get('auto_review_max_update_rows', 0)):
                    is_autoreview = False
                    break

    # Statements whose syntax Inception does not support always require approval
    if is_manual == 1:
        is_autoreview = False
    return is_autoreview

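# Illustrative sketch only (not in the original source): the comment-stripping expression
# used above, applied to a small made-up workflow body. Single-line "-- " comments and
# "/* ... */;" lines are removed before the auto-review regex is evaluated.
demo_sql = "-- add index\nalter table t1 add index idx_c1(c1);\ninsert into t2 values (1);"
cleaned = ''.join(
    map(lambda x: re.compile(r'(^--\s+.*|^/\*.*\*/;\s*$)').sub('', x, count=1),
        demo_sql.splitlines(1))).strip()
# cleaned == "alter table t1 add index idx_c1(c1);\ninsert into t2 values (1);"
# With the default auto_review_regex the "alter" statement matches, so this workflow
# would require manual review.
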
def sqladvisorcheck(request):
    sqlContent = request.POST.get('sql_content')
    clusterName = request.POST.get('cluster_name')
    dbName = request.POST.get('db_name')
    verbose = request.POST.get('verbose')
    finalResult = {'status': 0, 'msg': 'ok', 'data': []}

    # Server-side parameter validation
    if sqlContent is None or clusterName is None:
        finalResult['status'] = 1
        finalResult['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(finalResult), content_type='application/json')
    sqlContent = sqlContent.strip()
    if sqlContent[-1] != ";":
        finalResult['status'] = 1
        finalResult['msg'] = 'SQL语句结尾没有以;结尾,请重新修改并提交!'
        return HttpResponse(json.dumps(finalResult), content_type='application/json')
    if verbose is None or verbose == '':
        verbose = 1

    # Fetch the master's connection information
    cluster_info = master_config.objects.get(cluster_name=clusterName)

    # Hand the statement to SQLAdvisor and collect its report
    sqladvisor_path = SysConfig().sys_config.get('sqladvisor')
    sqlContent = sqlContent.strip().replace('"', '\\"').replace('`', '\\`').replace('\n', ' ')
    try:
        p = subprocess.Popen(
            sqladvisor_path + ' -h "%s" -P "%s" -u "%s" -p "%s\" -d "%s" -v %s -q "%s"' % (
                str(cluster_info.master_host), str(cluster_info.master_port),
                str(cluster_info.master_user),
                str(prpCryptor.decrypt(cluster_info.master_password)),
                str(dbName), verbose, sqlContent),
            stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
            shell=True, universal_newlines=True)
        stdout, stderr = p.communicate()
        finalResult['data'] = stdout
    except Exception:
        finalResult['data'] = 'sqladvisor运行报错,请联系管理员'
    return HttpResponse(json.dumps(finalResult), content_type='application/json')

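# A hedged alternative sketch, not the project's implementation: building the SQLAdvisor
# call as an argument list with shell=False sidesteps the shell-quoting and injection
# pitfalls of the string-interpolated command above. The helper name and its parameters
# are assumptions; the SQLAdvisor flags are the same ones used above.
def run_sqladvisor(sqladvisor_path, cluster_info, password, db_name, verbose, sql):
    cmd = [
        sqladvisor_path,
        '-h', str(cluster_info.master_host),
        '-P', str(cluster_info.master_port),
        '-u', str(cluster_info.master_user),
        '-p', password,
        '-d', str(db_name),
        '-v', str(verbose),
        '-q', sql,
    ]
    # No shell involved, so quotes and backticks in the SQL need no escaping
    completed = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                               universal_newlines=True)
    return completed.stdout
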
def execute_skipinc_call_back(workflowId, clusterName, db_name, sql_content, url):
    workflowDetail = workflow.objects.get(id=workflowId)
    # Reviewer(s) of the workflow
    reviewMan = workflowDetail.review_man
    # Master connection information for the instance
    masterInfo = getMasterConnStr(clusterName)
    try:
        # Execute the SQL and time it
        t_start = time.time()
        execute_result = dao.mysql_execute(masterInfo['masterHost'], masterInfo['masterPort'],
                                           masterInfo['masterUser'], masterInfo['masterPassword'],
                                           db_name, sql_content)
        t_end = time.time()
        execute_time = "%5s" % "{:.4f}".format(t_end - t_start)
        execute_result['execute_time'] = execute_time + 'sec'

        workflowDetail = workflow.objects.get(id=workflowId)
        if execute_result.get('Warning'):
            workflowDetail.status = Const.workflowStatus['exception']
        elif execute_result.get('Error'):
            workflowDetail.status = Const.workflowStatus['exception']
        else:
            workflowDetail.status = Const.workflowStatus['finish']
        workflowDetail.finish_time = timezone.now()
        workflowDetail.execute_result = json.dumps(execute_result)
        workflowDetail.is_manual = 1
        workflowDetail.audit_remark = ''
        workflowDetail.is_backup = '否'
        # Close and reacquire the connection to avoid a timed-out connection
        connection.close()
        workflowDetail.save()
    except Exception as e:
        logger.error(e)

    # After execution, the configuration decides whether to mail the submitter and the DBAs;
    # DBAs need to know about reviewed and executed workflows
    if SysConfig().sys_config.get('mail') == 'true':
        engineer = workflowDetail.engineer
        workflowStatus = workflowDetail.status
        workflowName = workflowDetail.workflow_name
        strTitle = "SQL上线工单执行完毕 # " + str(workflowId)
        strContent = "发起人:" + engineer + "\n审核人:" + reviewMan + "\n工单地址:" + url \
                     + "\n工单名称: " + workflowName + "\n执行结果:" + workflowStatus
        # Mail the applicant and reviewers, cc the DBAs
        notify_users = reviewMan.split(',')
        notify_users.append(engineer)
        listToAddr = [email['email'] for email in
                      users.objects.filter(username__in=notify_users).values('email')]
        listCcAddr = [email['email'] for email in users.objects.filter(role='DBA').values('email')]
        MailSender().sendEmail(strTitle, strContent, listToAddr, listCcAddr=listCcAddr)

def __init__(self):
    self.sys_config = SysConfig().sys_config
    self.inception_host = self.sys_config.get('inception_host')
    if self.sys_config.get('inception_port'):
        self.inception_port = int(self.sys_config.get('inception_port'))
    else:
        self.inception_port = 6669
    self.inception_remote_backup_host = self.sys_config.get('inception_remote_backup_host')
    if self.sys_config.get('inception_remote_backup_port'):
        self.inception_remote_backup_port = int(self.sys_config.get('inception_remote_backup_port'))
    else:
        self.inception_remote_backup_port = 3306
    self.inception_remote_backup_user = self.sys_config.get('inception_remote_backup_user')
    self.inception_remote_backup_password = self.sys_config.get('inception_remote_backup_password')
    self.prpCryptor = Prpcrypt()

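# Illustrative sketch only: the configuration keys the two constructors above read from
# SysConfig().sys_config, with made-up example values. Missing ports fall back to 6669
# (Inception) and 3306 (the remote backup MySQL).
example_sys_config = {
    'inception_host': '127.0.0.1',
    'inception_port': '6669',
    'inception_remote_backup_host': '127.0.0.1',
    'inception_remote_backup_port': '3306',
    'inception_remote_backup_user': 'inception',
    'inception_remote_backup_password': '<secret>',
}
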
def tablesapce(request):
    cluster_name = request.POST.get('cluster_name')

    # Aliyun RDS instance or a regular instance?
    if len(AliyunRdsConfig.objects.filter(cluster_name=cluster_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            result = aliyun_sapce_status(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        master_info = MasterConfig.objects.get(cluster_name=cluster_name)
        sql = '''
        SELECT
            table_schema,
            table_name,
            engine,
            TRUNCATE((data_length+index_length+data_free)/1024/1024,2) AS total_size,
            table_rows,
            TRUNCATE(data_length/1024/1024,2) AS data_size,
            TRUNCATE(index_length/1024/1024,2) AS index_size,
            TRUNCATE(data_free/1024/1024,2) AS data_free,
            TRUNCATE(data_free/(data_length+index_length+data_free)*100,2) AS pct_free
        FROM information_schema.tables
        WHERE table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'test', 'sys')
        ORDER BY total_size DESC
        LIMIT 14;'''
        table_space = dao.mysql_query(master_info.master_host, master_info.master_port,
                                      master_info.master_user,
                                      prpCryptor.decrypt(master_info.master_password),
                                      'information_schema', sql)
        column_list = table_space['column_list']
        rows = []
        for row in table_space['rows']:
            row_info = {}
            for row_index, row_item in enumerate(row):
                row_info[column_list[row_index]] = row_item
            rows.append(row_info)
        result = {'status': 0, 'msg': 'ok', 'data': rows}

    # Return the query result
    return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
                        content_type='application/json')

def __init__(self):
    try:
        sys_config = SysConfig().sys_config
        self.MAIL_REVIEW_SMTP_SERVER = sys_config.get('mail_smtp_server')
        if sys_config.get('mail_smtp_port'):
            self.MAIL_REVIEW_SMTP_PORT = int(sys_config.get('mail_smtp_port'))
        else:
            self.MAIL_REVIEW_SMTP_PORT = 25
        self.MAIL_REVIEW_FROM_ADDR = sys_config.get('mail_smtp_user')
        self.MAIL_REVIEW_FROM_PASSWORD = sys_config.get('mail_smtp_password')
    except AttributeError as a:
        print("Error: %s" % a)
    except ValueError as v:
        print("Error: %s" % v)

def kill_session(request):
    cluster_name = request.POST.get('cluster_name')
    request_params = request.POST.get('request_params')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    # Aliyun RDS instance or a regular instance?
    if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
        result = aliyun_kill_session(request)
    else:
        master_info = master_config.objects.get(cluster_name=cluster_name)
        kill_sql = request_params
        dao.mysql_execute(master_info.master_host, master_info.master_port,
                          master_info.master_user,
                          prpCryptor.decrypt(master_info.master_password),
                          'information_schema', kill_sql)

    # Return the result
    return HttpResponse(json.dumps(result), content_type='application/json')

def kill_session(request):
    instance_name = request.POST.get('instance_name')
    request_params = request.POST.get('request_params')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    # Aliyun RDS instance or a regular instance?
    if len(AliyunRdsConfig.objects.filter(instance_name=instance_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            result = aliyun_kill_session(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        kill_sql = request_params
        Dao(instance_name=instance_name).mysql_execute('information_schema', kill_sql)

    # Return the result
    return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
                        content_type='application/json')

def execute_call_back(workflowId, clusterName, url):
    workflowDetail = workflow.objects.get(id=workflowId)
    # Reviewer(s) of the workflow
    reviewMan = workflowDetail.review_man
    dictConn = getMasterConnStr(clusterName)
    try:
        # Let Inception split the statements first, then execute them
        (finalStatus, finalList) = InceptionDao().executeFinal(workflowDetail, dictConn)
        # Store the result as JSON in the workflow record
        strJsonResult = json.dumps(finalList)
        workflowDetail = workflow.objects.get(id=workflowId)
        workflowDetail.execute_result = strJsonResult
        workflowDetail.finish_time = timezone.now()
        workflowDetail.status = finalStatus
        workflowDetail.is_manual = 0
        workflowDetail.audit_remark = ''
        # Close and reacquire the connection to avoid a timed-out connection
        connection.close()
        workflowDetail.save()
    except Exception as e:
        logger.error(e)

    # After execution, the configuration decides whether to mail the submitter and the DBAs;
    # DBAs need to know about reviewed and executed workflows
    if SysConfig().sys_config.get('mail') == 'true':
        # Mail the applicant and the DBAs
        engineer = workflowDetail.engineer
        workflowStatus = workflowDetail.status
        workflowName = workflowDetail.workflow_name
        strTitle = "SQL上线工单执行完毕 # " + str(workflowId)
        strContent = "发起人:" + engineer + "\n审核人:" + reviewMan + "\n工单地址:" + url \
                     + "\n工单名称: " + workflowName + "\n执行结果:" + workflowStatus
        # Mail the applicant and reviewers, cc the DBAs
        notify_users = reviewMan.split(',')
        notify_users.append(engineer)
        listToAddr = [email['email'] for email in
                      users.objects.filter(username__in=notify_users).values('email')]
        listCcAddr = [email['email'] for email in users.objects.filter(role='DBA').values('email')]
        MailSender().sendEmail(strTitle, strContent, listToAddr, listCcAddr=listCcAddr)

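# Hedged sketch, not part of the original file: the two call-backs above are long-running,
# so a submitting view would plausibly fire them on a background thread instead of inside
# the request cycle. The helper name and argument names are assumptions for illustration.
from threading import Thread

def execute_async(workflow_id, cluster_name, workflow_url):
    # Spawn the execution in the background; the calling view returns immediately
    Thread(target=execute_call_back, args=(workflow_id, cluster_name, workflow_url)).start()
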
def send_msg(workflowDetail, url):
    mailSender = MailSender()
    sys_config = SysConfig().sys_config
    # Current approval step and the full approval flow
    audit_auth_group, current_audit_auth_group = Workflow.review_info(workflowDetail.id, 2)

    # Depending on configuration, notify the submitter and the DBAs;
    # DBAs need to know about reviewed and executed workflows
    msg_title = "[{}]工单{}#{}".format(WorkflowDict.workflow_type['sqlreview_display'],
                                      workflowDetail.status, workflowDetail.id)
    msg_content = '''发起人:{}\n审批流程:{}\n工单名称:{}\n工单地址:{}\n工单详情预览:{}\n'''.format(
        workflowDetail.engineer_display, audit_auth_group, workflowDetail.workflow_name, url,
        workflowDetail.sql_content[0:500])
    if sys_config.get('mail') == 'true':
        # Mail the applicant and reviewers, cc the DBAs
        notify_users = workflowDetail.review_man.split(',')
        notify_users.append(workflowDetail.engineer)
        listToAddr = [email['email'] for email in
                      Users.objects.filter(username__in=notify_users).values('email')]
        listCcAddr = [email['email'] for email in
                      auth_group_users(auth_group_names=['DBA'],
                                       group_id=workflowDetail.group_id).values('email')]
        mailSender.send_email(msg_title, msg_content, listToAddr, listCcAddr=listCcAddr)
    if sys_config.get('ding') == 'true':
        # DingTalk notification to the applicant and reviewers, cc the DBAs
        webhook_url = SqlGroup.objects.get(group_id=workflowDetail.group_id).ding_webhook
        MailSender.send_ding(webhook_url, msg_title + '\n' + msg_content)

def kill_session(request):
    cluster_name = request.POST.get('cluster_name')
    request_params = request.POST.get('request_params')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    # Aliyun RDS instance or a regular instance?
    if len(AliyunRdsConfig.objects.filter(cluster_name=cluster_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            result = aliyun_kill_session(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        master_info = MasterConfig.objects.get(cluster_name=cluster_name)
        kill_sql = request_params
        dao.mysql_execute(master_info.master_host, master_info.master_port,
                          master_info.master_user,
                          prpCryptor.decrypt(master_info.master_password),
                          'information_schema', kill_sql)

    # Return the result
    return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
                        content_type='application/json')

def _send(audit_id, msg_type, **kwargs):
    msg_sender = MailSender()
    sys_config = SysConfig().sys_config
    audit_info = WorkflowAudit.objects.get(audit_id=audit_id)
    workflow_id = audit_info.workflow_id
    workflow_type = audit_info.workflow_type
    status = audit_info.current_status
    workflow_title = audit_info.workflow_title
    workflow_from = audit_info.create_user_display
    workflow_url = kwargs.get('workflow_url')
    webhook_url = SqlGroup.objects.get(group_id=audit_info.group_id).ding_webhook
    audit_info = WorkflowAudit.objects.get(workflow_id=workflow_id, workflow_type=workflow_type)
    if audit_info.audit_auth_groups == '':
        workflow_auditors = '无需审批'
    else:
        try:
            workflow_auditors = '->'.join([Group.objects.get(id=auth_group_id).name
                                           for auth_group_id in audit_info.audit_auth_groups.split(',')])
        except Exception:
            workflow_auditors = audit_info.audit_auth_groups
    if audit_info.current_audit == '-1':
        current_workflow_auditors = None
    else:
        try:
            current_workflow_auditors = Group.objects.get(id=audit_info.current_audit).name
        except Exception:
            current_workflow_auditors = audit_info.current_audit

    # Build the message body from the workflow details
    if workflow_type == WorkflowDict.workflow_type['query']:
        workflow_type_display = WorkflowDict.workflow_type['query_display']
        workflow_detail = QueryPrivilegesApply.objects.get(apply_id=workflow_id)
        workflow_audit_remark = ''
        if workflow_detail.priv_type == 1:
            workflow_content = '''数据库清单:{}\n授权截止时间:{}\n结果集:{}\n'''.format(
                workflow_detail.db_list,
                datetime.datetime.strftime(workflow_detail.valid_date, '%Y-%m-%d %H:%M:%S'),
                workflow_detail.limit_num)
        elif workflow_detail.priv_type == 2:
            workflow_content = '''数据库:{}\n表清单:{}\n授权截止时间:{}\n结果集:{}\n'''.format(
                workflow_detail.db_list, workflow_detail.table_list,
                datetime.datetime.strftime(workflow_detail.valid_date, '%Y-%m-%d %H:%M:%S'),
                workflow_detail.limit_num)
    elif workflow_type == WorkflowDict.workflow_type['sqlreview']:
        workflow_type_display = WorkflowDict.workflow_type['sqlreview_display']
        workflow_detail = SqlWorkflow.objects.get(pk=workflow_id)
        workflow_audit_remark = workflow_detail.audit_remark
        workflow_content = workflow_detail.sql_content
    else:
        raise Exception('工单类型不正确')

    # Build the message according to the workflow status
    if status == WorkflowDict.workflow_status['audit_wait']:  # newly submitted
        msg_title = "[{}]新的工单申请#{}".format(workflow_type_display, audit_id)
        # Recipients: all users in the auth group currently responsible for the audit
        auth_group_names = Group.objects.get(id=audit_info.current_audit).name
        msg_email_reciver = [user.email for user in
                             auth_group_users([auth_group_names], audit_info.group_id)]
        # Cc list
        email_cc = kwargs.get('email_cc', [])
        msg_email_cc = email_cc
        msg_content = '''发起人:{}\n审批流程:{}\n当前审批:{}\n工单名称:{}\n工单地址:{}\n工单详情预览:{}\n'''.format(
            workflow_from, workflow_auditors, current_workflow_auditors, workflow_title,
            workflow_url, workflow_content)
    elif status == WorkflowDict.workflow_status['audit_success']:  # approved
        msg_title = "[{}]工单审核通过#{}".format(workflow_type_display, audit_id)
        # Recipient: the submitter
        msg_email_reciver = [Users.objects.get(username=audit_info.create_user).email]
        # Cc list
        msg_email_cc = kwargs.get('email_cc', [])
        msg_content = '''发起人:{}\n审批流程:{}\n工单名称:{}\n工单地址:{}\n工单详情预览:{}\n'''.format(
            workflow_from, workflow_auditors, workflow_title, workflow_url, workflow_content)
    elif status == WorkflowDict.workflow_status['audit_reject']:  # rejected
        msg_title = "[{}]工单被驳回#{}".format(workflow_type_display, audit_id)
        # Recipient: the submitter
        msg_email_reciver = [Users.objects.get(username=audit_info.create_user).email]
        msg_email_cc = []
        msg_content = '''工单名称:{}\n工单地址:{}\n驳回原因:{}\n提醒:此工单被审核不通过,请按照驳回原因进行修改!'''.format(
            workflow_title, workflow_url, workflow_audit_remark)
    elif status == WorkflowDict.workflow_status['audit_abort']:  # aborted by the submitter, notify all auditors
        msg_title = "[{}]提交人主动终止工单#{}".format(workflow_type_display, audit_id)
        # Recipients: all users in every auth group of the approval flow
        auth_group_names = [Group.objects.get(id=auth_group_id).name
                            for auth_group_id in audit_info.audit_auth_groups.split(',')]
        msg_email_reciver = [user.email for user in
                             auth_group_users(auth_group_names, audit_info.group_id)]
        msg_email_cc = []
        msg_content = '''发起人:{}\n工单名称:{}\n工单地址:{}\n提醒:提交人主动终止流程'''.format(
            workflow_from, workflow_title, workflow_url)
    else:
        raise Exception('工单状态不正确')

    if isinstance(msg_email_reciver, str):
        msg_email_reciver = [msg_email_reciver]
    if isinstance(msg_email_cc, str):
        msg_email_cc = [msg_email_cc]

    # Dispatch by mail, DingTalk, or both, depending on msg_type
    try:
        if msg_type == 0:
            if sys_config.get('mail') == 'true':
                msg_sender.send_email(msg_title, msg_content, msg_email_reciver, listCcAddr=msg_email_cc)
            if sys_config.get('ding') == 'true':
                msg_sender.send_ding(webhook_url, msg_title + '\n' + msg_content)
        if msg_type == 1:
            if sys_config.get('mail') == 'true':
                msg_sender.send_email(msg_title, msg_content, msg_email_reciver, listCcAddr=msg_email_cc)
        elif msg_type == 2:
            if sys_config.get('ding') == 'true':
                msg_sender.send_ding(webhook_url, msg_title + '\n' + msg_content)
    except Exception:
        logger.error(traceback.format_exc())

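# Hedged usage sketch, not in the original module: a caller would typically invoke _send()
# right after creating or advancing an audit record. msg_type 0 means "mail and DingTalk",
# matching the dispatch branch above; the helper name and its arguments are assumptions.
def notify_new_audit(audit_id, request):
    workflow_url = "{}://{}/workflow/{}".format(request.scheme, request.get_host(), audit_id)
    _send(audit_id, 0, workflow_url=workflow_url)
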
def query_priv_check(user, cluster_name, dbName, sqlContent, limit_num):
    finalResult = {'status': 0, 'msg': 'ok', 'data': {}}

    # Check whether the user may query this database/table
    if user.is_superuser:
        if SysConfig().sys_config.get('admin_query_limit'):
            user_limit_num = int(SysConfig().sys_config.get('admin_query_limit'))
        else:
            user_limit_num = 0
        if int(limit_num) == 0:
            limit_num = int(user_limit_num)
        else:
            limit_num = min(int(limit_num), int(user_limit_num))
    # "show create table" breaks Inception, so it is handled separately;
    # "explain" is skipped entirely and not validated
    elif re.match(r"^show\s+create\s+table", sqlContent.lower()):
        tb_name = re.sub(r'^show\s+create\s+table', '', sqlContent, count=1, flags=0).strip()
        # First check for database-wide privileges
        db_privileges = QueryPrivileges.objects.filter(
            user_name=user.username, cluster_name=cluster_name, db_name=dbName, priv_type=1,
            valid_date__gte=datetime.datetime.now(), is_deleted=0)
        # Without database privileges, fall back to table privileges
        if len(db_privileges) == 0:
            tb_privileges = QueryPrivileges.objects.filter(
                user_name=user.username, cluster_name=cluster_name, db_name=dbName,
                table_name=tb_name, priv_type=2,
                valid_date__gte=datetime.datetime.now(), is_deleted=0)
            if len(tb_privileges) == 0:
                finalResult['status'] = 1
                finalResult['msg'] = '你无' + dbName + '.' + tb_name + '表的查询权限!请先到查询权限管理进行申请'
                return finalResult
    # Ordinary SQL query: privileges can be checked down to table level
    else:
        # Use Inception's syntax-tree print to find the tables the query references
        table_ref_result = datamasking.query_table_ref(sqlContent + ';', cluster_name, dbName)
        # Parsing succeeded, so table-level privileges can be checked
        if table_ref_result['status'] == 0:
            table_ref = table_ref_result['data']
            # Verify the user holds query privileges on every referenced table
            QueryPrivilegesOb = QueryPrivileges.objects.filter(
                user_name=user.username, cluster_name=cluster_name)
            for table in table_ref:
                # First check for database-wide privileges
                db_privileges = QueryPrivilegesOb.filter(
                    db_name=table['db'], priv_type=1,
                    valid_date__gte=datetime.datetime.now(), is_deleted=0)
                # Without database privileges, fall back to table privileges
                if len(db_privileges) == 0:
                    tb_privileges = QueryPrivilegesOb.filter(
                        db_name=table['db'], table_name=table['table'],
                        valid_date__gte=datetime.datetime.now(), is_deleted=0)
                    if len(tb_privileges) == 0:
                        finalResult['status'] = 1
                        finalResult['msg'] = '你无' + table['db'] + '.' + table['table'] + '表的查询权限!请先到查询权限管理进行申请'
                        return finalResult
        # Parsing failed: check whether the configuration allows the query to continue,
        # and still validate database-level privileges
        else:
            table_ref = None
            # Database-level check, so a syntax-tree failure never bypasses all validation
            privileges = QueryPrivileges.objects.filter(
                user_name=user.username, cluster_name=cluster_name, db_name=dbName,
                valid_date__gte=datetime.datetime.now(), is_deleted=0)
            if len(privileges) == 0:
                finalResult['status'] = 1
                finalResult['msg'] = '你无' + dbName + '数据库的查询权限!请先到查询权限管理进行申请'
                return finalResult
            if SysConfig().sys_config.get('query_check') == 'true':
                return table_ref_result

        # Smallest limit granted on the referenced tables
        if table_ref:
            db_list = [table_info['db'] for table_info in table_ref]
            table_list = [table_info['table'] for table_info in table_ref]
            user_limit_num = QueryPrivileges.objects.filter(
                user_name=user.username, cluster_name=cluster_name,
                db_name__in=db_list, table_name__in=table_list,
                valid_date__gte=datetime.datetime.now(),
                is_deleted=0).aggregate(Min('limit_num'))['limit_num__min']
            if user_limit_num is None:
                # No table-level grant found, fall back to the database-level limit
                user_limit_num = QueryPrivileges.objects.filter(
                    user_name=user.username, cluster_name=cluster_name, db_name=dbName,
                    valid_date__gte=datetime.datetime.now(),
                    is_deleted=0).aggregate(Min('limit_num'))['limit_num__min']
        else:
            # No table list available, fall back to the database-level limit
            user_limit_num = QueryPrivileges.objects.filter(
                user_name=user.username, cluster_name=cluster_name, db_name=dbName,
                valid_date__gte=datetime.datetime.now(),
                is_deleted=0).aggregate(Min('limit_num'))['limit_num__min']
        if int(limit_num) == 0:
            limit_num = user_limit_num
        else:
            limit_num = min(int(limit_num), user_limit_num)
    finalResult['data'] = limit_num
    return finalResult

def query(request):
    cluster_name = request.POST.get('cluster_name')
    sqlContent = request.POST.get('sql_content')
    dbName = request.POST.get('db_name')
    limit_num = request.POST.get('limit_num')
    finalResult = {'status': 0, 'msg': 'ok', 'data': {}}

    # Server-side parameter validation
    if sqlContent is None or dbName is None or cluster_name is None or limit_num is None:
        finalResult['status'] = 1
        finalResult['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(finalResult), content_type='application/json')
    sqlContent = sqlContent.strip()
    if sqlContent[-1] != ";":
        finalResult['status'] = 1
        finalResult['msg'] = 'SQL语句结尾没有以;结尾,请重新修改并提交!'
        return HttpResponse(json.dumps(finalResult), content_type='application/json')

    # Current user
    user = request.user

    # Filter out comment statements and anything that is not a query
    sqlContent = ''.join(
        map(lambda x: re.compile(r'(^--\s+.*|^/\*.*\*/;\s*$)').sub('', x, count=1),
            sqlContent.splitlines(1))).strip()
    # Collapse blank lines
    sqlContent = re.sub(r'[\r\n\f]{2,}', '\n', sqlContent)
    sql_list = sqlContent.strip().split('\n')
    for sql in sql_list:
        if re.match(r"^select|^show|^explain", sql.lower()):
            break
        else:
            finalResult['status'] = 1
            finalResult['msg'] = '仅支持^select|^show|^explain语法,请联系管理员!'
            return HttpResponse(json.dumps(finalResult), content_type='application/json')

    # Read-only connection for this instance; only the first statement
    # (up to the first semicolon) is executed
    slave_info = SlaveConfig.objects.get(cluster_name=cluster_name)
    sqlContent = sqlContent.strip().split(';')[0]

    # Query privilege check
    priv_check_info = query_priv_check(user, cluster_name, dbName, sqlContent, limit_num)
    if priv_check_info['status'] == 0:
        limit_num = priv_check_info['data']
    else:
        return HttpResponse(json.dumps(priv_check_info), content_type='application/json')

    if re.match(r"^explain", sqlContent.lower()):
        limit_num = 0

    # Append a LIMIT clause to SELECT statements that do not already have one
    if re.match(r"^select", sqlContent.lower()):
        if re.search(r"limit\s+(\d+)$", sqlContent.lower()) is None:
            if re.search(r"limit\s+\d+\s*,\s*(\d+)$", sqlContent.lower()) is None:
                sqlContent = sqlContent + ' limit ' + str(limit_num)
    sqlContent = sqlContent + ';'

    # Run the query and time it
    t_start = time.time()
    sql_result = dao.mysql_query(slave_info.slave_host, slave_info.slave_port,
                                 slave_info.slave_user,
                                 prpCryptor.decrypt(slave_info.slave_password),
                                 str(dbName), sqlContent, limit_num)
    t_end = time.time()
    cost_time = "%5s" % "{:.4f}".format(t_end - t_start)
    sql_result['cost_time'] = cost_time

    # Data masking: check whether masking is enabled and whether syntax-tree errors
    # are allowed to pass through
    t_start = time.time()
    if SysConfig().sys_config.get('data_masking') == 'true':
        # Only SELECT results are masked
        if re.match(r"^select", sqlContent.lower()):
            try:
                masking_result = datamasking.data_masking(cluster_name, dbName, sqlContent, sql_result)
            except Exception:
                if SysConfig().sys_config.get('query_check') == 'true':
                    finalResult['status'] = 1
                    finalResult['msg'] = '脱敏数据报错,请联系管理员'
                    return HttpResponse(json.dumps(finalResult), content_type='application/json')
            else:
                if masking_result['status'] != 0:
                    if SysConfig().sys_config.get('query_check') == 'true':
                        return HttpResponse(json.dumps(masking_result), content_type='application/json')
    t_end = time.time()
    masking_cost_time = "%5s" % "{:.4f}".format(t_end - t_start)
    sql_result['masking_cost_time'] = masking_cost_time

    finalResult['data'] = sql_result

    # Persist successful queries to the query log
    if sql_result.get('Error'):
        pass
    else:
        query_log = QueryLog()
        query_log.username = user.username
        query_log.user_display = user.display
        query_log.db_name = dbName
        query_log.cluster_name = cluster_name
        query_log.sqllog = sqlContent
        if int(limit_num) == 0:
            limit_num = int(sql_result['effect_row'])
        else:
            limit_num = min(int(limit_num), int(sql_result['effect_row']))
        query_log.effect_row = limit_num
        query_log.cost_time = cost_time
        # Guard against a timed-out connection
        try:
            query_log.save()
        except Exception:
            connection.close()
            query_log.save()

    # Return the query result
    return HttpResponse(json.dumps(finalResult, cls=ExtendJSONEncoder, bigint_as_string=True),
                        content_type='application/json')

def addworkflowaudit(self, request, workflow_type, workflow_id, **kwargs):
    result = {'status': 0, 'msg': '', 'data': []}

    # Reject the request if an audit for this workflow is already waiting for review
    workflowInfo = WorkflowAudit.objects.filter(
        workflow_type=workflow_type, workflow_id=workflow_id,
        current_status=WorkflowDict.workflow_status['audit_wait'])
    if len(workflowInfo) >= 1:
        result['msg'] = '该工单当前状态为待审核,请勿重复提交'
        raise Exception(result['msg'])

    # Collect the workflow information
    if workflow_type == WorkflowDict.workflow_type['query']:
        workflow_detail = QueryPrivilegesApply.objects.get(apply_id=workflow_id)
        workflow_title = workflow_detail.title
        group_id = workflow_detail.group_id
        group_name = workflow_detail.group_name
        create_user = workflow_detail.user_name
        audit_auth_groups = workflow_detail.audit_auth_groups
        workflow_remark = ''
    elif workflow_type == WorkflowDict.workflow_type['sqlreview']:
        workflow_detail = SqlWorkflow.objects.get(pk=workflow_id)
        workflow_title = workflow_detail.workflow_name
        group_id = workflow_detail.group_id
        group_name = workflow_detail.group_name
        create_user = workflow_detail.engineer
        audit_auth_groups = workflow_detail.audit_auth_groups
        workflow_remark = ''
    else:
        result['msg'] = '工单类型不存在'
        raise Exception(result['msg'])

    # An approval flow must be configured
    if audit_auth_groups is None:
        result['msg'] = '审批流程不能为空,请先配置审批流程'
        raise Exception(result['msg'])
    else:
        audit_auth_groups_list = audit_auth_groups.split(',')

    # If auto-review is enabled and the workflow qualifies, clear the approval chain
    if SysConfig().sys_config.get('auto_review', False) == 'true':
        if workflow_type == WorkflowDict.workflow_type['sqlreview']:
            if is_autoreview(workflow_id):
                Workflow = SqlWorkflow.objects.get(id=int(workflow_id))
                Workflow.audit_auth_groups = '无需审批'
                Workflow.status = '审核通过'
                Workflow.save()
                audit_auth_groups_list = None

    # Without an approval chain the audit passes immediately
    if audit_auth_groups_list is None:
        # Insert an already-approved record into the audit master table
        auditInfo = WorkflowAudit()
        auditInfo.group_id = group_id
        auditInfo.group_name = group_name
        auditInfo.workflow_id = workflow_id
        auditInfo.workflow_type = workflow_type
        auditInfo.workflow_title = workflow_title
        auditInfo.workflow_remark = workflow_remark
        auditInfo.audit_auth_groups = ''
        auditInfo.current_audit = '-1'
        auditInfo.next_audit = '-1'
        auditInfo.current_status = WorkflowDict.workflow_status['audit_success']  # approved
        auditInfo.create_user = create_user
        auditInfo.create_user_display = request.user.display
        auditInfo.save()
        result['data'] = {'workflow_status': WorkflowDict.workflow_status['audit_success']}
        result['msg'] = '无审核配置,直接审核通过'
    else:
        # Insert a pending record into the audit master table
        auditInfo = WorkflowAudit()
        auditInfo.group_id = group_id
        auditInfo.group_name = group_name
        auditInfo.workflow_id = workflow_id
        auditInfo.workflow_type = workflow_type
        auditInfo.workflow_title = workflow_title
        auditInfo.workflow_remark = workflow_remark
        auditInfo.audit_auth_groups = ','.join(audit_auth_groups_list)
        auditInfo.current_audit = audit_auth_groups_list[0]
        # Is there a next approval step?
        if len(audit_auth_groups_list) == 1:
            auditInfo.next_audit = '-1'
        else:
            auditInfo.next_audit = audit_auth_groups_list[1]
        auditInfo.current_status = WorkflowDict.workflow_status['audit_wait']
        auditInfo.create_user = create_user
        auditInfo.create_user_display = request.user.display
        auditInfo.save()
        result['data'] = {'workflow_status': WorkflowDict.workflow_status['audit_wait']}

    # Notify
    workflow_url = "{}://{}/workflow/{}".format(request.scheme, request.get_host(), auditInfo.audit_id)
    email_cc = kwargs.get('listCcAddr', [])
    send_msg(auditInfo.audit_id, 0, workflow_url=workflow_url, email_cc=email_cc)

    # Return the result of adding the audit
    return result

        'id', 'cluster_name', 'master_host', 'master_port', 'master_user', 'master_password',
        'create_time', 'update_time'
    ]


# SQL workflow admin
@admin.register(workflow)
class workflowAdmin(admin.ModelAdmin):
    list_display = ('id', 'workflow_name', 'group_id', 'cluster_name', 'engineer',
                    'create_time', 'status', 'is_backup')
    search_fields = ['id', 'workflow_name', 'engineer', 'review_man', 'sql_content']


if SysConfig().sys_config.get('query') == 'true':
    # Read-only slave configuration for queries
    @admin.register(slave_config)
    class WorkflowAuditAdmin(admin.ModelAdmin):
        list_display = ('cluster_name', 'slave_host', 'slave_port', 'slave_user',
                        'create_time', 'update_time')
        search_fields = ['id', 'cluster_name', 'slave_host', 'slave_port', 'slave_user',
                         'slave_password', ]

# Approval flow configuration
def menu():
    sys_config = SysConfig().sys_config
    if sys_config.get('sqladvisor') == '' or sys_config.get('sqladvisor') is None:
        sqladvisor_display = 'false'
    else:
        sqladvisor_display = 'true'

    leftMenuBtnsCommon = (
        {'key': 'sqlworkflow', 'name': 'SQL上线工单', 'url': '/sqlworkflow/',
         'class': 'glyphicon glyphicon-home', 'display': 'true'},
        {'key': 'sqlquery', 'name': 'SQL在线查询', 'url': '/sqlquery/',
         'class': 'glyphicon glyphicon-search', 'display': sys_config.get('query')},
        {'key': 'slowquery', 'name': 'SQL慢查日志', 'url': '/slowquery/',
         'class': 'glyphicon glyphicon-align-right', 'display': sys_config.get('slowquery')},
        {'key': 'sqladvisor', 'name': 'SQL优化工具', 'url': '/sqladvisor/',
         'class': 'glyphicon glyphicon-wrench', 'display': sqladvisor_display},
        {'key': 'queryapply', 'name': '查询权限管理', 'url': '/queryapplylist/',
         'class': 'glyphicon glyphicon-eye-open', 'display': sys_config.get('query')},
        {'key': 'diagnosis', 'name': '主库会话管理', 'url': '/diagnosis_process/',
         'class': 'glyphicon glyphicon-scissors', 'display': sys_config.get('db_process_manage')},
    )

    leftMenuBtnsSuper = (
        {'key': 'config', 'name': '系统配置管理', 'url': '/config/',
         'class': 'glyphicon glyphicon-option-horizontal', 'display': 'true'},
        {'key': 'admin', 'name': '后台数据管理', 'url': '/admin/',
         'class': 'glyphicon glyphicon-list', 'display': 'true'},
    )

    leftMenuBtnsDoc = (
        {'key': 'dbaprinciples', 'name': 'SQL审核必读', 'url': '/dbaprinciples/',
         'class': 'glyphicon glyphicon-book', 'display': 'true'},
        {'key': 'charts', 'name': '统计图表展示', 'url': '/charts/',
         'class': 'glyphicon glyphicon-file', 'display': 'true'},
    )

    return leftMenuBtnsCommon, leftMenuBtnsSuper, leftMenuBtnsDoc

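# Hedged usage sketch, not part of the original module: a base view would typically unpack
# the three button groups into the template context and only expose leftMenuBtnsSuper to
# superusers. The helper name and context keys below are assumptions for illustration.
from django.shortcuts import render

def render_with_menu(request, template, extra_context=None):
    common, super_btns, doc = menu()
    context = {'leftMenuBtnsCommon': common, 'leftMenuBtnsDoc': doc}
    if request.user.is_superuser:
        context['leftMenuBtnsSuper'] = super_btns
    context.update(extra_context or {})
    return render(request, template, context)
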
def slowquery_review(request):
    cluster_name = request.POST.get('cluster_name')
    cluster_info = master_config.objects.get(cluster_name=cluster_name)

    # Aliyun RDS instance or a regular instance?
    if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
        # Use the Aliyun slow-log API
        result = aliyun_rds_slowquery_review(request)
    else:
        StartTime = request.POST.get('StartTime')
        EndTime = request.POST.get('EndTime')
        DBName = request.POST.get('db_name')
        limit = int(request.POST.get('limit'))
        offset = int(request.POST.get('offset'))
        limit = offset + limit

        # Date handling: a single-day range is extended to the end of that day
        if StartTime == EndTime:
            EndTime = datetime.datetime.strptime(EndTime, '%Y-%m-%d') + datetime.timedelta(days=1)

        # DBName is optional
        if DBName:
            # Slow-query aggregates filtered by database
            slowsql_obj = SlowQuery.objects.filter(
                slowqueryhistory__hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                slowqueryhistory__db_max=DBName,
                slowqueryhistory__ts_min__range=(StartTime, EndTime),
                last_seen__range=(StartTime, EndTime)
            ).annotate(
                CreateTime=F('last_seen'),
                SQLId=F('checksum'),
                DBName=F('slowqueryhistory__db_max'),  # database
                SQLText=F('fingerprint'),  # SQL fingerprint
            ).values('CreateTime', 'SQLId', 'DBName', 'SQLText').annotate(
                MySQLTotalExecutionCounts=Sum('slowqueryhistory__ts_cnt'),          # total executions
                MySQLTotalExecutionTimes=Sum('slowqueryhistory__query_time_sum'),   # total execution time
                ParseTotalRowCounts=Sum('slowqueryhistory__rows_examined_sum'),     # total rows examined
                ReturnTotalRowCounts=Sum('slowqueryhistory__rows_sent_sum'),        # total rows returned
            ).order_by('-MySQLTotalExecutionCounts')[offset:limit]  # most-executed first

            slowsql_obj_count = SlowQuery.objects.filter(
                slowqueryhistory__hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                slowqueryhistory__db_max=DBName,
                slowqueryhistory__ts_min__range=(StartTime, EndTime),
                last_seen__range=(StartTime, EndTime)
            ).annotate(
                CreateTime=F('last_seen'),
                SQLId=F('checksum'),
                DBName=F('slowqueryhistory__db_max'),
                SQLText=F('fingerprint'),
            ).values('CreateTime', 'SQLId', 'DBName', 'SQLText').annotate(
                MySQLTotalExecutionCounts=Sum('slowqueryhistory__ts_cnt'),
                MySQLTotalExecutionTimes=Sum('slowqueryhistory__query_time_sum'),
                ParseTotalRowCounts=Sum('slowqueryhistory__rows_examined_sum'),
                ReturnTotalRowCounts=Sum('slowqueryhistory__rows_sent_sum'),
            ).count()
        else:
            # Slow-query aggregates across all databases
            slowsql_obj = SlowQuery.objects.filter(
                slowqueryhistory__hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                slowqueryhistory__ts_min__range=(StartTime, EndTime),
                last_seen__range=(StartTime, EndTime)
            ).annotate(
                CreateTime=F('last_seen'),
                SQLId=F('checksum'),
                DBName=F('slowqueryhistory__db_max'),
                SQLText=F('fingerprint'),
            ).values('CreateTime', 'SQLId', 'DBName', 'SQLText').annotate(
                MySQLTotalExecutionCounts=Sum('slowqueryhistory__ts_cnt'),
                MySQLTotalExecutionTimes=Sum('slowqueryhistory__query_time_sum'),
                ParseTotalRowCounts=Sum('slowqueryhistory__rows_examined_sum'),
                ReturnTotalRowCounts=Sum('slowqueryhistory__rows_sent_sum'),
            ).order_by('-MySQLTotalExecutionCounts')[offset:limit]

            slowsql_obj_count = SlowQuery.objects.filter(
                slowqueryhistory__hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                slowqueryhistory__ts_min__range=(StartTime, EndTime),
                last_seen__range=(StartTime, EndTime)
            ).annotate(
                CreateTime=F('last_seen'),
                SQLId=F('checksum'),
                DBName=F('slowqueryhistory__db_max'),
                SQLText=F('fingerprint'),
            ).values('CreateTime', 'SQLId', 'DBName', 'SQLText').annotate(
                MySQLTotalExecutionCounts=Sum('slowqueryhistory__ts_cnt'),
                MySQLTotalExecutionTimes=Sum('slowqueryhistory__query_time_sum'),
                ParseTotalRowCounts=Sum('slowqueryhistory__rows_examined_sum'),
                ReturnTotalRowCounts=Sum('slowqueryhistory__rows_sent_sum'),
            ).count()

        # Serialize the QuerySet
        SQLSlowLog = [SlowLog for SlowLog in slowsql_obj]
        result = {"total": slowsql_obj_count, "rows": SQLSlowLog}

    # Return the query result
    return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
                        content_type='application/json')

def slowquery_review_history(request):
    cluster_name = request.POST.get('cluster_name')
    cluster_info = master_config.objects.get(cluster_name=cluster_name)

    # Aliyun RDS instance or a regular instance?
    if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
        # Use the Aliyun slow-log API
        result = aliyun_rds_slowquery_review_history(request)
    else:
        StartTime = request.POST.get('StartTime')
        EndTime = request.POST.get('EndTime')
        DBName = request.POST.get('db_name')
        SQLId = request.POST.get('SQLId')
        limit = int(request.POST.get('limit'))
        offset = int(request.POST.get('offset'))

        # Date handling: a single-day range is extended to the end of that day
        if StartTime == EndTime:
            EndTime = datetime.datetime.strptime(EndTime, '%Y-%m-%d') + datetime.timedelta(days=1)
        limit = offset + limit

        # SQLId and DBName are optional
        if SQLId:
            # Slow-query detail records for one fingerprint
            slowsql_record_obj = SlowQueryHistory.objects.filter(
                hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                checksum=int(SQLId),
                ts_min__range=(StartTime, EndTime)
            ).annotate(
                ExecutionStartTime=F('ts_min'),         # execution start time
                DBName=F('db_max'),                     # database name
                HostAddress=F('user_max'),              # user
                SQLText=F('sample'),                    # SQL sample
                QueryTimes=F('query_time_sum'),         # execution time (s)
                LockTimes=F('lock_time_sum'),           # lock time (s)
                ParseRowCounts=F('rows_examined_sum'),  # rows examined
                ReturnRowCounts=F('rows_sent_sum')      # rows returned
            ).values('ExecutionStartTime', 'DBName', 'HostAddress', 'SQLText', 'QueryTimes',
                     'LockTimes', 'ParseRowCounts', 'ReturnRowCounts')[offset:limit]

            slowsql_obj_count = SlowQueryHistory.objects.filter(
                hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                checksum=int(SQLId),
                ts_min__range=(StartTime, EndTime)).count()
        else:
            if DBName:
                # Slow-query detail records filtered by database
                slowsql_record_obj = SlowQueryHistory.objects.filter(
                    hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                    db_max=DBName,
                    ts_min__range=(StartTime, EndTime)
                ).annotate(
                    ExecutionStartTime=F('ts_min'),
                    DBName=F('db_max'),
                    HostAddress=F('user_max'),
                    SQLText=F('sample'),
                    QueryTimes=F('query_time_sum'),
                    LockTimes=F('lock_time_sum'),
                    ParseRowCounts=F('rows_examined_sum'),
                    ReturnRowCounts=F('rows_sent_sum')
                ).values('ExecutionStartTime', 'DBName', 'HostAddress', 'SQLText', 'QueryTimes',
                         'LockTimes', 'ParseRowCounts', 'ReturnRowCounts')[offset:limit]

                slowsql_obj_count = SlowQueryHistory.objects.filter(
                    hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                    db_max=DBName,
                    ts_min__range=(StartTime, EndTime)).count()
            else:
                # Slow-query detail records across all databases
                slowsql_record_obj = SlowQueryHistory.objects.filter(
                    hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                    ts_min__range=(StartTime, EndTime)
                ).annotate(
                    ExecutionStartTime=F('ts_min'),
                    DBName=F('db_max'),
                    HostAddress=F('user_max'),
                    SQLText=F('sample'),
                    QueryTimes=F('query_time_sum'),
                    LockTimes=F('lock_time_sum'),
                    ParseRowCounts=F('rows_examined_sum'),
                    ReturnRowCounts=F('rows_sent_sum')
                ).values('ExecutionStartTime', 'DBName', 'HostAddress', 'SQLText', 'QueryTimes',
                         'LockTimes', 'ParseRowCounts', 'ReturnRowCounts')[offset:limit]

                slowsql_obj_count = SlowQueryHistory.objects.filter(
                    hostname_max=(cluster_info.master_host + ':' + str(cluster_info.master_port)),
                    ts_min__range=(StartTime, EndTime)).count()

        # Serialize the QuerySet
        SQLSlowRecord = [SlowRecord for SlowRecord in slowsql_record_obj]
        result = {"total": slowsql_obj_count, "rows": SQLSlowRecord}

    # Return the query result
    return HttpResponse(json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
                        content_type='application/json')

import simplejson as json

from django.contrib.auth.decorators import permission_required
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse

from sql.utils.aes_decryptor import Prpcrypt
from sql.utils.dao import Dao
from sql.utils.extend_json_encoder import ExtendJSONEncoder
from sql.utils.config import SysConfig
from .models import MasterConfig, AliyunRdsConfig

if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
    from .aliyun_rds import process_status as aliyun_process_status, \
        create_kill_session as aliyun_create_kill_session, \
        kill_session as aliyun_kill_session, \
        sapce_status as aliyun_sapce_status

dao = Dao()
prpCryptor = Prpcrypt()


# Diagnostics: process list
@csrf_exempt
@permission_required('sql.process_view', raise_exception=True)
def process(request):
    cluster_name = request.POST.get('cluster_name')
    command_type = request.POST.get('command_type')
    base_sql = "select id, user, host, db, command, time, state, ifnull(info,'') as info from information_schema.processlist"

    # Aliyun RDS instance or a regular instance?
    if len(AliyunRdsConfig.objects.filter(cluster_name=cluster_name)) > 0: