def soar(request):
    """Run the soar SQL analyzer against one statement and return a markdown report.

    POST params: instance_name, db_name, sql.  The statement is analyzed by the
    external ``soar`` binary against the target (online) instance and the
    configured test instance; the report text is returned as JSON ``data``.
    """
    instance_name = request.POST.get('instance_name')
    db_name = request.POST.get('db_name')
    sql = request.POST.get('sql')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    # Server-side parameter validation.
    if not (instance_name and db_name and sql):
        result['status'] = 1
        result['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(result), content_type='application/json')

    # Permission check: the user's group must be associated with the instance.
    try:
        user_instances(request.user, 'all').get(instance_name=instance_name)
    except Exception:
        result['status'] = 1
        result['msg'] = '你所在组未关联该实例'
        return HttpResponse(json.dumps(result), content_type='application/json')

    # Collapse the statement onto one line.  No manual shell escaping is
    # needed any more because the command runs without a shell (see below);
    # the old `\"` / backtick escaping was only there for shell quoting.
    sql = sql.strip().replace('\n', ' ')

    # Connection info (DSN) of the target online instance.
    instance_info = Instance.objects.get(instance_name=instance_name)
    online_dsn = "{user}:{pwd}@{host}:{port}/{db}".format(
        user=instance_info.user,
        pwd=Prpcrypt().decrypt(instance_info.password),
        host=instance_info.host,
        port=instance_info.port,
        db=db_name)

    # Test-instance DSN and soar binary path from system configuration.
    soar_cfg = Soar()
    test_dsn = soar_cfg.soar_test_dsn
    soar_path = soar_cfg.soar_path
    if not (soar_path and test_dsn):
        result['status'] = 1
        result['msg'] = '请配置soar_path和test_dsn!'
        return HttpResponse(json.dumps(result), content_type='application/json')

    # Hand the statement to soar for analysis.  The command is built as an
    # argument list and executed with shell=False so that user-supplied SQL
    # and the decrypted password can never be interpreted by a shell
    # (the previous string-built, shell=True form was injectable).
    try:
        p = subprocess.Popen(
            [soar_path,
             '-allow-online-as-test=false',
             '-report-type=markdown',
             '-query', sql,
             '-online-dsn', online_dsn,
             '-test-dsn', test_dsn],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True)
        stdout, stderr = p.communicate()
        result['data'] = stdout
    except Exception:
        logger.error(traceback.format_exc())
        result['data'] = 'soar运行报错,请检查相关日志'
    return HttpResponse(json.dumps(result), content_type='application/json')
def sqladvisor(request):
    """Submit a statement to SQLAdvisor and return its index advice as JSON.

    POST params: sql_content, instance_name, db_name, verbose (optional,
    defaults to 1).  The external ``sqladvisor`` binary is invoked against
    the master instance's connection info.
    """
    sql_content = request.POST.get('sql_content')
    instance_name = request.POST.get('instance_name')
    dbName = request.POST.get('db_name')
    verbose = request.POST.get('verbose')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    # Server-side parameter validation.
    if sql_content is None or instance_name is None:
        result['status'] = 1
        result['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(result), content_type='application/json')
    sql_content = sql_content.strip()

    # Permission check: the user's group must be associated with this master.
    try:
        user_instances(request.user, 'master').get(instance_name=instance_name)
    except Exception:
        result['status'] = 1
        result['msg'] = '你所在组未关联该主库!'
        return HttpResponse(json.dumps(result), content_type='application/json')

    # Default verbosity.
    if verbose is None or verbose == '':
        verbose = 1

    # Connection info of the master instance.
    instance_info = Instance.objects.get(instance_name=instance_name)

    # Path of the sqladvisor binary from system configuration.
    sqladvisor_path = SysConfig().sys_config.get('sqladvisor')
    # Collapse to one line; no shell escaping needed since shell=False below.
    sql_content = sql_content.replace('\n', ' ')

    # Build the command as an argument list and run it without a shell so the
    # user-supplied SQL and the decrypted password cannot inject shell
    # commands.  This also fixes the old string-built command, whose
    # `-p "%s\"` left a literal backslash before the closing quote and
    # corrupted the quoting of every argument after the password.
    try:
        p = subprocess.Popen(
            [sqladvisor_path,
             '-h', str(instance_info.host),
             '-P', str(instance_info.port),
             '-u', str(instance_info.user),
             '-p', str(Prpcrypt().decrypt(instance_info.password)),
             '-d', str(dbName),
             '-v', str(verbose),
             '-q', sql_content],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True)
        stdout, stderr = p.communicate()
        result['data'] = stdout
    except Exception:
        logger.error(traceback.format_exc())
        result['data'] = 'sqladvisor运行报错,请检查日志'
    return HttpResponse(json.dumps(result), content_type='application/json')
def schemasync(request):
    """Render the schemasync page with the master instances the user may access."""
    masters = user_instances(request.user, 'master')
    context = {'instances': [m.instance_name for m in masters]}
    return render(request, 'schemasync.html', context)
def binlog2sql(request):
    """Render the binlog2sql page with every instance the user may access."""
    accessible = user_instances(request.user, 'all')
    context = {'instances': [inst.instance_name for inst in accessible]}
    return render(request, 'binlog2sql.html', context)
def sqlquery(request):
    """Render the SQL query page with the slave instances linked to the user."""
    slaves = user_instances(request.user, 'slave')
    listAllClusterName = [s.instance_name for s in slaves]
    return render(request, 'sqlquery.html',
                  {'listAllClusterName': listAllClusterName})
def sqlquery(request):
    """Render the SQL query page with the slave instances linked to the user."""
    slaves = user_instances(request.user, 'slave')
    context = {'instances': [s.instance_name for s in slaves]}
    return render(request, 'sqlquery.html', context)
def dbdiagnostic(request):
    """Render the DB diagnostics page (process tab) with the user's master instances."""
    masters = user_instances(request.user, 'master')
    names = [m.instance_name for m in masters]
    return render(request, 'dbdiagnostic.html',
                  {'tab': 'process', 'instance_name_list': names})
def sqladvisor(request):
    """Render the SQLAdvisor page with the user's master instances."""
    masters = user_instances(request.user, 'master')
    names = [m.instance_name for m in masters]
    return render(request, 'sqladvisor.html', {'listAllClusterName': names})
def slowquery(request):
    """Render the slow-query page with the user's master instances."""
    masters = user_instances(request.user, 'master')
    names = [m.instance_name for m in masters]
    return render(request, 'slowquery.html',
                  {'tab': 'slowquery', 'instance_name_list': names})
def dbdiagnostic(request):
    """Render the DB diagnostics page (process tab) with all instances the user may access."""
    accessible = user_instances(request.user, 'all')
    context = {'tab': 'process',
               'instances': [inst.instance_name for inst in accessible]}
    return render(request, 'dbdiagnostic.html', context)
def sqladvisor(request):
    """Render the SQLAdvisor page with all instances the user may access."""
    accessible = user_instances(request.user, 'all')
    context = {'instances': [inst.instance_name for inst in accessible]}
    return render(request, 'sqladvisor.html', context)
def slowquery(request):
    """Render the slow-query page with all instances the user may access."""
    accessible = user_instances(request.user, 'all')
    context = {'tab': 'slowquery',
               'instances': [inst.instance_name for inst in accessible]}
    return render(request, 'slowquery.html', context)
def instanceusercreate(request):
    """GET: render the instance-user creation page.

    POST: create a MySQL user on the given instance and return the result
    as JSON ({'status', 'msg', 'data'}).
    """
    if request.GET:
        instances = [
            instance.instance_name
            for instance in user_instances(request.user, 'all')
        ]
        exteral_ip = get_out_ip()
        context = {'instances': instances, 'user_host': exteral_ip}
        return render(request, 'instanceusercreate.html', context)

    user_name = request.POST.get('user', '')
    password = request.POST.get('password', '')
    privileges = request.POST.get('privilegs')
    instance_name = request.POST.get('instance_name')
    user_host = request.POST.get('user_host')
    schema = request.POST.get('schema')
    tables = request.POST.get('tables')

    result = {'status': 0, 'msg': 'ok', 'data': []}

    # Server-side validation: user, privileges and host are all required.
    # (The old check joined the conditions with `and` and never used the
    # message it built, so invalid input fell through silently.)
    if not user_name or not privileges or not user_host:
        result['status'] = 1
        result['msg'] = "info is not empty"
        return HttpResponse(json.dumps(result), content_type='application/json')

    # Generate a random password when none was supplied.
    # NOTE(review): the password is generated but never included in `data`
    # passed to mysql_createuser — confirm Dao reads it from elsewhere.
    if password == '':
        password = generate_random_password(32)
    # Default to all schemas.
    if schema is None or schema == '':
        schema = '*'

    data = {
        'user': user_name,
        'host': user_host,
        'schema': schema,
        'tables': tables,
    }
    try:
        # Connect to the target instance and create the user there.
        tb_list = Dao(instance_name=instance_name).mysql_createuser(data)
        # Store the result for display in the detail view.
        result['data'] = tb_list
    except Exception as msg:
        result['status'] = 1
        result['msg'] = str(msg)
    # The original ended with a bare `return` (None); a Django view must
    # return an HttpResponse, so serialize the result like the sibling views.
    return HttpResponse(json.dumps(result), content_type='application/json')
def slowquery_review(request):
    """Aggregate slow-query statistics for one instance and return them as JSON.

    POST params: instance_name, StartTime, EndTime (dates, '%Y-%m-%d'),
    db_name (optional), limit, offset.  Aliyun RDS instances (enabled in
    AliyunRdsConfig) are served by the RDS API; all others are aggregated
    from the local SlowQuery / SlowQueryHistory tables.
    """
    instance_name = request.POST.get('instance_name')
    # Server-side permission check: the user's group must own this master.
    try:
        user_instances(request.user, 'master').get(instance_name=instance_name)
    except Exception:
        result = {'status': 1, 'msg': '你所在组未关联该实例', 'data': []}
        return HttpResponse(json.dumps(result), content_type='application/json')
    # Decide whether this is an (enabled) Aliyun RDS instance or a plain one.
    instance_info = Instance.objects.get(instance_name=instance_name)
    if len(
            AliyunRdsConfig.objects.filter(instance_name=instance_name,
                                           is_enable=1)) > 0:
        # Delegate to the Aliyun RDS slow-log API.
        result = aliyun_rds_slowquery_review(request)
    else:
        StartTime = request.POST.get('StartTime')
        EndTime = request.POST.get('EndTime')
        DBName = request.POST.get('db_name')
        limit = int(request.POST.get('limit'))
        offset = int(request.POST.get('offset'))
        # Convert (offset, count) into an end index for queryset slicing.
        limit = offset + limit
        # Make EndTime inclusive by extending it to the start of the next day.
        EndTime = datetime.datetime.strptime(
            EndTime, '%Y-%m-%d') + datetime.timedelta(days=1)
        # DBName is optional.
        if DBName:
            # Aggregated slow-query rows restricted to one schema.
            slowsql_obj = SlowQuery.objects.filter(
                slowqueryhistory__hostname_max=(instance_info.host + ':' +
                                                str(instance_info.port)),
                slowqueryhistory__db_max=DBName,
                slowqueryhistory__ts_min__range=(StartTime, EndTime)).annotate(
                    SQLText=F('fingerprint'),
                    SQLId=F('checksum')).values('SQLText', 'SQLId').annotate(
                        CreateTime=Max('slowqueryhistory__ts_max'),
                        DBName=Max('slowqueryhistory__db_max'),  # schema name
                        QueryTimeAvg=Sum('slowqueryhistory__query_time_sum') /
                        Sum('slowqueryhistory__ts_cnt'),  # average execution time
                        MySQLTotalExecutionCounts=Sum(
                            'slowqueryhistory__ts_cnt'),  # total execution count
                        MySQLTotalExecutionTimes=Sum(
                            'slowqueryhistory__query_time_sum'),  # total execution time
                        ParseTotalRowCounts=Sum(
                            'slowqueryhistory__rows_examined_sum'),  # total rows examined
                        ReturnTotalRowCounts=Sum(
                            'slowqueryhistory__rows_sent_sum'),  # total rows returned
                    ).order_by('-MySQLTotalExecutionCounts')[
                        offset:limit]  # most-executed first
            # Same query without slicing, for the total row count.
            slowsql_obj_count = SlowQuery.objects.filter(
                slowqueryhistory__hostname_max=(instance_info.host + ':' +
                                                str(instance_info.port)),
                slowqueryhistory__db_max=DBName,
                slowqueryhistory__ts_min__range=(StartTime, EndTime)).annotate(
                    SQLText=F('fingerprint'),
                    SQLId=F('checksum')).values('SQLText', 'SQLId').annotate(
                        CreateTime=Max('slowqueryhistory__ts_max'),
                        DBName=Max('slowqueryhistory__db_max'),  # schema name
                        QueryTimeAvg=Sum('slowqueryhistory__query_time_sum') /
                        Sum('slowqueryhistory__ts_cnt'),  # average execution time
                        MySQLTotalExecutionCounts=Sum(
                            'slowqueryhistory__ts_cnt'),  # total execution count
                        MySQLTotalExecutionTimes=Sum(
                            'slowqueryhistory__query_time_sum'),  # total execution time
                        ParseTotalRowCounts=Sum(
                            'slowqueryhistory__rows_examined_sum'),  # total rows examined
                        ReturnTotalRowCounts=Sum(
                            'slowqueryhistory__rows_sent_sum'),  # total rows returned
                    ).count()
        else:
            # Aggregated slow-query rows across all schemas of the instance.
            slowsql_obj = SlowQuery.objects.filter(
                slowqueryhistory__hostname_max=(instance_info.host + ':' +
                                                str(instance_info.port)),
                slowqueryhistory__ts_min__range=(StartTime, EndTime),
            ).annotate(SQLText=F('fingerprint'), SQLId=F('checksum')).values(
                'SQLText', 'SQLId').annotate(
                    CreateTime=Max('slowqueryhistory__ts_max'),
                    DBName=Max('slowqueryhistory__db_max'),  # schema name
                    QueryTimeAvg=Sum('slowqueryhistory__query_time_sum') /
                    Sum('slowqueryhistory__ts_cnt'),  # average execution time
                    MySQLTotalExecutionCounts=Sum(
                        'slowqueryhistory__ts_cnt'),  # total execution count
                    MySQLTotalExecutionTimes=Sum(
                        'slowqueryhistory__query_time_sum'),  # total execution time
                    ParseTotalRowCounts=Sum(
                        'slowqueryhistory__rows_examined_sum'),  # total rows examined
                    ReturnTotalRowCounts=Sum(
                        'slowqueryhistory__rows_sent_sum'),  # total rows returned
                ).order_by('-MySQLTotalExecutionCounts')[
                    offset:limit]  # most-executed first
            # Same query without slicing, for the total row count.
            slowsql_obj_count = SlowQuery.objects.filter(
                slowqueryhistory__hostname_max=(instance_info.host + ':' +
                                                str(instance_info.port)),
                slowqueryhistory__ts_min__range=(StartTime, EndTime),
            ).annotate(SQLText=F('fingerprint'), SQLId=F('checksum')).values(
                'SQLText', 'SQLId').annotate(
                    CreateTime=Max('slowqueryhistory__ts_max'),
                    DBName=Max('slowqueryhistory__db_max'),  # schema name
                    QueryTimeAvg=Sum('slowqueryhistory__query_time_sum') /
                    Sum('slowqueryhistory__ts_cnt'),  # average execution time
                    MySQLTotalExecutionCounts=Sum(
                        'slowqueryhistory__ts_cnt'),  # total execution count
                    MySQLTotalExecutionTimes=Sum(
                        'slowqueryhistory__query_time_sum'),  # total execution time
                    ParseTotalRowCounts=Sum(
                        'slowqueryhistory__rows_examined_sum'),  # total rows examined
                    ReturnTotalRowCounts=Sum(
                        'slowqueryhistory__rows_sent_sum'),  # total rows returned
                ).count()
        # Serialize the QuerySet (values() rows are plain dicts).
        SQLSlowLog = [SlowLog for SlowLog in slowsql_obj]
        result = {"total": slowsql_obj_count, "rows": SQLSlowLog}
    # Return the query result.
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
def slowquery_review_history(request):
    """Return per-interval slow-query detail records for one instance as JSON.

    POST params: instance_name, StartTime, EndTime ('%Y-%m-%d'), db_name
    (optional), SQLId (optional fingerprint checksum), limit, offset.
    Aliyun RDS instances are served by the RDS API; all others from the
    SlowQueryHistory table.
    """
    instance_name = request.POST.get('instance_name')
    # Server-side permission check.
    try:
        user_instances(request.user, 'master').get(instance_name=instance_name)
    except Exception:
        result = {'status': 1, 'msg': '你所在组未关联该实例', 'data': []}
        return HttpResponse(json.dumps(result), content_type='application/json')
    # Decide whether this is an (enabled) Aliyun RDS instance or a plain one.
    instance_info = Instance.objects.get(instance_name=instance_name)
    if len(
            AliyunRdsConfig.objects.filter(instance_name=instance_name,
                                           is_enable=1)) > 0:
        # Delegate to the Aliyun RDS slow-log API.
        result = aliyun_rds_slowquery_review_history(request)
    else:
        StartTime = request.POST.get('StartTime')
        EndTime = request.POST.get('EndTime')
        DBName = request.POST.get('db_name')
        SQLId = request.POST.get('SQLId')
        limit = int(request.POST.get('limit'))
        offset = int(request.POST.get('offset'))
        # Make EndTime inclusive by extending it to the start of the next day.
        EndTime = datetime.datetime.strptime(
            EndTime, '%Y-%m-%d') + datetime.timedelta(days=1)
        # Convert (offset, count) into an end index for queryset slicing.
        limit = offset + limit
        # SQLId and DBName are both optional.
        if SQLId:
            # Detail rows for one specific statement fingerprint.
            slowsql_record_obj = SlowQueryHistory.objects.filter(
                hostname_max=(instance_info.host + ':' +
                              str(instance_info.port)),
                checksum=SQLId,
                ts_min__range=(StartTime, EndTime)).annotate(
                    ExecutionStartTime=F(
                        'ts_min'),  # earliest occurrence in the 5-minute sampling window
                    DBName=F('db_max'),  # schema name
                    HostAddress=Concat(V('\''), 'user_max', V('\''), V('@'),
                                       V('\''), 'client_max',
                                       V('\'')),  # rendered as 'user'@'client'
                    SQLText=F('sample'),  # sample SQL statement
                    TotalExecutionCounts=F('ts_cnt'),  # occurrences in this window
                    QueryTimePct95=F('query_time_pct_95'),  # 95th-percentile query time
                    QueryTimes=F('query_time_sum'),  # total query time (s)
                    LockTimes=F('lock_time_sum'),  # total lock time (s)
                    ParseRowCounts=F('rows_examined_sum'),  # total rows examined
                    ReturnRowCounts=F('rows_sent_sum')  # total rows returned
                ).values('ExecutionStartTime', 'DBName', 'HostAddress',
                         'SQLText', 'TotalExecutionCounts', 'QueryTimePct95',
                         'QueryTimes', 'LockTimes', 'ParseRowCounts',
                         'ReturnRowCounts')[offset:limit]
            # Unsliced count of matching rows.
            slowsql_obj_count = SlowQueryHistory.objects.filter(
                hostname_max=(instance_info.host + ':' +
                              str(instance_info.port)),
                checksum=SQLId,
                ts_min__range=(StartTime, EndTime)).count()
        else:
            if DBName:
                # Detail rows restricted to one schema.
                slowsql_record_obj = SlowQueryHistory.objects.filter(
                    hostname_max=(instance_info.host + ':' +
                                  str(instance_info.port)),
                    db_max=DBName,
                    ts_min__range=(StartTime, EndTime)
                ).annotate(
                    ExecutionStartTime=F(
                        'ts_min'),  # earliest occurrence in the 5-minute sampling window
                    DBName=F('db_max'),  # schema name
                    HostAddress=Concat(V('\''), 'user_max', V('\''), V('@'),
                                       V('\''), 'client_max',
                                       V('\'')),  # rendered as 'user'@'client'
                    SQLText=F('sample'),  # sample SQL statement
                    TotalExecutionCounts=F('ts_cnt'),  # occurrences in this window
                    QueryTimePct95=F('query_time_pct_95'),  # 95th-percentile query time
                    QueryTimes=F('query_time_sum'),  # total query time (s)
                    LockTimes=F('lock_time_sum'),  # total lock time (s)
                    ParseRowCounts=F('rows_examined_sum'),  # total rows examined
                    ReturnRowCounts=F('rows_sent_sum')  # total rows returned
                ).values('ExecutionStartTime', 'DBName', 'HostAddress',
                         'SQLText', 'TotalExecutionCounts', 'QueryTimePct95',
                         'QueryTimes', 'LockTimes', 'ParseRowCounts',
                         'ReturnRowCounts')[offset:limit]
                # Unsliced count of matching rows.
                slowsql_obj_count = SlowQueryHistory.objects.filter(
                    hostname_max=(instance_info.host + ':' +
                                  str(instance_info.port)),
                    db_max=DBName,
                    ts_min__range=(StartTime, EndTime)).count()
            else:
                # Detail rows across all schemas of the instance.
                slowsql_record_obj = SlowQueryHistory.objects.filter(
                    hostname_max=(instance_info.host + ':' +
                                  str(instance_info.port)),
                    ts_min__range=(StartTime, EndTime)
                ).annotate(
                    ExecutionStartTime=F(
                        'ts_min'),  # earliest occurrence in the 5-minute sampling window
                    DBName=F('db_max'),  # schema name
                    HostAddress=Concat(V('\''), 'user_max', V('\''), V('@'),
                                       V('\''), 'client_max',
                                       V('\'')),  # rendered as 'user'@'client'
                    SQLText=F('sample'),  # sample SQL statement
                    TotalExecutionCounts=F('ts_cnt'),  # occurrences in this window
                    QueryTimePct95=F('query_time_pct_95'),  # 95th-percentile query time
                    QueryTimes=F('query_time_sum'),  # total query time (s)
                    LockTimes=F('lock_time_sum'),  # total lock time (s)
                    ParseRowCounts=F('rows_examined_sum'),  # total rows examined
                    ReturnRowCounts=F('rows_sent_sum')  # total rows returned
                ).values('ExecutionStartTime', 'DBName', 'HostAddress',
                         'SQLText', 'TotalExecutionCounts',
                         'QueryTimePct95', 'QueryTimes', 'LockTimes',
                         'ParseRowCounts', 'ReturnRowCounts')[offset:limit]
                # Unsliced count of matching rows.
                slowsql_obj_count = SlowQueryHistory.objects.filter(
                    hostname_max=(instance_info.host + ':' +
                                  str(instance_info.port)),
                    ts_min__range=(StartTime, EndTime)).count()
        # Serialize the QuerySet (values() rows are plain dicts).
        SQLSlowRecord = [SlowRecord for SlowRecord in slowsql_record_obj]
        result = {"total": slowsql_obj_count, "rows": SQLSlowRecord}
    # Return the query result.
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
def applyforprivileges(request):
    """Create a query-privilege application workflow.

    POST params: title, instance_name, group_name, priv_type (1 = database
    level with db_list; 2 = table level with db_name + table_list),
    valid_date, limit_num, optional apply_remark.  Rejects duplicate
    applications for privileges the user already holds, then saves the
    application and kicks off the audit workflow inside one transaction.
    """
    title = request.POST['title']
    instance_name = request.POST['instance_name']
    group_name = request.POST['group_name']
    group_id = SqlGroup.objects.get(group_name=group_name).group_id
    priv_type = request.POST['priv_type']
    db_name = request.POST['db_name']
    valid_date = request.POST['valid_date']
    limit_num = request.POST['limit_num']
    try:
        workflow_remark = request.POST['apply_remark']
    except Exception:
        workflow_remark = ''
    # NOTE(review): workflow_remark is read but never stored below — confirm intent.
    # Requesting user.
    user = request.user
    # Server-side parameter validation.
    # NOTE(review): request.POST['...'] raises KeyError rather than returning
    # None, so these `is None` checks can never fire — confirm intent.
    result = {'status': 0, 'msg': 'ok', 'data': []}
    if int(priv_type) == 1:
        db_list = request.POST['db_list']
        if title is None or instance_name is None or db_list is None or valid_date is None or limit_num is None:
            result['status'] = 1
            result['msg'] = '请填写完整'
            return HttpResponse(json.dumps(result),
                                content_type='application/json')
    elif int(priv_type) == 2:
        table_list = request.POST['table_list']
        if title is None or instance_name is None or db_name is None or valid_date is None or table_list is None or limit_num is None:
            result['status'] = 1
            result['msg'] = '请填写完整'
            return HttpResponse(json.dumps(result),
                                content_type='application/json')
    # Permission check: the user's group must be associated with this slave.
    try:
        user_instances(request.user, 'slave').get(instance_name=instance_name)
    except Exception:
        context = {'errMsg': '你所在组未关联该实例!'}
        return render(request, 'error.html', context)
    # Decide whether privileges are limited to table level.
    # Database-level privileges.
    if int(priv_type) == 1:
        db_list = db_list.split(',')
        # Check whether the user already holds whole-database query privileges.
        own_dbs = QueryPrivileges.objects.filter(
            instance_name=instance_name,
            user_name=user.username,
            db_name__in=db_list,
            valid_date__gte=datetime.datetime.now(),
            priv_type=1,
            is_deleted=0).values('db_name')
        own_db_list = [table_info['db_name'] for table_info in own_dbs]
        # NOTE(review): a list comprehension never yields None, so this
        # branch is dead; the else always runs.
        if own_db_list is None:
            pass
        else:
            for db_name in db_list:
                if db_name in own_db_list:
                    result['status'] = 1
                    result[
                        'msg'] = '你已拥有' + instance_name + '实例' + db_name + '库的全部查询权限,不能重复申请'
                    return HttpResponse(json.dumps(result),
                                        content_type='application/json')
    # Table-level privileges.
    elif int(priv_type) == 2:
        table_list = table_list.split(',')
        # Check whether the user already holds query privileges on these tables.
        own_tables = QueryPrivileges.objects.filter(
            instance_name=instance_name,
            user_name=user.username,
            db_name=db_name,
            table_name__in=table_list,
            valid_date__gte=datetime.datetime.now(),
            priv_type=2,
            is_deleted=0).values('table_name')
        own_table_list = [
            table_info['table_name'] for table_info in own_tables
        ]
        # NOTE(review): same dead `is None` branch as above.
        if own_table_list is None:
            pass
        else:
            for table_name in table_list:
                if table_name in own_table_list:
                    result['status'] = 1
                    result[
                        'msg'] = '你已拥有' + instance_name + '实例' + db_name + '.' + table_name + '表的查询权限,不能重复申请'
                    return HttpResponse(json.dumps(result),
                                        content_type='application/json')
    # A transaction keeps the application row and audit records consistent.
    try:
        with transaction.atomic():
            # Save the application to the database.
            applyinfo = QueryPrivilegesApply()
            applyinfo.title = title
            applyinfo.group_id = group_id
            applyinfo.group_name = group_name
            applyinfo.audit_auth_groups = Workflow.audit_settings(
                group_id, WorkflowDict.workflow_type['query'])
            applyinfo.user_name = user.username
            applyinfo.user_display = user.display
            applyinfo.instance_name = instance_name
            if int(priv_type) == 1:
                applyinfo.db_list = ','.join(db_list)
                applyinfo.table_list = ''
            elif int(priv_type) == 2:
                applyinfo.db_list = db_name
                applyinfo.table_list = ','.join(table_list)
            applyinfo.priv_type = int(priv_type)
            applyinfo.valid_date = valid_date
            applyinfo.status = WorkflowDict.workflow_status[
                'audit_wait']  # pending audit
            applyinfo.limit_num = limit_num
            applyinfo.create_user = user.username
            applyinfo.save()
            apply_id = applyinfo.apply_id
            # Insert audit info via the workflow engine;
            # query-privilege applications use workflow_type=1.
            audit_result = workflowOb.addworkflowaudit(
                request, WorkflowDict.workflow_type['query'], apply_id)
            if audit_result['status'] == 0:
                # Update the business table's audit status and decide whether
                # privilege rows should be inserted.
                query_audit_call_back(apply_id,
                                      audit_result['data']['workflow_status'])
    except Exception as msg:
        logger.error(traceback.format_exc())
        result['status'] = 1
        result['msg'] = str(msg)
    else:
        result = audit_result
    return HttpResponse(json.dumps(result), content_type='application/json')
def autoreview(request):
    """Create (or update) a SQL workflow after running Inception auto-review.

    POST params: workflow_id (optional — update an existing workflow),
    sql_content, workflow_name, group_name, instance_name, db_name,
    is_backup, notify_users.  The SQL is sent to Inception for automatic
    review; depending on the result and the ``auto_review_wrong`` system
    config, the workflow is marked for manual review or flagged as failed.
    Redirects to the workflow detail page on success.
    """
    workflow_id = request.POST.get('workflow_id')
    sql_content = request.POST['sql_content']
    workflow_title = request.POST['workflow_name']
    group_name = request.POST['group_name']
    group_id = SqlGroup.objects.get(group_name=group_name).group_id
    instance_name = request.POST['instance_name']
    db_name = request.POST.get('db_name')
    is_backup = request.POST['is_backup']
    notify_users = request.POST.getlist('notify_users')
    # Server-side parameter validation.
    # NOTE(review): request.POST['...'] raises KeyError rather than returning
    # None, so only db_name (a .get) can actually be None here.
    if sql_content is None or workflow_title is None or instance_name is None or db_name is None or is_backup is None:
        context = {'errMsg': '页面提交参数可能为空'}
        return render(request, 'error.html', context)
    # Group permission check (user belongs to the group, group owns the instance).
    try:
        user_instances(request.user, 'master').get(instance_name=instance_name)
    except Exception:
        context = {'errMsg': '你所在组未关联该实例!'}
        return render(request, 'error.html', context)
    # (Previous behavior, kept for reference: strip comment lines and
    # collapse blank lines before submission.)
    # sql_content = ''.join(
    #     map(lambda x: re.compile(r'(^--.*|^/\*.*\*/;\s*$)').sub('', x, count=1),
    #         sql_content.splitlines(1))).strip()
    # sql_content = re.sub('[\r\n\f]{2,}', '\n', sql_content)
    sql_content = sql_content.strip()
    # NOTE(review): raises IndexError when sql_content is empty after strip —
    # confirm the form guarantees non-empty input.
    if sql_content[-1] != ";":
        context = {'errMsg': "SQL语句结尾没有以;结尾,请后退重新修改并提交!"}
        return render(request, 'error.html', context)
    # Hand the SQL to Inception for automatic review.
    try:
        inception_result = InceptionDao(
            instance_name=instance_name).sqlauto_review(sql_content, db_name)
    except Exception as msg:
        context = {'errMsg': msg}
        return render(request, 'error.html', context)
    if inception_result is None or len(inception_result) == 0:
        context = {'errMsg': 'inception返回的结果集为空!可能是SQL语句有语法错误'}
        return render(request, 'error.html', context)
    # Store the result as JSON so the workflow detail page can render it.
    json_result = json.dumps(inception_result)
    # Walk the result: if any statement failed auto-review, decide (per the
    # auto_review_wrong config) whether to mark the workflow as failed or let
    # it pass; passed manual-execution workflows may skip Inception on execute.
    sys_config = SysConfig().sys_config
    is_manual = 0
    workflow_status = Const.workflowStatus['manreviewing']
    for row in inception_result:
        # errlevel 1 is a warning and does not block execution.
        if row[2] == 1 and sys_config.get('auto_review_wrong', '') == '1':
            workflow_status = Const.workflowStatus['autoreviewwrong']
            break
        # errlevel 2 is a severe error or syntax Inception does not support:
        # mark for manual execution so Inception can be bypassed.
        elif row[2] == 2:
            is_manual = 1
            if sys_config.get('auto_review_wrong', '') in ('', '1', '2'):
                workflow_status = Const.workflowStatus['autoreviewwrong']
            break
        elif re.match(r"\w*comments\w*", row[4]):
            is_manual = 1
            if sys_config.get('auto_review_wrong', '') in ('', '1', '2'):
                workflow_status = Const.workflowStatus['autoreviewwrong']
            break
    # Classify the SQL: 1 = DDL, 2 = DML.
    sql_syntax = 2
    for row in sql_content.strip(';').split(';'):
        if re.match(r"^alter|^create|^drop|^truncate|^rename",
                    row.strip().lower()):
            sql_syntax = 1
            break
    # Create the workflow ticket; a transaction keeps the workflow row and
    # its audit records consistent.
    try:
        with transaction.atomic():
            # Persist the workflow.
            engineer = request.user.username
            if not workflow_id:
                sql_workflow = SqlWorkflow()
                sql_workflow.create_time = timezone.now()
            else:
                sql_workflow = SqlWorkflow.objects.get(id=int(workflow_id))
            sql_workflow.workflow_name = workflow_title
            sql_workflow.group_id = group_id
            sql_workflow.group_name = group_name
            sql_workflow.engineer = engineer
            sql_workflow.engineer_display = request.user.display
            sql_workflow.audit_auth_groups = Workflow.audit_settings(
                group_id, WorkflowDict.workflow_type['sqlreview'])
            sql_workflow.status = workflow_status
            sql_workflow.is_backup = is_backup
            sql_workflow.review_content = json_result
            sql_workflow.instance_name = instance_name
            sql_workflow.db_name = db_name
            sql_workflow.sql_content = sql_content
            sql_workflow.execute_result = ''
            sql_workflow.is_manual = is_manual
            sql_workflow.audit_remark = ''
            sql_workflow.sql_syntax = sql_syntax
            sql_workflow.save()
            workflow_id = sql_workflow.id
            # Only workflows that passed auto-review enter the audit workflow.
            if workflow_status == Const.workflowStatus['manreviewing']:
                # Insert audit info (SQL review uses workflow_type=2) and CC
                # the selected notification users.
                list_cc_addr = [
                    email['email'] for email in Users.objects.filter(
                        username__in=notify_users).values('email')
                ]
                workflowOb.addworkflowaudit(
                    request,
                    WorkflowDict.workflow_type['sqlreview'],
                    workflow_id,
                    list_cc_addr=list_cc_addr)
    except Exception as msg:
        logger.error(traceback.format_exc())
        context = {'errMsg': msg}
        return render(request, 'error.html', context)
    return HttpResponseRedirect(reverse('sql:detail', args=(workflow_id, )))
def slowquery_review_history(request):
    """Return per-interval slow-query detail records for one instance as JSON.

    Variant that gates Aliyun RDS access on the ``aliyun_rds_manage`` system
    config instead of the per-config ``is_enable`` flag; its annotations omit
    the TotalExecutionCounts/QueryTimePct95 columns of the sibling version.

    POST params: instance_name, StartTime, EndTime ('%Y-%m-%d'), db_name
    (optional), SQLId (optional checksum), limit, offset.
    """
    instance_name = request.POST.get('instance_name')
    # Server-side permission check.
    try:
        user_instances(request.user, 'master').get(instance_name=instance_name)
    except Exception:
        result = {'status': 1, 'msg': '你所在组未关联该主库', 'data': []}
        return HttpResponse(json.dumps(result), content_type='application/json')
    # Decide whether this is an Aliyun RDS instance or a plain one.
    instance_info = Instance.objects.get(instance_name=instance_name)
    if len(AliyunRdsConfig.objects.filter(instance_name=instance_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            # Delegate to the Aliyun RDS slow-log API.
            result = aliyun_rds_slowquery_review_history(request)
        else:
            # NOTE(review): raising here yields a 500 instead of the JSON
            # error shape used elsewhere — confirm intent.
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        StartTime = request.POST.get('StartTime')
        EndTime = request.POST.get('EndTime')
        DBName = request.POST.get('db_name')
        SQLId = request.POST.get('SQLId')
        limit = int(request.POST.get('limit'))
        offset = int(request.POST.get('offset'))
        # Make EndTime inclusive by extending it to the start of the next day.
        EndTime = datetime.datetime.strptime(EndTime,
                                             '%Y-%m-%d') + datetime.timedelta(days=1)
        # Convert (offset, count) into an end index for queryset slicing.
        limit = offset + limit
        # SQLId and DBName are both optional.
        if SQLId:
            # Detail rows for one specific statement fingerprint.
            slowsql_record_obj = SlowQueryHistory.objects.filter(
                hostname_max=(instance_info.host + ':' +
                              str(instance_info.port)),
                checksum=SQLId,
                ts_min__range=(StartTime, EndTime)
            ).annotate(ExecutionStartTime=F('ts_min'),  # execution start time
                       DBName=F('db_max'),  # schema name
                       HostAddress=F('user_max'),  # user name
                       SQLText=F('sample'),  # SQL statement
                       QueryTimes=F('query_time_sum'),  # execution time (s)
                       LockTimes=F('lock_time_sum'),  # lock time (s)
                       ParseRowCounts=F('rows_examined_sum'),  # rows examined
                       ReturnRowCounts=F('rows_sent_sum')  # rows returned
                       ).values(
                'ExecutionStartTime', 'DBName', 'HostAddress', 'SQLText',
                'QueryTimes', 'LockTimes', 'ParseRowCounts', 'ReturnRowCounts'
            )[offset:limit]
            # Unsliced count of matching rows.
            slowsql_obj_count = SlowQueryHistory.objects.filter(
                hostname_max=(instance_info.host + ':' +
                              str(instance_info.port)),
                checksum=SQLId,
                ts_min__range=(StartTime, EndTime)
            ).count()
        else:
            if DBName:
                # Detail rows restricted to one schema.
                slowsql_record_obj = SlowQueryHistory.objects.filter(
                    hostname_max=(instance_info.host + ':' +
                                  str(instance_info.port)),
                    db_max=DBName,
                    ts_min__range=(StartTime, EndTime)
                ).annotate(ExecutionStartTime=F('ts_min'),  # execution start time
                           DBName=F('db_max'),  # schema name
                           HostAddress=F('user_max'),  # user name
                           SQLText=F('sample'),  # SQL statement
                           QueryTimes=F('query_time_sum'),  # execution time (s)
                           LockTimes=F('lock_time_sum'),  # lock time (s)
                           ParseRowCounts=F('rows_examined_sum'),  # rows examined
                           ReturnRowCounts=F('rows_sent_sum')  # rows returned
                           ).values(
                    'ExecutionStartTime', 'DBName', 'HostAddress', 'SQLText',
                    'QueryTimes', 'LockTimes', 'ParseRowCounts', 'ReturnRowCounts'
                )[offset:limit]
                # Unsliced count of matching rows.
                slowsql_obj_count = SlowQueryHistory.objects.filter(
                    hostname_max=(instance_info.host + ':' +
                                  str(instance_info.port)),
                    db_max=DBName,
                    ts_min__range=(StartTime, EndTime)
                ).count()
            else:
                # Detail rows across all schemas of the instance.
                slowsql_record_obj = SlowQueryHistory.objects.filter(
                    hostname_max=(instance_info.host + ':' +
                                  str(instance_info.port)),
                    ts_min__range=(StartTime, EndTime)
                ).annotate(ExecutionStartTime=F('ts_min'),  # execution start time
                           DBName=F('db_max'),  # schema name
                           HostAddress=F('user_max'),  # user name
                           SQLText=F('sample'),  # SQL statement
                           QueryTimes=F('query_time_sum'),  # execution time (s)
                           LockTimes=F('lock_time_sum'),  # lock time (s)
                           ParseRowCounts=F('rows_examined_sum'),  # rows examined
                           ReturnRowCounts=F('rows_sent_sum')  # rows returned
                           ).values(
                    'ExecutionStartTime', 'DBName', 'HostAddress', 'SQLText',
                    'QueryTimes', 'LockTimes', 'ParseRowCounts', 'ReturnRowCounts'
                )[offset:limit]
                # Unsliced count of matching rows.
                slowsql_obj_count = SlowQueryHistory.objects.filter(
                    hostname_max=(instance_info.host + ':' +
                                  str(instance_info.port)),
                    ts_min__range=(StartTime, EndTime)
                ).count()
        # Serialize the QuerySet (values() rows are plain dicts).
        SQLSlowRecord = [SlowRecord for SlowRecord in slowsql_record_obj]
        result = {"total": slowsql_obj_count, "rows": SQLSlowRecord}
    # Return the query result.
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')