Example #1
0
def loginAuthenticate(username, password):
    """Authenticate a login attempt with failure-rate limiting.

    Keeps a per-username failure counter; an account that fails
    ``lockCntThreshold`` times within ``lockTimeThreshold`` seconds is
    locked until the window expires (defaults: 5 tries / 300 seconds).

    Returns a dict {'status': int, 'msg': str, 'data': user-or-''} where
    status 0 = success, 1 = bad credentials, 2 = empty input, 3 = locked.
    """
    sys_config = SysConfig().sys_config
    # Thresholds are configurable; fall back to 5 failures / 300 seconds.
    if sys_config.get('lock_cnt_threshold'):
        lockCntThreshold = int(sys_config.get('lock_cnt_threshold'))
    else:
        lockCntThreshold = 5
    if sys_config.get('lock_time_threshold'):
        lockTimeThreshold = int(sys_config.get('lock_time_threshold'))
    else:
        lockTimeThreshold = 300

    # Server-side validation of the submitted credentials.
    if username == "" or password == "" or username is None or password is None:
        result = {'status': 2, 'msg': '登录用户名或密码为空,请重新输入!', 'data': ''}
    elif username in login_failure_counter and login_failure_counter[username][
            "cnt"] >= lockCntThreshold and (
                datetime.datetime.now() - login_failure_counter[username]
                ["last_failure_time"]).total_seconds() <= lockTimeThreshold:
        # BUGFIX: use total_seconds() instead of .seconds — timedelta.seconds
        # wraps every 24 hours, so the lock window was computed wrongly once
        # more than a day had passed since the last failure.
        log_mail_record(
            'user:{},login failed, account locking...'.format(username))
        result = {'status': 3, 'msg': '登录失败超过5次,该账号已被锁定5分钟!', 'data': ''}
    else:
        user = authenticate(username=username, password=password)
        if user:
            # Successful login clears any recorded failures for this user.
            if username in login_failure_counter:
                login_failure_counter.pop(username)
            result = {'status': 0, 'msg': 'ok', 'data': user}
        else:
            if username not in login_failure_counter:
                # First failure: create the counter entry for this user.
                login_failure_counter[username] = {
                    "cnt": 1,
                    "last_failure_time": datetime.datetime.now()
                }
            else:
                if (datetime.datetime.now() -
                        login_failure_counter[username]["last_failure_time"]
                    ).total_seconds() <= lockTimeThreshold:
                    login_failure_counter[username]["cnt"] += 1
                else:
                    # Previous failure is older than the window: restart the
                    # count, so accounts auto-unlock after the window passes.
                    login_failure_counter[username]["cnt"] = 1
                login_failure_counter[username][
                    "last_failure_time"] = datetime.datetime.now()
            log_mail_record('user:{},login failed, fail count:{}'.format(
                username, login_failure_counter[username]["cnt"]))
            result = {'status': 1, 'msg': '用户名或密码错误,请重新输入!', 'data': ''}
    return result
Example #2
0
def tablesapce(request):
    """Return table-space statistics for a cluster as JSON.

    Aliyun RDS clusters are served through the RDS API (requires the
    ``aliyun_rds_manage`` switch); otherwise the master's
    information_schema.tables is queried directly for the 14 largest
    tables by total size.
    """
    cluster_name = request.POST.get('cluster_name')

    # Aliyun RDS instances cannot be queried directly.
    if len(AliyunRdsConfig.objects.filter(cluster_name=cluster_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            result = aliyun_sapce_status(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        master_info = MasterConfig.objects.get(cluster_name=cluster_name)
        # NOTE: the former `.format(cluster_name)` call on this statement
        # was a no-op (no placeholder in the string) and has been dropped.
        sql = '''
        SELECT
          table_schema,
          table_name,
          engine,
          TRUNCATE((data_length+index_length+data_free)/1024/1024,2) AS total_size,
          table_rows,
          TRUNCATE(data_length/1024/1024,2) AS data_size,
          TRUNCATE(index_length/1024/1024,2) AS index_size,
          TRUNCATE(data_free/1024/1024,2) AS data_free,
          TRUNCATE(data_free/(data_length+index_length+data_free)*100,2) AS pct_free
        FROM information_schema.tables 
        WHERE table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'test', 'sys')
          ORDER BY total_size DESC 
        LIMIT 14;'''
        table_space = dao.mysql_query(
            master_info.master_host, master_info.master_port,
            master_info.master_user,
            prpCryptor.decrypt(master_info.master_password),
            'information_schema', sql)
        # Pair each row's values with the returned column names.
        column_list = table_space['column_list']
        rows = [dict(zip(column_list, row)) for row in table_space['rows']]

        result = {'status': 0, 'msg': 'ok', 'data': rows}

    # Return the query result as JSON.
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #3
0
def sqladvisorcheck(request):
    """Run a SQL statement through SQLAdvisor and return its advice as JSON.

    Validates the submitted SQL and the user's access to the target master
    instance, then shells out to the configured ``sqladvisor`` binary and
    returns its stdout in ``data``.
    """
    sqlContent = request.POST.get('sql_content')
    instance_name = request.POST.get('instance_name')
    dbName = request.POST.get('db_name')
    verbose = request.POST.get('verbose')
    finalResult = {'status': 0, 'msg': 'ok', 'data': []}

    # Server-side parameter validation.
    if sqlContent is None or instance_name is None:
        finalResult['status'] = 1
        finalResult['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(finalResult), content_type='application/json')

    sqlContent = sqlContent.strip()
    # endswith() is safe on an empty string; the old sqlContent[-1] check
    # raised IndexError for whitespace-only submissions.
    if not sqlContent.endswith(';'):
        finalResult['status'] = 1
        finalResult['msg'] = 'SQL语句结尾没有以;结尾,请重新修改并提交!'
        return HttpResponse(json.dumps(finalResult), content_type='application/json')
    # The user must belong to a group associated with this master instance.
    try:
        user_instances(request.user, 'master').get(instance_name=instance_name)
    except Exception:
        finalResult['status'] = 1
        finalResult['msg'] = '你所在组未关联该主库!'
        return HttpResponse(json.dumps(finalResult), content_type='application/json')

    # Default verbosity when none was posted.
    if verbose is None or verbose == '':
        verbose = 1

    # Connection details of the master instance.
    instance_info = Instance.objects.get(instance_name=instance_name)

    # Hand the statement to sqladvisor for review.
    sqladvisor_path = SysConfig().sys_config.get('sqladvisor')
    sqlContent = sqlContent.strip().replace('"', '\\"').replace('`', '\`').replace('\n', ' ')
    try:
        # SECURITY NOTE(review): the command line is assembled by string
        # interpolation and run with shell=True; the escaping above only
        # covers double quotes and backticks. Consider shlex.quote() on
        # every argument, or an argv list with shell=False.
        p = subprocess.Popen(sqladvisor_path + ' -h "%s" -P "%s" -u "%s" -p "%s\" -d "%s" -v %s -q "%s"' % (
            str(instance_info.host), str(instance_info.port), str(instance_info.user),
            str(prpCryptor.decrypt(instance_info.password), ), str(dbName), verbose, sqlContent),
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, universal_newlines=True)
        stdout, stderr = p.communicate()
        finalResult['data'] = stdout
    except Exception:
        finalResult['data'] = 'sqladvisor运行报错,请联系管理员'
    return HttpResponse(json.dumps(finalResult), content_type='application/json')
Example #4
0
def kill_session(request):
    """Execute a kill statement against the cluster's master, or delegate
    to the Aliyun RDS API when RDS management is enabled, and return a
    JSON status dict."""
    cluster_name = request.POST.get('cluster_name')
    kill_sql = request.POST.get('request_params')

    result = {'status': 0, 'msg': 'ok', 'data': []}
    # RDS-managed or self-managed instance?
    if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
        result = aliyun_kill_session(request)
    else:
        master_info = master_config.objects.get(cluster_name=cluster_name)
        dao.mysql_execute(master_info.master_host,
                          master_info.master_port,
                          master_info.master_user,
                          prpCryptor.decrypt(master_info.master_password),
                          'information_schema',
                          kill_sql)

    # Return the result as JSON.
    return HttpResponse(json.dumps(result), content_type='application/json')
Example #5
0
    def __init__(self):
        """Load SMTP settings from the system configuration.

        Falls back to port 25 when no port is configured; configuration
        errors are printed rather than raised.
        """
        try:
            cfg = SysConfig().sys_config
            self.MAIL_REVIEW_SMTP_SERVER = cfg.get('mail_smtp_server')
            smtp_port = cfg.get('mail_smtp_port')
            self.MAIL_REVIEW_SMTP_PORT = int(smtp_port) if smtp_port else 25
            self.MAIL_REVIEW_FROM_ADDR = cfg.get('mail_smtp_user')
            self.MAIL_REVIEW_FROM_PASSWORD = cfg.get('mail_smtp_password')
        except (AttributeError, ValueError) as err:
            print("Error: %s" % err)
Example #6
0
def kill_session(request):
    """Kill a session on the given instance — via the Aliyun RDS API for
    RDS instances, directly otherwise — and return a JSON status dict."""
    instance_name = request.POST.get('instance_name')
    kill_sql = request.POST.get('request_params')

    result = {'status': 0, 'msg': 'ok', 'data': []}
    # RDS-managed or self-managed instance?
    is_rds = len(AliyunRdsConfig.objects.filter(instance_name=instance_name)) > 0
    if is_rds:
        if SysConfig().sys_config.get('aliyun_rds_manage') != 'true':
            raise Exception('未开启rds管理,无法查看rds数据!')
        result = aliyun_kill_session(request)
    else:
        Dao(instance_name=instance_name).mysql_execute('information_schema',
                                                       kill_sql)

    # Return the result as JSON.
    return HttpResponse(
        json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
        content_type='application/json')
Example #7
0
def execute_call_back(workflowId, clusterName, url):
    """Execute an approved SQL workflow via Inception and notify by mail.

    Loads the workflow, hands its SQL to Inception (split, then execute),
    stores the JSON-encoded result back on the workflow row, and — when
    the ``mail`` configuration switch is on — mails the submitter and
    reviewer(s), CCing every DBA.
    """
    workflowDetail = workflow.objects.get(id=workflowId)
    # Reviewer name(s); review_man may hold a comma-separated list.
    reviewMan = workflowDetail.review_man

    dictConn = getMasterConnStr(clusterName)
    try:
        # Let Inception split the SQL first, then execute it.
        (finalStatus, finalList) = InceptionDao().executeFinal(workflowDetail, dictConn)

        # Persist the execution result as JSON on the workflow record.
        strJsonResult = json.dumps(finalList)
        workflowDetail = workflow.objects.get(id=workflowId)
        workflowDetail.execute_result = strJsonResult
        workflowDetail.finish_time = timezone.now()
        workflowDetail.status = finalStatus
        workflowDetail.is_manual = 0
        workflowDetail.audit_remark = ''
        # Close the DB connection so a fresh one is opened on save(),
        # avoiding "connection gone away" timeouts after a long execution.
        connection.close()
        workflowDetail.save()
    except Exception as e:
        logger.error(e)

    # After execution, optionally notify by e-mail, controlled by the
    # 'mail' system configuration switch; DBAs must be informed of
    # reviewed-and-executed workflows.
    if SysConfig().sys_config.get('mail') == 'true':
        # One mail to the submitter and reviewers, CC to every DBA.
        engineer = workflowDetail.engineer
        workflowStatus = workflowDetail.status
        workflowName = workflowDetail.workflow_name
        strTitle = "SQL上线工单执行完毕 # " + str(workflowId)
        strContent = "发起人:" + engineer + "\n审核人:" + reviewMan + "\n工单地址:" + url \
                     + "\n工单名称: " + workflowName + "\n执行结果:" + workflowStatus
        # Recipients: submitter plus reviewers; CC the DBA role.
        notify_users = reviewMan.split(',')
        notify_users.append(engineer)
        listToAddr = [email['email'] for email in users.objects.filter(username__in=notify_users).values('email')]
        listCcAddr = [email['email'] for email in users.objects.filter(role='DBA').values('email')]
        MailSender().sendEmail(strTitle, strContent, listToAddr, listCcAddr=listCcAddr)
Example #8
0
def process(request):
    """Return the master's processlist for a cluster as JSON.

    ``command_type`` filters the COMMAND column: 'All' returns every
    thread, 'Not Sleep' excludes idle ones, and any other value is
    matched exactly. Aliyun RDS clusters are served through the RDS API.
    """
    cluster_name = request.POST.get('cluster_name')
    command_type = request.POST.get('command_type')

    base_sql = "select id, user, host, db, command, time, state, ifnull(info,'') as info from information_schema.processlist"
    # Aliyun RDS instances cannot be queried directly.
    if len(AliyunRdsConfig.objects.filter(cluster_name=cluster_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            result = aliyun_process_status(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        master_info = MasterConfig.objects.get(cluster_name=cluster_name)
        if command_type == 'All':
            sql = base_sql + ";"
        elif command_type == 'Not Sleep':
            sql = "{} where command<>'Sleep';".format(base_sql)
        else:
            # SECURITY: command_type comes straight from the request; double
            # any single quotes so it cannot break out of the SQL literal.
            sql = "{} where command= '{}';".format(
                base_sql, str(command_type).replace("'", "''"))
        processlist = dao.mysql_query(
            master_info.master_host, master_info.master_port,
            master_info.master_user,
            prpCryptor.decrypt(master_info.master_password),
            'information_schema', sql)
        # Pair each row's values with the returned column names.
        column_list = processlist['column_list']
        rows = [dict(zip(column_list, row)) for row in processlist['rows']]
        result = {'status': 0, 'msg': 'ok', 'data': rows}

    # Return the query result as JSON.
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #9
0
def create_kill_session(request):
    """Build 'kill <id>;' statements for the given processlist thread IDs.

    ``ThreadIDs`` is posted as a bracketed list string (e.g. "[1, 2]");
    every ID is validated as an integer before being interpolated into
    the information_schema.processlist lookup, preventing SQL injection.
    Returns a JSON status dict with the concatenated kill statements in
    ``data``.
    """
    cluster_name = request.POST.get('cluster_name')
    ThreadIDs = request.POST.get('ThreadIDs')

    result = {'status': 0, 'msg': 'ok', 'data': []}
    # Aliyun RDS instances are handled through the RDS API.
    if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
        result = aliyun_create_kill_session(request)
    else:
        master_info = master_config.objects.get(cluster_name=cluster_name)
        # SECURITY: only accept integer IDs — the raw request value used to
        # be formatted straight into the SQL statement.
        thread_ids = ThreadIDs.replace('[', '').replace(']', '')
        id_list = ','.join(str(int(tid)) for tid in thread_ids.split(','))
        sql = "select concat('kill ', id, ';') from information_schema.processlist where id in ({});".format(
            id_list)
        all_kill_sql = dao.mysql_query(
            master_info.master_host, master_info.master_port,
            master_info.master_user,
            prpCryptor.decrypt(master_info.master_password),
            'information_schema', sql)
        # Concatenate the generated kill statements into one string.
        result['data'] = ''.join(row[0] for row in all_kill_sql['rows'])
    # Return the result as JSON.
    return HttpResponse(json.dumps(result), content_type='application/json')
Example #10
0
def kill_session(request):
    """Run a kill statement on the cluster's master; RDS clusters go
    through the Aliyun API when RDS management is enabled. Returns a
    JSON status dict."""
    cluster_name = request.POST.get('cluster_name')
    kill_sql = request.POST.get('request_params')

    result = {'status': 0, 'msg': 'ok', 'data': []}
    # RDS-managed or self-managed instance?
    if len(AliyunRdsConfig.objects.filter(cluster_name=cluster_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') != 'true':
            raise Exception('未开启rds管理,无法查看rds数据!')
        result = aliyun_kill_session(request)
    else:
        master_info = MasterConfig.objects.get(cluster_name=cluster_name)
        dao.mysql_execute(master_info.master_host,
                          master_info.master_port,
                          master_info.master_user,
                          prpCryptor.decrypt(master_info.master_password),
                          'information_schema',
                          kill_sql)

    # Return the result as JSON.
    return HttpResponse(
        json.dumps(result, cls=ExtendJSONEncoder, bigint_as_string=True),
        content_type='application/json')
Example #11
0
def query(request):
    """Run a read-only SQL query on the cluster's slave and return JSON.

    Pipeline: validate the posted parameters, strip comments, allow only
    select/show/explain, apply query-privilege checks (which may cap
    ``limit_num``), append a LIMIT clause when missing, execute on the
    slave, optionally apply data masking, and record successful queries
    in the query log.
    """
    cluster_name = request.POST.get('cluster_name')
    sqlContent = request.POST.get('sql_content')
    dbName = request.POST.get('db_name')
    limit_num = request.POST.get('limit_num')

    finalResult = {'status': 0, 'msg': 'ok', 'data': {}}

    # Server-side parameter validation.
    if sqlContent is None or dbName is None or cluster_name is None or limit_num is None:
        finalResult['status'] = 1
        finalResult['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(finalResult),
                            content_type='application/json')

    sqlContent = sqlContent.strip()
    # endswith() is safe on an empty string; the old sqlContent[-1] check
    # raised IndexError for whitespace-only submissions.
    if not sqlContent.endswith(';'):
        finalResult['status'] = 1
        finalResult['msg'] = 'SQL语句结尾没有以;结尾,请重新修改并提交!'
        return HttpResponse(json.dumps(finalResult),
                            content_type='application/json')

    # Current user, used for privilege checks and query logging.
    user = request.user

    # Strip comment lines ("-- ..." / "/* ... */;") from the statement.
    sqlContent = ''.join(
        map(
            lambda x: re.compile(r'(^--\s+.*|^/\*.*\*/;\s*$)').sub(
                '', x, count=1), sqlContent.splitlines(True))).strip()
    # Collapse runs of blank lines.
    sqlContent = re.sub('[\r\n\f]{2,}', '\n', sqlContent)

    # Only select/show/explain statements are allowed.
    sql_list = sqlContent.strip().split('\n')
    for sql in sql_list:
        if re.match(r"^select|^show|^explain", sql.lower()):
            break
        else:
            finalResult['status'] = 1
            finalResult['msg'] = '仅支持^select|^show|^explain语法,请联系管理员!'
            return HttpResponse(json.dumps(finalResult),
                                content_type='application/json')

    # Use the slave's read-only account; only the first statement
    # (up to the first semicolon) is executed.
    slave_info = SlaveConfig.objects.get(cluster_name=cluster_name)
    sqlContent = sqlContent.strip().split(';')[0]

    # Query privilege check (may lower limit_num).
    priv_check_info = query_priv_check(user, cluster_name, dbName, sqlContent,
                                       limit_num)

    if priv_check_info['status'] == 0:
        limit_num = priv_check_info['data']
    else:
        return HttpResponse(json.dumps(priv_check_info),
                            content_type='application/json')

    # EXPLAIN statements never need a row limit.
    if re.match(r"^explain", sqlContent.lower()):
        limit_num = 0

    # Append a LIMIT clause to SELECTs that do not already have one.
    if re.match(r"^select", sqlContent.lower()):
        if re.search(r"limit\s+(\d+)$", sqlContent.lower()) is None:
            if re.search(r"limit\s+\d+\s*,\s*(\d+)$",
                         sqlContent.lower()) is None:
                sqlContent = sqlContent + ' limit ' + str(limit_num)

    sqlContent = sqlContent + ';'

    # Execute the query and time it.
    t_start = time.time()
    sql_result = dao.mysql_query(slave_info.slave_host, slave_info.slave_port,
                                 slave_info.slave_user,
                                 prpCryptor.decrypt(slave_info.slave_password),
                                 str(dbName), sqlContent, limit_num)
    t_end = time.time()
    cost_time = "%5s" % "{:.4f}".format(t_end - t_start)

    sql_result['cost_time'] = cost_time

    # Data masking: only when enabled and only for SELECTs; whether a
    # masking failure aborts the request depends on the query_check switch.
    t_start = time.time()
    if SysConfig().sys_config.get('data_masking') == 'true':
        if re.match(r"^select", sqlContent.lower()):
            try:
                masking_result = datamasking.data_masking(
                    cluster_name, dbName, sqlContent, sql_result)
            except Exception:
                if SysConfig().sys_config.get('query_check') == 'true':
                    finalResult['status'] = 1
                    finalResult['msg'] = '脱敏数据报错,请联系管理员'
                    return HttpResponse(json.dumps(finalResult),
                                        content_type='application/json')
            else:
                if masking_result['status'] != 0:
                    if SysConfig().sys_config.get('query_check') == 'true':
                        return HttpResponse(json.dumps(masking_result),
                                            content_type='application/json')

    t_end = time.time()
    masking_cost_time = "%5s" % "{:.4f}".format(t_end - t_start)

    sql_result['masking_cost_time'] = masking_cost_time

    finalResult['data'] = sql_result

    # Record successful queries in the query log.
    if sql_result.get('Error'):
        pass
    else:
        query_log = QueryLog()
        query_log.username = user.username
        query_log.user_display = user.display
        query_log.db_name = dbName
        query_log.cluster_name = cluster_name
        query_log.sqllog = sqlContent
        if int(limit_num) == 0:
            limit_num = int(sql_result['effect_row'])
        else:
            limit_num = min(int(limit_num), int(sql_result['effect_row']))
        query_log.effect_row = limit_num
        query_log.cost_time = cost_time
        # Retry once after reconnecting, to survive connection timeouts.
        # BUGFIX: the bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; narrowed to Exception.
        try:
            query_log.save()
        except Exception:
            connection.close()
            query_log.save()

    # Return the query result as JSON.
    return HttpResponse(json.dumps(finalResult,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #12
0
class InceptionDao(object):
    def __init__(self):
        """Read the Inception and remote-backup connection settings from
        the system configuration, defaulting the ports to 6669 / 3306."""
        self.sys_config = SysConfig().sys_config
        self.inception_host = self.sys_config.get('inception_host')
        inc_port = self.sys_config.get('inception_port')
        self.inception_port = int(inc_port) if inc_port else 6669

        self.inception_remote_backup_host = self.sys_config.get(
            'inception_remote_backup_host')
        bak_port = self.sys_config.get('inception_remote_backup_port')
        self.inception_remote_backup_port = int(bak_port) if bak_port else 3306
        self.inception_remote_backup_user = self.sys_config.get(
            'inception_remote_backup_user')
        self.inception_remote_backup_password = self.sys_config.get(
            'inception_remote_backup_password')
        self.prpCryptor = Prpcrypt()

    def criticalDDL(self, sqlContent):
        '''
        识别DROP DATABASE, DROP TABLE, TRUNCATE PARTITION, TRUNCATE TABLE等高危DDL操作,因为对于这些操作,inception在备份时只能备份METADATA,而不会备份数据!
        如果识别到包含高危操作,则返回“审核不通过”
        '''
        resultList = []
        criticalSqlFound = 0
        critical_ddl_regex = self.sys_config.get('critical_ddl_regex')
        p = re.compile(critical_ddl_regex)
        # 删除注释语句
        sqlContent = ''.join(
            map(
                lambda x: re.compile(r'(^--\s+.*|^/\*.*\*/;\s*$)').sub(
                    '', x, count=1), sqlContent.splitlines(1))).strip()

        for row in sqlContent.rstrip(';').split(';'):
            if p.match(row.strip().lower()):
                result = ('', '', 2, '驳回高危SQL',
                          '禁止提交匹配' + critical_ddl_regex + '条件的语句!', row, '',
                          '', '', '')
                criticalSqlFound = 1
            else:
                result = ('', '', 0, '', 'None', row, '', '', '', '')
            resultList.append(result)
        if criticalSqlFound == 1:
            return resultList
        else:
            return None

    def preCheck(self, sqlContent):
        '''
        在提交给inception之前,预先识别一些Inception不能正确审核的SQL,比如"alter table t1;"或"alter table test.t1;" 以免导致inception core dump
        '''
        resultList = []
        syntaxErrorSqlFound = 0
        for row in sqlContent.rstrip(';').split(';'):
            if re.match(
                    r"(\s*)alter(\s+)table(\s+)(\S+)(\s*);|(\s*)alter(\s+)table(\s+)(\S+)\.(\S+)(\s*);",
                    row.lower() + ";"):
                result = ('', '', 2, 'SQL语法错误', 'ALTER TABLE 必须带有选项', row, '',
                          '', '', '')
                syntaxErrorSqlFound = 1
            else:
                result = ('', '', 0, '', 'None', row, '', '', '', '')
            resultList.append(result)
        if syntaxErrorSqlFound == 1:
            return resultList
        else:
            return None

    def sqlautoReview(self, sqlContent, instance_name, db_name, isSplit="no"):
        '''
        Hand the SQL to Inception for automatic review and return the
        review result list.

        isSplit="yes" is only used by the OSC-progress feature (see the
        branch comment below); the default path performs a normal
        workflow review.
        '''
        listMasters = Instance.objects.filter(instance_name=instance_name)
        masterHost = listMasters[0].host
        masterPort = listMasters[0].port
        masterUser = listMasters[0].user
        masterPassword = self.prpCryptor.decrypt(listMasters[0].password)

        # High-risk DDL check, enabled when critical_ddl_regex is configured.
        if self.sys_config.get('critical_ddl_regex', '') != '':
            criticalDDL_check = self.criticalDDL(sqlContent)
        else:
            criticalDDL_check = None

        if criticalDDL_check is not None:
            result = criticalDDL_check
        else:
            preCheckResult = self.preCheck(sqlContent)
            if preCheckResult is not None:
                result = preCheckResult
            else:
                if isSplit == "yes":
                    # This branch serves only the OSC progress feature.
                    # When a workflow mixes DML and DDL, the SQL executed
                    # after splitting differs from the submitted SQL (a
                    # "use database;" is prepended to each statement), so
                    # the OSC progress lookup would otherwise compute the
                    # wrong SHA1. See the --enable-split option in the
                    # Inception documentation.

                    sqlSplit = "/*--user=%s; --password=%s; --host=%s; --enable-execute;--port=%s; --enable-ignore-warnings;--enable-split;*/\
                         inception_magic_start;\
                         use %s;\
                         %s\
                         inception_magic_commit;" % (
                        masterUser, masterPassword, masterHost,
                        str(masterPort), db_name, sqlContent)
                    splitResult = self._fetchall(sqlSplit, self.inception_host,
                                                 self.inception_port, '', '',
                                                 '')
                    tmpList = []
                    for splitRow in splitResult:
                        sqlTmp = splitRow[1]
                        sql = "/*--user=%s;--password=%s;--host=%s;--enable-check;--port=%s; --enable-ignore-warnings;*/\
                                inception_magic_start;\
                                %s\
                                inception_magic_commit;" % (
                            masterUser, masterPassword, masterHost,
                            str(masterPort), sqlTmp)
                        reviewResult = self._fetchall(sql, self.inception_host,
                                                      self.inception_port, '',
                                                      '', '')
                        tmpList.append(reviewResult)

                    # Flatten the per-fragment results into one list.
                    finalList = []
                    for splitRow in tmpList:
                        for sqlRow in splitRow:
                            finalList.append(list(sqlRow))
                    result = finalList
                else:
                    # Normal workflow review.
                    sql = "/*--user=%s;--password=%s;--host=%s;--enable-check=1;--port=%s;*/\
                      inception_magic_start;\
                      use %s;\
                      %s\
                      inception_magic_commit;" % (masterUser, masterPassword,
                                                  masterHost, str(masterPort),
                                                  db_name, sqlContent)
                    result = self._fetchall(sql, self.inception_host,
                                            self.inception_port, '', '', '')
        return result

    def executeFinal(self, workflowDetail, dictConn):
        '''
        Hand the SQL to Inception for final execution and return a
        (finalStatus, finalList) tuple shaped like sqlautoReview()'s
        return value, so the detail page can render both the same way.
        '''
        strBackup = ""
        if workflowDetail.is_backup == '是':
            strBackup = "--enable-remote-backup;"
        else:
            strBackup = "--disable-remote-backup;"

        # Per Inception's recommendation, split the SQL before executing it.
        sqlSplit = "/*--user=%s; --password=%s; --host=%s; --enable-execute;--port=%s; --enable-ignore-warnings;--enable-split;*/\
             inception_magic_start;\
             use %s;\
             %s\
             inception_magic_commit;" % (
            dictConn['masterUser'], dictConn['masterPassword'],
            dictConn['masterHost'], str(dictConn['masterPort']),
            workflowDetail.db_name, workflowDetail.sql_content)
        splitResult = self._fetchall(sqlSplit, self.inception_host,
                                     self.inception_port, '', '', '')

        tmpList = []
        # Execute each split fragment on a fresh short-lived connection;
        # no long-lived connection is needed here.
        for splitRow in splitResult:
            sqlTmp = splitRow[1]
            sqlExecute = "/*--user=%s;--password=%s;--host=%s;--enable-execute;--port=%s; --enable-ignore-warnings;%s*/\
                    inception_magic_start;\
                    %s\
                    inception_magic_commit;" % (
                dictConn['masterUser'], dictConn['masterPassword'],
                dictConn['masterHost'], str(
                    dictConn['masterPort']), strBackup, sqlTmp)

            executeResult = self._fetchall(sqlExecute, self.inception_host,
                                           self.inception_port, '', '', '')
            for sqlRow in executeResult:
                tmpList.append(sqlRow)
            # After each fragment, persist the partial result to
            # execute_result so OSC progress lookups can compare against it.
            workflowDetail.execute_result = json.dumps(tmpList)
            try:
                workflowDetail.save()
            except Exception:
                # Reconnect after closing, to survive connection timeouts.
                connection.close()
                workflowDetail.save()

        # Re-shape to match sqlautoReview()'s return format for rendering.
        finalStatus = "已正常结束"
        finalList = []
        for sqlRow in tmpList:
            # Any row with errLevel 1 or 2 whose stagestatus does not
            # contain "Execute Successfully" marks the run as abnormal.
            if (sqlRow[2] == 1 or sqlRow[2] == 2) and re.match(
                    r"\w*Execute Successfully\w*", sqlRow[3]) is None:
                finalStatus = "执行有异常"
            finalList.append(list(sqlRow))

        return (finalStatus, finalList)

    def getRollbackSqlList(self, workflowId):
        """Assemble rollback SQL for an executed workflow.

        Reads the stored Inception execute_result and, for every row that
        produced a backup, fetches the rollback statements from the remote
        backup database. Returns a list of [original_sql, rollback_sql]
        pairs, newest first.

        BUGFIX: the former ``except Exception as e: raise Exception(e)``
        wrapper only discarded the original exception type and traceback
        and was removed; errors now propagate unchanged.
        """
        workflowDetail = SqlWorkflow.objects.get(id=workflowId)
        listExecuteResult = json.loads(workflowDetail.execute_result)
        # Show rollback data in reverse execution order.
        listExecuteResult.reverse()
        listBackupSql = []
        for row in listExecuteResult:
            # row[8] holds the backup database name; the literal string
            # 'None' means Inception made no backup for this statement.
            if row[8] == 'None':
                continue
            backupDbName = row[8]
            sequence = row[7]
            sql = row[5]
            opidTime = sequence.replace("'", "")
            sqlTable = "select tablename from %s.$_$Inception_backup_information$_$ where opid_time='%s';" % (
                backupDbName, opidTime)
            listTables = self._fetchall(
                sqlTable, self.inception_remote_backup_host,
                self.inception_remote_backup_port,
                self.inception_remote_backup_user,
                self.inception_remote_backup_password, '')
            if listTables:
                tableName = listTables[0][0]
                sqlBack = "select rollback_statement from %s.%s where opid_time='%s'" % (
                    backupDbName, tableName, opidTime)
                listBackup = self._fetchall(
                    sqlBack, self.inception_remote_backup_host,
                    self.inception_remote_backup_port,
                    self.inception_remote_backup_user,
                    self.inception_remote_backup_password, '')
                block_rollback_sql_list = [sql]
                block_rollback_sql = '\n'.join(
                    [back_info[0] for back_info in listBackup])
                block_rollback_sql_list.append(block_rollback_sql)
                listBackupSql.append(block_rollback_sql_list)
        return listBackupSql

    def _fetchall(self, sql, paramHost, paramPort, paramUser, paramPasswd,
                  paramDb):
        '''
        Open a short-lived MySQL connection, run *sql* and return the
        cursor's fetchall() result. The cursor and connection are always
        closed.

        BUGFIX: errors now propagate with their original type and
        traceback — the former ``raise Exception(e)`` re-wrap lost both.
        The unused ``ret = cur.execute(sql)`` assignment was also removed.
        '''
        conn = None
        cur = None

        try:
            conn = MySQLdb.connect(host=paramHost,
                                   user=paramUser,
                                   passwd=paramPasswd,
                                   db=paramDb,
                                   port=paramPort,
                                   charset='utf8')
            cur = conn.cursor()
            cur.execute(sql)
            result = cur.fetchall()
        finally:
            # Release resources whether the query succeeded or raised.
            if cur is not None:
                cur.close()
            if conn is not None:
                conn.close()
        return result

    def getOscPercent(self, sqlSHA1):
        """Look up the OSC progress in inception for the statement identified
        by *sqlSHA1* and return a status/msg/data dict."""
        rows = self._fetchall("inception get osc_percent '%s'" % sqlSHA1,
                              self.inception_host, self.inception_port,
                              '', '', '')
        if rows:
            # Columns 3 and 4 of the first row carry the completion percentage
            # and the estimated remaining time
            return {
                "status": 0,
                "msg": "ok",
                "data": {
                    "percent": rows[0][3],
                    "timeRemained": rows[0][4]
                }
            }
        # Empty result set: inception no longer tracks this statement
        return {
            "status": 1,
            "msg": "没找到该SQL的进度信息,是否已经执行完毕?",
            "data": {
                "percent": -100,
                "timeRemained": -100
            }
        }

    def stopOscProgress(self, sqlSHA1):
        """Ask inception to stop the OSC process for the given SHA1.

        See http://mysql-inception.github.io/inception-document/osc/ for the
        inception commands involved and their caveats.
        """
        rows = self._fetchall("inception stop alter '%s'" % sqlSHA1,
                              self.inception_host, self.inception_port,
                              '', '', '')
        # A None result means inception found no matching OSC process
        if rows is None:
            return {
                "status": 1,
                "msg": "ERROR 2624 (HY000):未找到OSC执行进程,可能已经执行完成",
                "data": ""
            }
        return {
            "status": 0,
            "msg": "已成功停止OSC进程,请注意清理触发器和临时表,先清理触发器再删除临时表",
            "data": ""
        }

    def query_print(self, sqlContent, instance_name, dbName):
        '''
        Hand the SQL to inception and return its syntax-tree printout
        (used while reviewing a ticket).
        '''
        # Connection details of the target instance; the stored password is
        # encrypted and must be decrypted before use
        instance_info = Instance.objects.get(instance_name=instance_name)
        target_host = instance_info.host
        target_port = instance_info.port
        target_user = instance_info.user
        target_password = self.prpCryptor.decrypt(instance_info.password)

        # Wrap the statement in inception magic markers with query-print enabled
        sql = "/*--user=%s;--password=%s;--host=%s;--port=%s;--enable-query-print;*/\
                          inception_magic_start;\
                          use %s;\
                          %s\
                          inception_magic_commit;" % (
            target_user, target_password, target_host, str(target_port),
            dbName, sqlContent)
        return self._fetchall(sql, self.inception_host, self.inception_port,
                              '', '', '')

    # Aggregate inception execution statistics
    def statistic(self):
        """Sum the per-operation counters from the ``statistic`` table on the
        inception remote-backup instance.

        Returns the Dao query result; on any failure an empty result
        structure is returned (deliberate best-effort — statistics are
        non-critical).
        """
        stat_sql = '''
             select
                 sum(deleting)     deleting,
                 sum(inserting)    inserting,
                 sum(updating)     updating,
                 sum(selecting)    selecting,
                 sum(altertable)   altertable,
                 sum(renaming)     renaming,
                 sum(createindex)  createindex,
                 sum(dropindex)    dropindex,
                 sum(addcolumn)    addcolumn,
                 sum(dropcolumn)   dropcolumn,
                 sum(changecolumn) changecolumn,
                 sum(alteroption)  alteroption,
                 sum(alterconvert) alterconvert,
                 sum(createtable)  createtable,
                 sum(droptable)    droptable,
                 sum(createdb)     createdb,
                 sum(truncating)   truncating
               from statistic;'''
        try:
            backup_dao = Dao(host=self.inception_remote_backup_host,
                             port=self.inception_remote_backup_port,
                             user=self.inception_remote_backup_user,
                             password=self.inception_remote_backup_password)
            return backup_dao.mysql_query('inception', stat_sql)
        except Exception:
            return {'column_list': [], 'rows': [], 'effect_row': 0}
Exemple #13
0
    def addworkflowaudit(self, request, workflow_type, workflow_id, **kwargs):
        """Create the audit-trail record for a newly submitted workflow.

        Looks up the concrete workflow (query-privilege apply or SQL review),
        validates that an approval chain is configured, optionally auto-passes
        SQL-review tickets when auto_review is enabled, inserts the
        WorkflowAudit row and sends the notification message.

        Raises Exception when a pending audit already exists, the workflow
        type is unknown, or no approval chain is configured.
        Returns {'status', 'msg', 'data': {'workflow_status': ...}}.
        """
        result = {'status': 0, 'msg': '', 'data': []}

        # Reject duplicate submission: a still-pending audit must not exist
        if WorkflowAudit.objects.filter(
                workflow_type=workflow_type,
                workflow_id=workflow_id,
                current_status=WorkflowDict.workflow_status[
                    'audit_wait']).exists():
            result['msg'] = '该工单当前状态为待审核,请勿重复提交'
            raise Exception(result['msg'])

        # Collect title/group/creator/approval chain from the concrete workflow
        if workflow_type == WorkflowDict.workflow_type['query']:
            workflow_detail = QueryPrivilegesApply.objects.get(
                apply_id=workflow_id)
            workflow_title = workflow_detail.title
            group_id = workflow_detail.group_id
            group_name = workflow_detail.group_name
            create_user = workflow_detail.user_name
            audit_auth_groups = workflow_detail.audit_auth_groups
            workflow_remark = ''
        elif workflow_type == WorkflowDict.workflow_type['sqlreview']:
            workflow_detail = SqlWorkflow.objects.get(pk=workflow_id)
            workflow_title = workflow_detail.workflow_name
            group_id = workflow_detail.group_id
            group_name = workflow_detail.group_name
            create_user = workflow_detail.engineer
            audit_auth_groups = workflow_detail.audit_auth_groups
            workflow_remark = ''
        else:
            result['msg'] = '工单类型不存在'
            raise Exception(result['msg'])

        # An approval chain must be configured before submission
        if audit_auth_groups is None:
            result['msg'] = '审批流程不能为空,请先配置审批流程'
            raise Exception(result['msg'])
        audit_auth_groups_list = audit_auth_groups.split(',')

        # auto_review: SQL-review tickets matching the auto-pass rules skip
        # approval entirely (the chain is cleared for the branch below)
        if SysConfig().sys_config.get('auto_review', False) == 'true':
            if workflow_type == WorkflowDict.workflow_type['sqlreview']:
                if is_autoreview(workflow_id):
                    sql_workflow = SqlWorkflow.objects.get(id=int(workflow_id))
                    sql_workflow.audit_auth_groups = '无需审批'
                    sql_workflow.status = '审核通过'
                    sql_workflow.save()
                    audit_auth_groups_list = None

        # Insert the audit master record; common fields first, then the
        # outcome-specific ones
        auditInfo = WorkflowAudit()
        auditInfo.group_id = group_id
        auditInfo.group_name = group_name
        auditInfo.workflow_id = workflow_id
        auditInfo.workflow_type = workflow_type
        auditInfo.workflow_title = workflow_title
        auditInfo.workflow_remark = workflow_remark
        if audit_auth_groups_list is None:
            # No approval chain -> record the ticket as already approved
            auditInfo.audit_auth_groups = ''
            auditInfo.current_audit = '-1'
            auditInfo.next_audit = '-1'
            auditInfo.current_status = WorkflowDict.workflow_status[
                'audit_success']
            result['msg'] = '无审核配置,直接审核通过'
        else:
            # Pending approval: first group audits now, second (if any) is next
            auditInfo.audit_auth_groups = ','.join(audit_auth_groups_list)
            auditInfo.current_audit = audit_auth_groups_list[0]
            if len(audit_auth_groups_list) == 1:
                auditInfo.next_audit = '-1'
            else:
                auditInfo.next_audit = audit_auth_groups_list[1]
            auditInfo.current_status = WorkflowDict.workflow_status[
                'audit_wait']
        auditInfo.create_user = create_user
        auditInfo.create_user_display = request.user.display
        auditInfo.save()
        result['data'] = {'workflow_status': auditInfo.current_status}

        # Notify: the link points at the audit detail page; CC list is optional
        workflow_url = "{}://{}/workflow/{}".format(request.scheme,
                                                    request.get_host(),
                                                    auditInfo.audit_id)
        email_cc = kwargs.get('listCcAddr', [])
        send_msg(auditInfo.audit_id,
                 0,
                 workflow_url=workflow_url,
                 email_cc=email_cc)
        # Return the creation result
        return result
Exemple #14
0
        'id', 'cluster_name', 'master_host', 'master_port', 'master_user',
        'master_password', 'create_time', 'update_time'
    ]


# Workflow (SQL ticket) management in the Django admin
@admin.register(workflow)
class workflowAdmin(admin.ModelAdmin):
    # Columns shown on the admin changelist page
    list_display = ('id', 'workflow_name', 'group_id', 'cluster_name',
                    'engineer', 'create_time', 'status', 'is_backup')
    # Fields reachable from the admin search box
    search_fields = [
        'id', 'workflow_name', 'engineer', 'review_man', 'sql_content'
    ]


# Register the query-slave admin only when the 'query' feature is switched
# on in the system configuration (registration happens at import time).
if SysConfig().sys_config.get('query') == 'true':
    # Query slave configuration
    @admin.register(slave_config)
    # NOTE(review): the class name says WorkflowAuditAdmin but it administers
    # slave_config — presumably a copy/paste leftover; confirm before renaming.
    class WorkflowAuditAdmin(admin.ModelAdmin):
        # Columns shown on the admin changelist page
        list_display = ('cluster_name', 'slave_host', 'slave_port',
                        'slave_user', 'create_time', 'update_time')
        # Fields reachable from the admin search box
        search_fields = [
            'id',
            'cluster_name',
            'slave_host',
            'slave_port',
            'slave_user',
            'slave_password',
        ]

    # 审批流程配置
Exemple #15
0
def menu():
    """Build the left-hand navigation for the UI.

    Returns three tuples of button dicts: entries for all users, entries for
    superusers, and documentation links.  Each entry's 'display' flag is the
    string 'true'/'false' (several come straight from the system config).
    """
    sys_config = SysConfig().sys_config
    # Only advertise the SQLAdvisor entry when the tool has been configured
    advisor_cfg = sys_config.get('sqladvisor')
    sqladvisor_display = 'false' if advisor_cfg in ('', None) else 'true'

    def _btn(key, name, url, css, display):
        # One navigation entry in the shape the templates expect
        return {
            'key': key,
            'name': name,
            'url': url,
            'class': css,
            'display': display
        }

    leftMenuBtnsCommon = (
        _btn('sqlworkflow', 'SQL上线工单', '/sqlworkflow/',
             'glyphicon glyphicon-home', 'true'),
        _btn('sqlquery', 'SQL在线查询', '/sqlquery/',
             'glyphicon glyphicon-search', sys_config.get('query')),
        _btn('slowquery', 'SQL慢查日志', '/slowquery/',
             'glyphicon glyphicon-align-right', sys_config.get('slowquery')),
        _btn('sqladvisor', 'SQL优化工具', '/sqladvisor/',
             'glyphicon glyphicon-wrench', sqladvisor_display),
        _btn('queryapply', '查询权限管理', '/queryapplylist/',
             'glyphicon glyphicon-eye-open', sys_config.get('query')),
        _btn('diagnosis', '主库会话管理', '/diagnosis_process/',
             'glyphicon  glyphicon-scissors',
             sys_config.get('db_process_manage')),
    )

    leftMenuBtnsSuper = (
        _btn('config', '系统配置管理', '/config/',
             'glyphicon glyphicon glyphicon-option-horizontal', 'true'),
        _btn('admin', '后台数据管理', '/admin/',
             'glyphicon glyphicon-list', 'true'),
    )

    leftMenuBtnsDoc = (
        _btn('dbaprinciples', 'SQL审核必读', '/dbaprinciples/',
             'glyphicon glyphicon-book', 'true'),
        _btn('charts', '统计图表展示', '/charts/',
             'glyphicon glyphicon-file', 'true'),
    )
    return leftMenuBtnsCommon, leftMenuBtnsSuper, leftMenuBtnsDoc
Exemple #16
0
def slowquery_review(request):
    """Aggregated slow-query report for one cluster, as a JSON HTTP response.

    POST params: cluster_name, StartTime, EndTime ('%Y-%m-%d'), optional
    db_name, limit, offset.  For Aliyun RDS instances the report is fetched
    from the RDS API; otherwise it is aggregated from the local pt-query-digest
    tables (SlowQuery / SlowQueryHistory).
    """
    cluster_name = request.POST.get('cluster_name')

    # RDS instances are served by the Aliyun slow-log API instead of the
    # local history tables
    cluster_info = master_config.objects.get(cluster_name=cluster_name)
    if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
        result = aliyun_rds_slowquery_review(request)
    else:
        StartTime = request.POST.get('StartTime')
        EndTime = request.POST.get('EndTime')
        DBName = request.POST.get('db_name')
        limit = int(request.POST.get('limit'))
        offset = int(request.POST.get('offset'))
        limit = offset + limit  # slice upper bound for pagination

        # A single-day request means the half-open day [StartTime, StartTime+1d)
        if StartTime == EndTime:
            EndTime = datetime.datetime.strptime(
                EndTime, '%Y-%m-%d') + datetime.timedelta(days=1)

        # Shared filter for both the page slice and the count; db_name is
        # optional and narrows the report to one schema
        filters = {
            'slowqueryhistory__hostname_max':
            (cluster_info.master_host + ':' + str(cluster_info.master_port)),
            'slowqueryhistory__ts_min__range': (StartTime, EndTime),
            'last_seen__range': (StartTime, EndTime),
        }
        if DBName:
            filters['slowqueryhistory__db_max'] = DBName

        # Querysets are lazy and immutable, so one annotated base can back
        # both the ordered slice and the count (previously duplicated 4x)
        base_query = SlowQuery.objects.filter(**filters).annotate(
            CreateTime=F('last_seen'),
            SQLId=F('checksum'),
            DBName=F('slowqueryhistory__db_max'),  # database
            SQLText=F('fingerprint'),  # normalized SQL statement
        ).values('CreateTime', 'SQLId', 'DBName', 'SQLText').annotate(
            MySQLTotalExecutionCounts=Sum(
                'slowqueryhistory__ts_cnt'),  # total executions
            MySQLTotalExecutionTimes=Sum(
                'slowqueryhistory__query_time_sum'),  # total execution time
            ParseTotalRowCounts=Sum(
                'slowqueryhistory__rows_examined_sum'),  # total rows examined
            ReturnTotalRowCounts=Sum(
                'slowqueryhistory__rows_sent_sum'),  # total rows returned
        )
        # Most-executed first, then page
        slowsql_obj = base_query.order_by(
            '-MySQLTotalExecutionCounts')[offset:limit]
        slowsql_obj_count = base_query.count()

        # Materialize the queryset so it can be JSON-serialized
        SQLSlowLog = list(slowsql_obj)
        result = {"total": slowsql_obj_count, "rows": SQLSlowLog}

    # Return the report as JSON
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Exemple #17
0
import simplejson as json
from django.contrib.auth.decorators import permission_required

from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse

from sql.utils.aes_decryptor import Prpcrypt
from sql.utils.dao import Dao
from sql.utils.extend_json_encoder import ExtendJSONEncoder
from sql.utils.config import SysConfig
from .models import MasterConfig, AliyunRdsConfig

# Aliyun RDS helpers are only imported when RDS management is enabled in the
# system configuration (keeps the Aliyun SDK optional for other deployments).
if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
    from .aliyun_rds import process_status as aliyun_process_status, \
        create_kill_session as aliyun_create_kill_session, kill_session as aliyun_kill_session, \
        sapce_status as aliyun_sapce_status

# Module-level singletons shared by the view functions below
dao = Dao()
prpCryptor = Prpcrypt()


# 问题诊断--进程列表
@csrf_exempt
@permission_required('sql.process_view', raise_exception=True)
def process(request):
    cluster_name = request.POST.get('cluster_name')
    command_type = request.POST.get('command_type')

    base_sql = "select id, user, host, db, command, time, state, ifnull(info,'') as info from information_schema.processlist"
    # 判断是RDS还是其他实例
    if len(AliyunRdsConfig.objects.filter(cluster_name=cluster_name)) > 0:
Exemple #18
0
def query_priv_check(user, cluster_name, dbName, sqlContent, limit_num):
    """Check the user's query privilege for *sqlContent* and work out the
    effective row limit.

    Superusers skip privilege checks and are only capped by the
    admin_query_limit setting.  ``show create table`` is validated directly
    against db/table privileges (inception cannot parse it); every other
    statement is resolved to table level via inception's syntax-tree print.

    Returns {'status', 'msg', 'data'}: data carries the final limit on
    success; status 1 with a message when the user lacks privileges.
    """
    finalResult = {'status': 0, 'msg': 'ok', 'data': {}}
    # One timestamp for all validity checks in this call
    now = datetime.datetime.now()
    # Superuser: no privilege check, only the global admin cap applies
    if user.is_superuser:
        if SysConfig().sys_config.get('admin_query_limit'):
            user_limit_num = int(
                SysConfig().sys_config.get('admin_query_limit'))
        else:
            user_limit_num = 0
        if int(limit_num) == 0:
            limit_num = int(user_limit_num)
        else:
            limit_num = min(int(limit_num), int(user_limit_num))
    # "show create table" makes inception error out, so it is checked
    # directly here; explain statements skip validation entirely
    elif re.match(r"^show\s+create\s+table", sqlContent.lower()):
        # flags=re.I so the prefix is stripped whatever the user's casing —
        # the match above runs on the lower-cased SQL, but the sub runs on
        # the original text (previously a case-sensitive, non-raw pattern)
        tb_name = re.sub(r'^show\s+create\s+table',
                         '',
                         sqlContent,
                         count=1,
                         flags=re.I).strip()
        # Database-wide privilege first ...
        db_privileges = QueryPrivileges.objects.filter(
            user_name=user.username,
            cluster_name=cluster_name,
            db_name=dbName,
            priv_type=1,
            valid_date__gte=now,
            is_deleted=0)
        # ... then fall back to a table-level privilege
        if len(db_privileges) == 0:
            tb_privileges = QueryPrivileges.objects.filter(
                user_name=user.username,
                cluster_name=cluster_name,
                db_name=dbName,
                table_name=tb_name,
                priv_type=2,
                valid_date__gte=now,
                is_deleted=0)
            if len(tb_privileges) == 0:
                finalResult['status'] = 1
                finalResult[
                    'msg'] = '你无' + dbName + '.' + tb_name + '表的查询权限!请先到查询权限管理进行申请'
                return finalResult
    # Ordinary SQL query: can be validated down to table level
    else:
        # Use inception's syntax-tree print to find the referenced tables
        table_ref_result = datamasking.query_table_ref(sqlContent + ';',
                                                       cluster_name, dbName)

        # Parse succeeded: check privileges per referenced table
        if table_ref_result['status'] == 0:
            table_ref = table_ref_result['data']
            QueryPrivilegesOb = QueryPrivileges.objects.filter(
                user_name=user.username, cluster_name=cluster_name)
            for table in table_ref:
                # Database-wide privilege first ...
                db_privileges = QueryPrivilegesOb.filter(
                    db_name=table['db'],
                    priv_type=1,
                    valid_date__gte=now,
                    is_deleted=0)
                # ... then fall back to a table-level privilege
                if len(db_privileges) == 0:
                    tb_privileges = QueryPrivilegesOb.filter(
                        db_name=table['db'],
                        table_name=table['table'],
                        valid_date__gte=now,
                        is_deleted=0)
                    if len(tb_privileges) == 0:
                        finalResult['status'] = 1
                        finalResult['msg'] = '你无' + table['db'] + '.' + table[
                            'table'] + '表的查询权限!请先到查询权限管理进行申请'
                        return finalResult

        # Parse failed: fall back to database-level checks so a broken
        # syntax-tree print cannot bypass all validation
        else:
            table_ref = None
            privileges = QueryPrivileges.objects.filter(
                user_name=user.username,
                cluster_name=cluster_name,
                db_name=dbName,
                valid_date__gte=now,
                is_deleted=0)
            if len(privileges) == 0:
                finalResult['status'] = 1
                finalResult['msg'] = '你无' + dbName + '数据库的查询权限!请先到查询权限管理进行申请'
                return finalResult
            # With strict query_check the parse error itself is returned
            if SysConfig().sys_config.get('query_check') == 'true':
                return table_ref_result

        # Smallest limit across the tables involved in the query
        if table_ref:
            db_list = [table_info['db'] for table_info in table_ref]
            table_list = [table_info['table'] for table_info in table_ref]
            user_limit_num = QueryPrivileges.objects.filter(
                user_name=user.username,
                cluster_name=cluster_name,
                db_name__in=db_list,
                table_name__in=table_list,
                valid_date__gte=now,
                is_deleted=0).aggregate(Min('limit_num'))['limit_num__min']
            if user_limit_num is None:
                # No table-level rows matched: fall back to the database limit
                user_limit_num = QueryPrivileges.objects.filter(
                    user_name=user.username,
                    cluster_name=cluster_name,
                    db_name=dbName,
                    valid_date__gte=now,
                    is_deleted=0).aggregate(Min('limit_num'))['limit_num__min']
        else:
            # Tables unknown: use the database-level limit
            user_limit_num = QueryPrivileges.objects.filter(
                user_name=user.username,
                cluster_name=cluster_name,
                db_name=dbName,
                valid_date__gte=now,
                is_deleted=0).aggregate(Min('limit_num'))['limit_num__min']
        if int(limit_num) == 0:
            limit_num = user_limit_num
        else:
            # NOTE(review): user_limit_num may still be None if the matched
            # privilege rows carry no limit_num — min() would raise here;
            # confirm the column is non-null before relying on this path.
            limit_num = min(int(limit_num), user_limit_num)
    finalResult['data'] = limit_num
    return finalResult
Exemple #19
0
def _send(audit_id, msg_type, **kwargs):
    """Assemble and deliver the notification for one workflow-audit event.

    msg_type: 0 sends mail and DingTalk (each gated by its own sys-config
    switch), 1 sends mail only, 2 sends DingTalk only.
    kwargs: workflow_url (link embedded in the message); email_cc (CC list,
    used in the apply and approved stages).
    Delivery errors are logged and swallowed.
    """
    msg_sender = MailSender()
    sys_config = SysConfig().sys_config
    audit_info = WorkflowAudit.objects.get(audit_id=audit_id)
    workflow_id = audit_info.workflow_id
    workflow_type = audit_info.workflow_type
    status = audit_info.current_status
    workflow_title = audit_info.workflow_title
    workflow_from = audit_info.create_user_display
    workflow_url = kwargs.get('workflow_url')
    webhook_url = SqlGroup.objects.get(group_id=audit_info.group_id).ding_webhook

    # Re-fetch by (workflow_id, workflow_type) to resolve the approval chain
    audit_info = WorkflowAudit.objects.get(workflow_id=workflow_id, workflow_type=workflow_type)
    if audit_info.audit_auth_groups == '':
        workflow_auditors = '无需审批'
    else:
        try:
            workflow_auditors = '->'.join([Group.objects.get(id=auth_group_id).name for auth_group_id in
                                           audit_info.audit_auth_groups.split(',')])
        except Exception:
            # Group deleted or id unparsable: fall back to the raw id string
            workflow_auditors = audit_info.audit_auth_groups
    if audit_info.current_audit == '-1':
        # '-1' marks "no pending auditor"
        current_workflow_auditors = None
    else:
        try:
            current_workflow_auditors = Group.objects.get(id=audit_info.current_audit).name
        except Exception:
            current_workflow_auditors = audit_info.current_audit

    # Build the message body from the concrete workflow record
    if workflow_type == WorkflowDict.workflow_type['query']:
        workflow_type_display = WorkflowDict.workflow_type['query_display']
        workflow_detail = QueryPrivilegesApply.objects.get(apply_id=workflow_id)
        workflow_audit_remark = ''
        # priv_type 1 = database-level grant, 2 = table-level grant
        if workflow_detail.priv_type == 1:
            workflow_content = '''数据库清单:{}\n授权截止时间:{}\n结果集:{}\n'''.format(
                workflow_detail.db_list,
                datetime.datetime.strftime(workflow_detail.valid_date, '%Y-%m-%d %H:%M:%S'),
                workflow_detail.limit_num)
        elif workflow_detail.priv_type == 2:
            workflow_content = '''数据库:{}\n表清单:{}\n授权截止时间:{}\n结果集:{}\n'''.format(
                workflow_detail.db_list,
                workflow_detail.table_list,
                datetime.datetime.strftime(workflow_detail.valid_date, '%Y-%m-%d %H:%M:%S'),
                workflow_detail.limit_num)
        # NOTE(review): workflow_content stays unbound for any other priv_type
        # and would raise NameError below — confirm 1/2 are the only values.
    elif workflow_type == WorkflowDict.workflow_type['sqlreview']:
        workflow_type_display = WorkflowDict.workflow_type['sqlreview_display']
        workflow_detail = SqlWorkflow.objects.get(pk=workflow_id)
        workflow_audit_remark = workflow_detail.audit_remark
        workflow_content = workflow_detail.sql_content
    else:
        raise Exception('工单类型不正确')

    # Pick title/recipients/body layout by the workflow's current status
    if status == WorkflowDict.workflow_status['audit_wait']:  # newly applied
        msg_title = "[{}]新的工单申请#{}".format(workflow_type_display, audit_id)
        # Recipients: every user of the currently auditing group in this project
        auth_group_names = Group.objects.get(id=audit_info.current_audit).name
        msg_email_reciver = [user.email for user in
                             auth_group_users([auth_group_names], audit_info.group_id)]
        # CC list passed by the caller
        email_cc = kwargs.get('email_cc', [])
        msg_email_cc = email_cc
        msg_content = '''发起人:{}\n审批流程:{}\n当前审批:{}\n工单名称:{}\n工单地址:{}\n工单详情预览:{}\n'''.format(
            workflow_from,
            workflow_auditors,
            current_workflow_auditors,
            workflow_title,
            workflow_url,
            workflow_content)
    elif status == WorkflowDict.workflow_status['audit_success']:  # approved
        msg_title = "[{}]工单审核通过#{}".format(workflow_type_display, audit_id)
        # Recipient: the ticket creator
        msg_email_reciver = [Users.objects.get(username=audit_info.create_user).email]
        # CC list passed by the caller
        msg_email_cc = kwargs.get('email_cc', [])
        msg_content = '''发起人:{}\n审批流程:{}\n工单名称:{}\n工单地址:{}\n工单详情预览:{}\n'''.format(
            workflow_from,
            workflow_auditors,
            workflow_title,
            workflow_url,
            workflow_content)
    elif status == WorkflowDict.workflow_status['audit_reject']:  # rejected
        msg_title = "[{}]工单被驳回#{}".format(workflow_type_display, audit_id)
        # Recipient: the ticket creator
        msg_email_reciver = [Users.objects.get(username=audit_info.create_user).email]
        msg_email_cc = []
        msg_content = '''工单名称:{}\n工单地址:{}\n驳回原因:{}\n提醒:此工单被审核不通过,请按照驳回原因进行修改!'''.format(
            workflow_title,
            workflow_url,
            workflow_audit_remark)
    elif status == WorkflowDict.workflow_status['audit_abort']:  # aborted by creator: notify all auditors
        msg_title = "[{}]提交人主动终止工单#{}".format(workflow_type_display, audit_id)
        # Recipients: all users of every group in the approval chain
        auth_group_names = [Group.objects.get(id=auth_group_id).name for auth_group_id in
                            audit_info.audit_auth_groups.split(',')]
        msg_email_reciver = [user.email for user in auth_group_users(auth_group_names, audit_info.group_id)]
        msg_email_cc = []
        msg_content = '''发起人:{}\n工单名称:{}\n工单地址:{}\n提醒:提交人主动终止流程'''.format(
            workflow_from,
            workflow_title,
            workflow_url)
    else:
        raise Exception('工单状态不正确')

    # Normalize single addresses into lists
    if isinstance(msg_email_reciver, str):
        msg_email_reciver = [msg_email_reciver]
    if isinstance(msg_email_cc, str):
        msg_email_cc = [msg_email_cc]

    # Dispatch via mail and/or DingTalk depending on msg_type and sys config
    try:
        if msg_type == 0:
            if sys_config.get('mail') == 'true':
                msg_sender.send_email(msg_title, msg_content, msg_email_reciver, listCcAddr=msg_email_cc)
            if sys_config.get('ding') == 'true':
                msg_sender.send_ding(webhook_url, msg_title + '\n' + msg_content)
        if msg_type == 1:
            if sys_config.get('mail') == 'true':
                msg_sender.send_email(msg_title, msg_content, msg_email_reciver, listCcAddr=msg_email_cc)
        elif msg_type == 2:
            if sys_config.get('ding') == 'true':
                msg_sender.send_ding(webhook_url, msg_title + '\n' + msg_content)
    except Exception:
        logger.error(traceback.format_exc())
Exemple #20
0
def slowquery_review_history(request):
    """Return slow-query detail history for a cluster as a JSON HttpResponse.

    POST params:
        cluster_name: name of the cluster to inspect.
        StartTime / EndTime: date range, '%Y-%m-%d' strings.
        db_name: optional database-name filter.
        SQLId: optional slow-query checksum filter (takes precedence
            over db_name, matching the original branch order).
        limit / offset: paging parameters (must be int-parseable).

    If the system config has aliyun_rds_manage == 'true', the query is
    delegated to the Aliyun RDS slow-log API instead of the local
    SlowQueryHistory table.
    """
    cluster_name = request.POST.get('cluster_name')

    # Determine whether this is an Aliyun RDS instance or a self-managed one
    cluster_info = master_config.objects.get(cluster_name=cluster_name)
    if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
        # Delegate to the Aliyun RDS slow-log API
        result = aliyun_rds_slowquery_review_history(request)
    else:
        StartTime = request.POST.get('StartTime')
        EndTime = request.POST.get('EndTime')
        DBName = request.POST.get('db_name')
        SQLId = request.POST.get('SQLId')
        limit = int(request.POST.get('limit'))
        offset = int(request.POST.get('offset'))

        # If the range collapses to a single day, extend EndTime by one day
        # so the whole day is covered by ts_min__range.
        if StartTime == EndTime:
            EndTime = datetime.datetime.strptime(
                EndTime, '%Y-%m-%d') + datetime.timedelta(days=1)
        # Convert (offset, limit) paging into a slice upper bound
        limit = offset + limit

        # Build the filter once instead of duplicating the whole query in
        # three branches. SQLId and DBName are optional; SQLId wins when
        # both are supplied (same precedence as the original if/else chain).
        filters = {
            'hostname_max': (cluster_info.master_host + ':' +
                             str(cluster_info.master_port)),
            'ts_min__range': (StartTime, EndTime),
        }
        if SQLId:
            filters['checksum'] = int(SQLId)
        elif DBName:
            filters['db_max'] = DBName

        # Slow-query detail rows, renamed via annotate() to the field names
        # the frontend expects.
        slowsql_record_obj = SlowQueryHistory.objects.filter(
            **filters).annotate(
                ExecutionStartTime=F('ts_min'),         # execution start time
                DBName=F('db_max'),                     # database name
                HostAddress=F('user_max'),              # user name
                SQLText=F('sample'),                    # SQL statement
                QueryTimes=F('query_time_sum'),         # execution time (s)
                LockTimes=F('lock_time_sum'),           # lock time (s)
                ParseRowCounts=F('rows_examined_sum'),  # rows examined
                ReturnRowCounts=F('rows_sent_sum')      # rows returned
            ).values('ExecutionStartTime', 'DBName', 'HostAddress',
                     'SQLText', 'QueryTimes', 'LockTimes',
                     'ParseRowCounts', 'ReturnRowCounts')[offset:limit]

        # Total row count for the same filter (unsliced), for paging UI
        slowsql_obj_count = SlowQueryHistory.objects.filter(**filters).count()

        # Materialize the QuerySet for JSON serialization
        SQLSlowRecord = list(slowsql_record_obj)
        result = {"total": slowsql_obj_count, "rows": SQLSlowRecord}

    # Return the query result as JSON
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')