Example #1
def execute_skipinc_call_back(workflowId, instance_name, db_name, sql_content,
                              url):
    workflowDetail = SqlWorkflow.objects.get(id=workflowId)
    try:
        # Execute the SQL
        t_start = time.time()
        execute_result = Dao(instance_name=instance_name).mysql_execute(
            db_name, sql_content)
        t_end = time.time()
        execute_time = "%5s" % "{:.4f}".format(t_end - t_start)
        execute_result['execute_time'] = execute_time + 'sec'

        workflowDetail = SqlWorkflow.objects.get(id=workflowId)
        if execute_result.get('Warning'):
            workflowDetail.status = Const.workflowStatus['exception']
        elif execute_result.get('Error'):
            workflowDetail.status = Const.workflowStatus['exception']
        else:
            workflowDetail.status = Const.workflowStatus['finish']
        workflowDetail.finish_time = timezone.now()
        workflowDetail.execute_result = json.dumps(execute_result)
        workflowDetail.is_manual = 1
        workflowDetail.audit_remark = ''
        workflowDetail.is_backup = '否'
        # Close the connection so it is re-acquired later, avoiding timeouts
        connection.close()
        workflowDetail.save()
    except Exception as e:
        logger.error(e)

    # Send the notification
    send_msg(workflowDetail, url)
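
The callback above blocks while the SQL runs, so a caller would normally hand it off to a worker. A minimal sketch, not taken from the project (the dispatch helper name is an assumption), of running it on a background thread so the HTTP request can return immediately:

import threading

def dispatch_skipinc_execution(workflow_id, instance_name, db_name, sql_content, url):
    # Fire-and-forget: run the callback defined above without blocking the request.
    t = threading.Thread(target=execute_skipinc_call_back,
                         args=(workflow_id, instance_name, db_name, sql_content, url))
    t.start()
    return t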
Example #2
 def __init__(self, instance_name, db_name, sqltext):
     self.dao = Dao(instance_name=instance_name, flag=True)
     self.db_name = db_name
     self.sqltext = sqltext
     self.sql_variable = '''
 select
   lower(variable_name),
   variable_value
 from performance_schema.global_variables
 where upper(variable_name) in ('%s')
 order by variable_name;''' % ('\',\''.join(SQLTuning.SYS_PARM_FILTER))
     self.sql_optimizer_switch = '''
 select variable_value
 from performance_schema.global_variables
 where upper(variable_name) = 'OPTIMIZER_SWITCH';
 '''
     self.sql_table_info = '''
 select
   table_name,
   engine,
   row_format                                           as format,
   table_rows,
   avg_row_length                                       as avg_row,
   round((data_length + index_length) / 1024 / 1024, 2) as total_mb,
   round((data_length) / 1024 / 1024, 2)                as data_mb,
   round((index_length) / 1024 / 1024, 2)               as index_mb
 from information_schema.tables
 where table_schema = '%s' and table_name = '%s'
 '''
     self.sql_table_index = '''
 select
   table_name,
   index_name,
   non_unique,
   seq_in_index,
   column_name,
   collation,
   cardinality,
   nullable,
   index_type
 from information_schema.statistics
 where table_schema = '%s' and table_name = '%s'
 order by 1, 3;
 '''
Example #3
def create_kill_session(request):
    instance_name = request.POST.get('instance_name')
    ThreadIDs = request.POST.get('ThreadIDs')

    result = {'status': 0, 'msg': 'ok', 'data': []}
    # Check whether this is an RDS instance or a regular one
    if len(AliyunRdsConfig.objects.filter(instance_name=instance_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            result = aliyun_create_kill_session(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        ThreadIDs = ThreadIDs.replace('[', '').replace(']', '')
        sql = "select concat('kill ', id, ';') from information_schema.processlist where id in ({});".format(
            ThreadIDs)
        all_kill_sql = Dao(instance_name=instance_name).mysql_query(
            'information_schema', sql)
        kill_sql = ''
        for row in all_kill_sql['rows']:
            kill_sql = kill_sql + row[0]
        result['data'] = kill_sql
    # Return the query result
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #4
 def statistic(self):
     sql = '''
          select
              sum(deleting)     deleting,
              sum(inserting)    inserting,
              sum(updating)     updating,
              sum(selecting)    selecting,
              sum(altertable)   altertable,
              sum(renaming)     renaming,
              sum(createindex)  createindex,
              sum(dropindex)    dropindex,
              sum(addcolumn)    addcolumn,
              sum(dropcolumn)   dropcolumn,
              sum(changecolumn) changecolumn,
              sum(alteroption)  alteroption,
              sum(alterconvert) alterconvert,
              sum(createtable)  createtable,
              sum(droptable)    droptable,
              sum(createdb)     createdb,
              sum(truncating)   truncating
            from statistic;'''
     return Dao().mysql_query(self.inception_remote_backup_host,
                              self.inception_remote_backup_port,
                              self.inception_remote_backup_user,
                              self.inception_remote_backup_password,
                              'inception',
                              sql)
Example #5
 def statistic(self):
     sql = '''
          select
              sum(deleting)     deleting,
              sum(inserting)    inserting,
              sum(updating)     updating,
              sum(selecting)    selecting,
              sum(altertable)   altertable,
              sum(renaming)     renaming,
              sum(createindex)  createindex,
              sum(dropindex)    dropindex,
              sum(addcolumn)    addcolumn,
              sum(dropcolumn)   dropcolumn,
              sum(changecolumn) changecolumn,
              sum(alteroption)  alteroption,
              sum(alterconvert) alterconvert,
              sum(createtable)  createtable,
              sum(droptable)    droptable,
              sum(createdb)     createdb,
              sum(truncating)   truncating
            from statistic;'''
     try:
         return Dao(
             host=self.inception_remote_backup_host,
             user=self.inception_remote_backup_user,
             port=self.inception_remote_backup_port,
             password=self.inception_remote_backup_password).mysql_query(
                 'inception', sql)
     except Exception:
         return {'column_list': [], 'rows': [], 'effect_row': 0}
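
Several of these views repeat the same row-to-dict conversion on the result of Dao.mysql_query(). Judging from the fallback dict in the except branch above, the result is assumed to have the shape {'column_list': [...], 'rows': [...], 'effect_row': n}. A small helper (hypothetical, not part of the source) that captures the repeated pattern:

def rows_as_dicts(query_result):
    # Zip every row with the column names returned by Dao.mysql_query().
    column_list = query_result.get('column_list', [])
    return [dict(zip(column_list, row)) for row in query_result.get('rows', [])]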
Example #6
def process(request):
    instance_name = request.POST.get('instance_name')
    command_type = request.POST.get('command_type')

    base_sql = "select id, user, host, db, command, time, state, ifnull(info,'') as info from information_schema.processlist"
    # Check whether this is an RDS instance or a regular one
    if len(
            AliyunRdsConfig.objects.filter(instance_name=instance_name,
                                           is_enable=1)) > 0:
        result = aliyun_process_status(request)
    else:
        if command_type == 'All':
            sql = base_sql + ";"
        elif command_type == 'Not Sleep':
            sql = "{} where command<>'Sleep';".format(base_sql)
        else:
            sql = "{} where command= '{}';".format(base_sql, command_type)
        processlist = Dao(instance_name=instance_name).mysql_query(
            'information_schema', sql)
        column_list = processlist['column_list']
        rows = []
        for row in processlist['rows']:
            row_info = {}
            for row_index, row_item in enumerate(row):
                row_info[column_list[row_index]] = row_item
            rows.append(row_info)
        result = {'status': 0, 'msg': 'ok', 'data': rows}

    # Return the query result
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #7
def create_kill_session(request):
    instance_name = request.POST.get('instance_name')
    thread_ids = request.POST.get('ThreadIDs')

    result = {'status': 0, 'msg': 'ok', 'data': []}
    # Check whether this is an RDS instance or a regular one
    if len(
            AliyunRdsConfig.objects.filter(instance_name=instance_name,
                                           is_enable=1)) > 0:
        result = aliyun_create_kill_session(request)
    else:
        thread_ids = thread_ids.replace('[', '').replace(']', '')
        sql = "select concat('kill ', id, ';') from information_schema.processlist where id in ({});".format(
            thread_ids)
        all_kill_sql = Dao(instance_name=instance_name).mysql_query(
            'information_schema', sql)
        kill_sql = ''
        for row in all_kill_sql['rows']:
            kill_sql = kill_sql + row[0]
        result['data'] = kill_sql
    # Return the query result
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
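
create_kill_session only builds the "kill <id>;" statements from the processlist; kill_session (shown in later examples) executes whatever the client posts back. A hypothetical end-to-end sketch of the non-RDS path, assuming only the Dao interface used above:

def kill_threads(instance_name, thread_ids):
    # Build "kill <id>;" statements from the processlist, then execute them.
    id_list = ','.join(str(i) for i in thread_ids)
    sql = ("select concat('kill ', id, ';') from information_schema.processlist "
           "where id in ({});".format(id_list))
    dao = Dao(instance_name=instance_name)
    kill_sql = ''.join(row[0] for row in dao.mysql_query('information_schema', sql)['rows'])
    if kill_sql:
        dao.mysql_execute('information_schema', kill_sql)
    return kill_sql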
Example #8
def execute_skipinc_call_back(workflowId, instance_name, db_name, sql_content,
                              url):
    workflowDetail = SqlWorkflow.objects.get(id=workflowId)
    try:
        # Execute the SQL
        t_start = time.time()
        execute_result = Dao(instance_name=instance_name).mysql_execute(
            db_name, sql_content)
        t_end = time.time()
        execute_time = "%5s" % "{:.4f}".format(t_end - t_start)
        execute_result['execute_time'] = execute_time + 'sec'

        workflowDetail = SqlWorkflow.objects.get(id=workflowId)
        if execute_result.get('Warning'):
            workflowDetail.status = Const.workflowStatus['exception']
        elif execute_result.get('Error'):
            workflowDetail.status = Const.workflowStatus['exception']
        else:
            workflowDetail.status = Const.workflowStatus['finish']
        workflowDetail.finish_time = timezone.now()
        workflowDetail.execute_result = json.dumps(execute_result)
        workflowDetail.is_manual = 1
        workflowDetail.audit_remark = ''
        workflowDetail.is_backup = '否'
        # Close the connection so it is re-acquired later, avoiding timeouts
        connection.close()
        workflowDetail.save()
    except Exception:
        logger.error(traceback.format_exc())

    # Add a workflow log entry
    # Get the audit_id
    audit_id = Workflow.auditinfobyworkflow_id(
        workflow_id=workflowId,
        workflow_type=WorkflowDict.workflow_type['sqlreview']).audit_id
    Workflow.add_workflow_log(audit_id=audit_id,
                              operation_type=6,
                              operation_type_desc='执行结束',
                              operation_info='执行结果:{}'.format(
                                  workflowDetail.status),
                              operator='',
                              operator_display='系统')

    # Send the notification
    send_msg(workflowDetail, url)
Example #9
def users(request):
    instance_name = request.POST.get('instance_name')
    sql_get_user = '''select concat("\'", user, "\'", '@', "\'", host,"\'") as query from mysql.user;'''
    dao = Dao(instance_name=instance_name)
    db_users = dao.mysql_query('mysql', sql_get_user)['rows']
    # Get each user's privilege information
    data = []
    for db_user in db_users:
        user_info = {}
        user_priv = dao.mysql_query('mysql', 'show grants for {};'.format(
            db_user[0]))['rows']
        user_info['user'] = db_user[0]
        user_info['privileges'] = user_priv
        data.append(user_info)

    result = {'status': 0, 'msg': 'ok', 'data': data}
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #10
def get_db_name_list(request):
    instance_name = request.POST.get('instance_name')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    try:
        # Get this instance's connection info so we can connect and list all databases
        db_list = Dao(instance_name=instance_name).get_alldb_by_cluster()
        # Convert result to JSON for storage in the database, so SQL ticket details can be displayed
        result['data'] = db_list
    except Exception as msg:
        result['status'] = 1
        result['msg'] = str(msg)

    return HttpResponse(json.dumps(result), content_type='application/json')
Example #11
def execute_skipinc_call_back(workflowId, clusterName, db_name, sql_content, url):
    workflowDetail = SqlWorkflow.objects.get(id=workflowId)
    # Get the reviewer
    reviewMan = workflowDetail.review_man

    # Get the instance connection info
    masterInfo = getMasterConnStr(clusterName)
    try:
        # Execute the SQL
        t_start = time.time()
        execute_result = Dao().mysql_execute(masterInfo['masterHost'], masterInfo['masterPort'],
                                             masterInfo['masterUser'],
                                             masterInfo['masterPassword'], db_name, sql_content)
        t_end = time.time()
        execute_time = "%5s" % "{:.4f}".format(t_end - t_start)
        execute_result['execute_time'] = execute_time + 'sec'

        workflowDetail = SqlWorkflow.objects.get(id=workflowId)
        if execute_result.get('Warning'):
            workflowDetail.status = Const.workflowStatus['exception']
        elif execute_result.get('Error'):
            workflowDetail.status = Const.workflowStatus['exception']
        else:
            workflowDetail.status = Const.workflowStatus['finish']
        workflowDetail.finish_time = timezone.now()
        workflowDetail.execute_result = json.dumps(execute_result)
        workflowDetail.is_manual = 1
        workflowDetail.audit_remark = ''
        workflowDetail.is_backup = '否'
        # Close the connection so it is re-acquired later, avoiding timeouts
        connection.close()
        workflowDetail.save()
    except Exception as e:
        logger.error(e)

    # Send the notification
    send_msg(workflowDetail, url)
Example #12
def getTableNameList(request):
    instance_name = request.POST.get('instance_name')
    db_name = request.POST.get('db_name')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    try:
        # Get the connection info of this instance's slave so we can connect and list all tables
        tb_list = Dao(instance_name=instance_name).getAllTableByDb(db_name)
        # Convert result to JSON for storage in the database, so SQL ticket details can be displayed
        result['data'] = tb_list
    except Exception as msg:
        result['status'] = 1
        result['msg'] = str(msg)

    return HttpResponse(json.dumps(result), content_type='application/json')
Example #13
def get_column_name_list(request):
    instance_name = request.POST.get('instance_name')
    db_name = request.POST.get('db_name')
    tb_name = request.POST.get('tb_name')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    try:
        # Get this instance's connection info so we can connect and list the table's columns
        col_list = Dao(instance_name=instance_name).get_all_columns_by_tb(db_name, tb_name)
        # Convert result to JSON for storage in the database, so SQL ticket details can be displayed
        result['data'] = col_list
    except Exception as msg:
        result['status'] = 1
        result['msg'] = str(msg)
    return HttpResponse(json.dumps(result), content_type='application/json')
Example #14
def tablesapce(request):
    instance_name = request.POST.get('instance_name')

    # Check whether this is an RDS instance or a regular one
    if len(
            AliyunRdsConfig.objects.filter(instance_name=instance_name,
                                           is_enable=1)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage'):
            result = aliyun_sapce_status(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        sql = '''
        SELECT
          table_schema,
          table_name,
          engine,
          TRUNCATE((data_length+index_length+data_free)/1024/1024,2) AS total_size,
          table_rows,
          TRUNCATE(data_length/1024/1024,2) AS data_size,
          TRUNCATE(index_length/1024/1024,2) AS index_size,
          TRUNCATE(data_free/1024/1024,2) AS data_free,
          TRUNCATE(data_free/(data_length+index_length+data_free)*100,2) AS pct_free
        FROM information_schema.tables 
        WHERE table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'test', 'sys')
          ORDER BY total_size DESC 
        LIMIT 14;'''
        table_space = Dao(instance_name=instance_name).mysql_query(
            'information_schema', sql)
        column_list = table_space['column_list']
        rows = []
        for row in table_space['rows']:
            row_info = {}
            for row_index, row_item in enumerate(row):
                row_info[column_list[row_index]] = row_item
            rows.append(row_info)

        result = {'status': 0, 'msg': 'ok', 'data': rows}

    # Return the query result
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #15
def binlog_list(request):
    instance_name = request.POST.get('instance_name')
    binlog = Dao(instance_name=instance_name).mysql_query(
        'information_schema', 'show binary logs;')
    column_list = binlog['column_list']
    rows = []
    for row in binlog['rows']:
        row_info = {}
        for row_index, row_item in enumerate(row):
            row_info[column_list[row_index]] = row_item
        rows.append(row_info)

    result = {'status': 0, 'msg': 'ok', 'data': rows}

    # Return the query result
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #16
def explain(request):
    sql_content = request.POST.get('sql_content')
    instance_name = request.POST.get('instance_name')
    db_name = request.POST.get('db_name')
    result = {'status': 0, 'msg': 'ok', 'data': []}

    # Server-side parameter validation
    if sql_content is None or instance_name is None:
        result['status'] = 1
        result['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(result),
                            content_type='application/json')

    sql_content = sql_content.strip()
    if sql_content[-1] != ";":
        result['status'] = 1
        result['msg'] = 'SQL语句结尾没有以;结尾,请重新修改并提交!'
        return HttpResponse(json.dumps(result),
                            content_type='application/json')

    # Only statements that start with EXPLAIN are allowed
    if not re.match(r"^explain", sql_content.lower()):
        result['status'] = 1
        result['msg'] = '仅支持explain开头的语句,请检查'
        return HttpResponse(json.dumps(result),
                            content_type='application/json')

    # Split on semicolons and keep only the first valid SQL statement
    sql_content = sql_content.strip().split(';')[0]

    # Run the statement to obtain the execution plan
    sql_result = Dao(instance_name=instance_name).mysql_query(
        str(db_name), sql_content)

    result['data'] = sql_result

    # Return the query result
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #17
def instanceusercreate(request):
    if request.GET:

        instances = [
            instance.instance_name
            for instance in user_instances(request.user, 'all')
        ]
        exteral_ip = get_out_ip()

        context = {'instances': instances, 'user_host': exteral_ip}
        return render(request, 'instanceusercreate.html', context)
    else:
        user_name = request.POST.get('user', '')
        password = request.POST.get('password', '')
        privileges = request.POST.get('privilegs')
        instance_name = request.POST.get('instance_name')
        user_host = request.POST.get('user_host')
        schema = request.POST.get('schema')
        tables = request.POST.get('tables')
        if user_name == '' and privileges == '' and user_host == '':
            result = {'status': 1, 'msg': 'user, privileges and host must not be empty', 'data': []}
            return HttpResponse(json.dumps(result), content_type='application/json')

        if password == '':
            password = generate_random_password(32)
        if schema is None or schema == '':
            schema = '*'
        data = {}
        data["user"] = user_name
        data['host'] = user_host
        data['schema'] = schema
        data['tables'] = tables
        result = {'status': 0, 'msg': 'ok', 'data': []}

        try:
            # Get this instance's connection info so we can connect and create the user
            tb_list = Dao(instance_name=instance_name).mysql_createuser(data)
            # Convert result to JSON for storage in the database, so SQL ticket details can be displayed
            result['data'] = tb_list
        except Exception as msg:
            result['status'] = 1
            result['msg'] = str(msg)
        return HttpResponse(json.dumps(result), content_type='application/json')
Example #18
def kill_session(request):
    instance_name = request.POST.get('instance_name')
    request_params = request.POST.get('request_params')

    result = {'status': 0, 'msg': 'ok', 'data': []}
    # Check whether this is an RDS instance or a regular one
    if len(
            AliyunRdsConfig.objects.filter(instance_name=instance_name,
                                           is_enable=1)) > 0:
        result = aliyun_kill_session(request)
    else:
        kill_sql = request_params
        Dao(instance_name=instance_name).mysql_execute('information_schema',
                                                       kill_sql)

    # Return the query result
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #19
def kill_session(request):
    instance_name = request.POST.get('instance_name')
    request_params = request.POST.get('request_params')

    result = {'status': 0, 'msg': 'ok', 'data': []}
    # Check whether this is an RDS instance or a regular one
    if len(AliyunRdsConfig.objects.filter(instance_name=instance_name)) > 0:
        if SysConfig().sys_config.get('aliyun_rds_manage') == 'true':
            result = aliyun_kill_session(request)
        else:
            raise Exception('未开启rds管理,无法查看rds数据!')
    else:
        kill_sql = request_params
        Dao(instance_name=instance_name).mysql_execute('information_schema',
                                                       kill_sql)

    # Return the query result
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #20
def trxandlocks(request):
    instance_name = request.POST.get('instance_name')
    sql = '''
    SELECT
      rtrx.`trx_state`                                                        AS "等待的状态",
      rtrx.`trx_started`                                                      AS "等待事务开始时间",
      rtrx.`trx_wait_started`                                                 AS "等待事务等待开始时间",
      lw.`requesting_trx_id`                                                  AS "等待事务ID",
      rtrx.trx_mysql_thread_id                                                AS "等待事务线程ID",
      rtrx.`trx_query`                                                        AS "等待事务的sql",
      CONCAT(rl.`lock_mode`, '-', rl.`lock_table`, '(', rl.`lock_index`, ')') AS "等待的表信息",
      rl.`lock_id`                                                            AS "等待的锁id",
      lw.`blocking_trx_id`                                                    AS "运行的事务id",
      trx.trx_mysql_thread_id                                                 AS "运行的事务线程id",
      CONCAT(l.`lock_mode`, '-', l.`lock_table`, '(', l.`lock_index`, ')')    AS "运行的表信息",
      l.lock_id                                                               AS "运行的锁id",
      trx.`trx_state`                                                         AS "运行事务的状态",
      trx.`trx_started`                                                       AS "运行事务的时间",
      trx.`trx_wait_started`                                                  AS "运行事务的等待开始时间",
      trx.`trx_query`                                                         AS "运行事务的sql"
    FROM information_schema.`INNODB_LOCKS` rl
      , information_schema.`INNODB_LOCKS` l
      , information_schema.`INNODB_LOCK_WAITS` lw
      , information_schema.`INNODB_TRX` rtrx
      , information_schema.`INNODB_TRX` trx
    WHERE rl.`lock_id` = lw.`requested_lock_id`
          AND l.`lock_id` = lw.`blocking_lock_id`
          AND lw.requesting_trx_id = rtrx.trx_id
          AND lw.blocking_trx_id = trx.trx_id;'''

    trxandlocks = Dao(instance_name=instance_name).mysql_query(
        'information_schema', sql)
    result = {'status': 0, 'msg': 'ok', 'data': trxandlocks}

    # Return the query result
    return HttpResponse(json.dumps(result,
                                   cls=ExtendJSONEncoder,
                                   bigint_as_string=True),
                        content_type='application/json')
Example #21
def instance(request):
    result = {'status': 0, 'msg': 'ok', 'data': []}
    instance_id = request.POST.get('instance_id')
    instance_name = Instance.objects.get(id=instance_id).instance_name
    dao = Dao(instance_name=instance_name)
    try:
        conn = MySQLdb.connect(host=dao.host,
                               port=dao.port,
                               user=dao.user,
                               passwd=dao.password,
                               charset='utf8')
        cursor = conn.cursor()
        sql = "select 1"
        cursor.execute(sql)
    except Exception as e:
        result['status'] = 1
        result['msg'] = '无法连接实例{},\n{}'.format(instance_name, str(e))
    else:
        cursor.close()
        conn.close()
    # Return the result
    return HttpResponse(json.dumps(result), content_type='application/json')
Example #22
def query(request):
    instance_name = request.POST.get('instance_name')
    sqlContent = request.POST.get('sql_content')
    dbName = request.POST.get('db_name')
    limit_num = request.POST.get('limit_num')

    finalResult = {'status': 0, 'msg': 'ok', 'data': {}}

    # Server-side parameter validation
    if sqlContent is None or dbName is None or instance_name is None or limit_num is None:
        finalResult['status'] = 1
        finalResult['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(finalResult), content_type='application/json')

    sqlContent = sqlContent.strip()
    if sqlContent[-1] != ";":
        finalResult['status'] = 1
        finalResult['msg'] = 'SQL语句结尾没有以;结尾,请重新修改并提交!'
        return HttpResponse(json.dumps(finalResult), content_type='application/json')

    # Get the user info
    user = request.user

    # Strip comment statements and non-query statements
    sqlContent = ''.join(
        map(lambda x: re.compile(r'(^--\s+.*|^/\*.*\*/;\s*$)').sub('', x, count=1),
            sqlContent.splitlines(1))).strip()
    # Remove blank lines
    sqlContent = re.sub('[\r\n\f]{2,}', '\n', sqlContent)

    sql_list = sqlContent.strip().split('\n')
    for sql in sql_list:
        if re.match(r"^select|^show|^explain", sql.lower()):
            break
        else:
            finalResult['status'] = 1
            finalResult['msg'] = '仅支持^select|^show|^explain语法,请联系管理员!'
            return HttpResponse(json.dumps(finalResult), content_type='application/json')

    # Get this instance's connection info (read-only account); split on semicolons and execute only the first valid SQL statement
    slave_info = Instance.objects.get(instance_name=instance_name)
    sqlContent = sqlContent.strip().split(';')[0]

    # Query privilege check
    priv_check_info = query_priv_check(user, instance_name, dbName, sqlContent, limit_num)

    if priv_check_info['status'] == 0:
        limit_num = priv_check_info['data']
    else:
        return HttpResponse(json.dumps(priv_check_info), content_type='application/json')

    if re.match(r"^explain", sqlContent.lower()):
        limit_num = 0

    # Add a LIMIT clause to the query SQL
    if re.match(r"^select", sqlContent.lower()):
        if re.search(r"limit\s+(\d+)$", sqlContent.lower()) is None:
            if re.search(r"limit\s+\d+\s*,\s*(\d+)$", sqlContent.lower()) is None:
                sqlContent = sqlContent + ' limit ' + str(limit_num)

    sqlContent = sqlContent + ';'

    # Run the query and measure the execution time
    t_start = time.time()
    sql_result = Dao(instance_name=instance_name).mysql_query(str(dbName), sqlContent, limit_num)
    t_end = time.time()
    cost_time = "%5s" % "{:.4f}".format(t_end - t_start)

    sql_result['cost_time'] = cost_time

    # Data masking: check the config for whether masking is enabled and whether execution may continue if parse-tree analysis fails
    t_start = time.time()
    if SysConfig().sys_config.get('data_masking') == 'true':
        # Only apply masking to SELECT statements
        if re.match(r"^select", sqlContent.lower()):
            try:
                masking_result = datamasking.data_masking(instance_name, dbName, sqlContent, sql_result)
            except Exception:
                if SysConfig().sys_config.get('query_check') == 'true':
                    finalResult['status'] = 1
                    finalResult['msg'] = '脱敏数据报错,请联系管理员'
                    return HttpResponse(json.dumps(finalResult), content_type='application/json')
            else:
                if masking_result['status'] != 0:
                    if SysConfig().sys_config.get('query_check') == 'true':
                        return HttpResponse(json.dumps(masking_result), content_type='application/json')

    t_end = time.time()
    masking_cost_time = "%5s" % "{:.4f}".format(t_end - t_start)

    sql_result['masking_cost_time'] = masking_cost_time

    finalResult['data'] = sql_result

    # Record successful queries in the database
    if sql_result.get('Error'):
        pass
    else:
        query_log = QueryLog()
        query_log.username = user.username
        query_log.user_display = user.display
        query_log.db_name = dbName
        query_log.instance_name = instance_name
        query_log.sqllog = sqlContent
        if int(limit_num) == 0:
            limit_num = int(sql_result['effect_row'])
        else:
            limit_num = min(int(limit_num), int(sql_result['effect_row']))
        query_log.effect_row = limit_num
        query_log.cost_time = cost_time
        # The connection may have timed out during the query; retry once
        try:
            query_log.save()
        except Exception:
            connection.close()
            query_log.save()

    # Return the query result
    return HttpResponse(json.dumps(finalResult, cls=ExtendJSONEncoder, bigint_as_string=True),
                        content_type='application/json')
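
The LIMIT handling above only appends a clause when the statement is a SELECT that does not already end in "limit N" or "limit N, M". The same rule in isolation, as a hypothetical helper (not part of the source):

import re

def enforce_limit(sql, limit_num):
    # Append a LIMIT only to SELECTs that do not already carry one at the end.
    sql = sql.strip().rstrip(';')
    if re.match(r"^select", sql.lower()):
        has_plain_limit = re.search(r"limit\s+\d+$", sql.lower())
        has_offset_limit = re.search(r"limit\s+\d+\s*,\s*\d+$", sql.lower())
        if has_plain_limit is None and has_offset_limit is None:
            sql = sql + ' limit ' + str(limit_num)
    return sql + ';'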
Example #23
from django.core import serializers
from django.db import transaction
import datetime
import time

from sql.utils.extend_json_encoder import ExtendJSONEncoder
from sql.utils.aes_decryptor import Prpcrypt
from sql.utils.dao import Dao
from .const import WorkflowDict
from .models import MasterConfig, SlaveConfig, QueryPrivilegesApply, QueryPrivileges, QueryLog, SqlGroup
from sql.utils.data_masking import Masking
from sql.utils.workflow import Workflow
from sql.utils.config import SysConfig
from sql.utils.group import user_slaves

dao = Dao()
prpCryptor = Prpcrypt()
datamasking = Masking()
workflowOb = Workflow()


# Workflow audit callback for query privilege applications
def query_audit_call_back(workflow_id, workflow_status):
    # Update the status on the business table
    apply_info = QueryPrivilegesApply()
    apply_info.apply_id = workflow_id
    apply_info.status = workflow_status
    apply_info.save(update_fields=['status'])
    # On audit approval insert the privilege records, using a bulk insert to reduce overhead
    if workflow_status == WorkflowDict.workflow_status['audit_success']:
        apply_queryset = QueryPrivilegesApply.objects.get(apply_id=workflow_id)
Example #24
class SqlTuning(object):
    def __init__(self, instance_name, db_name, sqltext):
        self.dao = Dao(instance_name=instance_name, flag=True)
        self.db_name = db_name
        self.sqltext = sqltext
        self.sql_variable = '''
    select
      lower(variable_name),
      variable_value
    from performance_schema.global_variables
    where upper(variable_name) in ('%s')
    order by variable_name;''' % ('\',\''.join(SQLTuning.SYS_PARM_FILTER))
        self.sql_optimizer_switch = '''
    select variable_value
    from performance_schema.global_variables
    where upper(variable_name) = 'OPTIMIZER_SWITCH';
    '''
        self.sql_table_info = '''
    select
      table_name,
      engine,
      row_format                                           as format,
      table_rows,
      avg_row_length                                       as avg_row,
      round((data_length + index_length) / 1024 / 1024, 2) as total_mb,
      round((data_length) / 1024 / 1024, 2)                as data_mb,
      round((index_length) / 1024 / 1024, 2)               as index_mb
    from information_schema.tables
    where table_schema = '%s' and table_name = '%s'
    '''
        self.sql_table_index = '''
    select
      table_name,
      index_name,
      non_unique,
      seq_in_index,
      column_name,
      collation,
      cardinality,
      nullable,
      index_type
    from information_schema.statistics
    where table_schema = '%s' and table_name = '%s'
    order by 1, 3;    
    '''

    @staticmethod
    def __is_subselect(parsed):
        if not parsed.is_group:
            return False
        for item in parsed.tokens:
            if item.ttype is DML and item.value.upper() == 'SELECT':
                return True
        return False

    def __extract_from_part(self, parsed):
        from_seen = False
        for item in parsed.tokens:
            # print item.ttype,item.value
            if from_seen:
                if self.__is_subselect(item):
                    for x in self.__extract_from_part(item):
                        yield x
                elif item.ttype is Keyword:
                    return  # PEP 479: raising StopIteration inside a generator errors on Python 3.7+
                else:
                    yield item
            elif item.ttype is Keyword and item.value.upper() == 'FROM':
                from_seen = True

    @staticmethod
    def __extract_table_identifiers(token_stream):
        for item in token_stream:
            if isinstance(item, IdentifierList):
                for identifier in item.get_identifiers():
                    yield identifier.get_real_name()
            elif isinstance(item, Identifier):
                yield item.get_real_name()
            # It's a bug to check for Keyword here, but in the example
            # above some tables names are identified as keywords...
            elif item.ttype is Keyword:
                yield item.value

    def __extract_tables(self, p_sqltext):
        stream = self.__extract_from_part(sqlparse.parse(p_sqltext)[0])
        return list(self.__extract_table_identifiers(stream))

    def basic_information(self):
        return self.dao.mysql_query(sql="select @@version")

    def sys_parameter(self):
        # Get the MySQL version
        version = self.basic_information()['rows'][0][0]
        server_version = tuple(
            [numeric_part(n) for n in version.split('.')[:2]])
        if server_version < (5, 7):
            sql = self.sql_variable.replace('performance_schema',
                                            'information_schema')
        else:
            sql = self.sql_variable
        return self.dao.mysql_query(sql=sql)

    def optimizer_switch(self):
        # Get the MySQL version
        version = self.basic_information()['rows'][0][0]
        server_version = tuple(
            [numeric_part(n) for n in version.split('.')[:2]])
        if server_version < (5, 7):
            sql = self.sql_optimizer_switch.replace('performance_schema',
                                                    'information_schema')
        else:
            sql = self.sql_optimizer_switch
        return self.dao.mysql_query(sql=sql)

    def sqlplan(self):
        plan = self.dao.mysql_query(self.db_name,
                                    "explain extended " + self.sqltext)
        optimizer_rewrite_sql = self.dao.mysql_query(sql="show warnings")
        return plan, optimizer_rewrite_sql

    # Known flaw: when the SQL references several tables, info for only one table is obtained
    def object_statistics(self):
        tableistructure = {'column_list': [], 'rows': []}
        tableinfo = {'column_list': [], 'rows': []}
        indexinfo = {'column_list': [], 'rows': []}
        for index, table_name in enumerate(self.__extract_tables(
                self.sqltext)):
            tableistructure = self.dao.mysql_query(
                db_name=self.db_name,
                sql="show create table {};".format(
                    table_name.replace('`', '').lower()))

            tableinfo = self.dao.mysql_query(
                sql=self.sql_table_info %
                (self.db_name, table_name.replace('`', '').lower()))

            indexinfo = self.dao.mysql_query(
                sql=self.sql_table_index %
                (self.db_name, table_name.replace('`', '').lower()))
        return tableistructure, tableinfo, indexinfo

    def exec_sql(self):
        result = {
            "EXECUTE_TIME": 0,
            "BEFORE_STATUS": {
                'column_list': [],
                'rows': []
            },
            "AFTER_STATUS": {
                'column_list': [],
                'rows': []
            },
            "SESSION_STATUS(DIFFERENT)": {
                'column_list': ['status_name', 'before', 'after', 'diff'],
                'rows': []
            },
            "PROFILING_DETAIL": {
                'column_list': [],
                'rows': []
            },
            "PROFILING_SUMMARY": {
                'column_list': [],
                'rows': []
            }
        }
        sql_profiling = "select concat(upper(left(variable_name,1)),substring(lower(variable_name),2,(length(variable_name)-1))) var_name,variable_value var_value from performance_schema.session_status order by 1"

        # Get the MySQL version
        version = self.basic_information()['rows'][0][0]
        server_version = tuple(
            [numeric_part(n) for n in version.split('.')[:2]])
        if server_version < (5, 7):
            sql = sql_profiling.replace('performance_schema',
                                        'information_schema')
        else:
            sql = sql_profiling
        self.dao.mysql_query(sql="set profiling=1")
        records = self.dao.mysql_query(
            sql=
            "select ifnull(max(query_id),0) from INFORMATION_SCHEMA.PROFILING")
        query_id = records['rows'][0][0] + 3  # skip next sql
        # Capture session status before execution
        result['BEFORE_STATUS'] = self.dao.mysql_query(sql=sql)

        # Run the query and measure the execution time
        t_start = time.time()
        self.dao.mysql_query(sql=self.sqltext)
        t_end = time.time()
        cost_time = "%5s" % "{:.4f}".format(t_end - t_start)
        result['EXECUTE_TIME'] = cost_time

        # Capture session status after execution
        result['AFTER_STATUS'] = self.dao.mysql_query(sql=sql)

        # Get the PROFILING detail
        result['PROFILING_DETAIL'] = self.dao.mysql_query(
            sql=
            "select STATE,DURATION,CPU_USER,CPU_SYSTEM,BLOCK_OPS_IN,BLOCK_OPS_OUT ,MESSAGES_SENT ,MESSAGES_RECEIVED ,PAGE_FAULTS_MAJOR ,PAGE_FAULTS_MINOR ,SWAPS from INFORMATION_SCHEMA.PROFILING where query_id="
            + str(query_id) + " order by seq")
        result['PROFILING_SUMMARY'] = self.dao.mysql_query(
            sql=
            "SELECT STATE,SUM(DURATION) AS Total_R,ROUND(100*SUM(DURATION)/(SELECT SUM(DURATION) FROM INFORMATION_SCHEMA.PROFILING WHERE QUERY_ID="
            + str(query_id) +
            "),2) AS Pct_R,COUNT(*) AS Calls,SUM(DURATION)/COUNT(*) AS R_Call FROM INFORMATION_SCHEMA.PROFILING WHERE QUERY_ID="
            + str(query_id) + " GROUP BY STATE ORDER BY Total_R DESC")

        # Compute the before/after status diff
        before_status_rows = [
            list(item) for item in result['BEFORE_STATUS']['rows']
        ]
        after_status_rows = [
            list(item) for item in result['AFTER_STATUS']['rows']
        ]
        for index, item in enumerate(before_status_rows):
            if before_status_rows[index][1] != after_status_rows[index][1]:
                before_status_rows[index].append(after_status_rows[index][1])
                before_status_rows[index].append(
                    str(
                        float(after_status_rows[index][1]) -
                        float(before_status_rows[index][1])))
        diff_rows = [item for item in before_status_rows if len(item) == 4]
        result['SESSION_STATUS(DIFFERENT)']['rows'] = diff_rows
        return result
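
A minimal usage sketch for the tuning helper above; the instance, database and SQL text are placeholders, not values from the source:

tuner = SqlTuning(instance_name='some_instance', db_name='some_db',
                  sqltext='select * from t1 where id = 1')
plan, rewritten_sql = tuner.sqlplan()            # execution plan plus optimizer-rewritten SQL
structure, table_info, index_info = tuner.object_statistics()
profile = tuner.exec_sql()                       # before/after session status and profiling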
Example #25
def query(request):
    instance_name = request.POST.get('instance_name')
    sql_content = request.POST.get('sql_content')
    db_name = request.POST.get('db_name')
    limit_num = request.POST.get('limit_num')

    result = {'status': 0, 'msg': 'ok', 'data': {}}

    # Server-side parameter validation
    if sql_content is None or db_name is None or instance_name is None or limit_num is None:
        result['status'] = 1
        result['msg'] = '页面提交参数可能为空'
        return HttpResponse(json.dumps(result),
                            content_type='application/json')

    sql_content = sql_content.strip()

    # Get the user info
    user = request.user

    # Strip comment statements and non-query statements
    sql_content = ''.join(
        map(
            lambda x: re.compile(r'(^--\s+.*|^/\*.*\*/;\s*$)').sub(
                '', x, count=1), sql_content.splitlines(1))).strip()
    # Remove blank lines
    sql_content = re.sub('[\r\n\f]{2,}', '\n', sql_content)

    sql_list = sql_content.strip().split('\n')
    for sql in sql_list:
        if re.match(r"^select|^show|^explain", sql.lower()):
            break
        else:
            result['status'] = 1
            result['msg'] = '仅支持^select|^show|^explain语法,请联系管理员!'
            return HttpResponse(json.dumps(result),
                                content_type='application/json')

    # Split on semicolons and keep only the first valid SQL statement
    sql_content = sql_content.strip().split(';')[0]

    try:
        # Query privilege check
        priv_check_info = query_priv_check(user, instance_name, db_name,
                                           sql_content, limit_num)

        if priv_check_info['status'] == 0:
            limit_num = priv_check_info['data']['limit_num']
            priv_check = priv_check_info['data']['priv_check']
        else:
            return HttpResponse(json.dumps(priv_check_info),
                                content_type='application/json')

        if re.match(r"^explain", sql_content.lower()):
            limit_num = 0

        # Add a LIMIT clause to the query SQL
        if re.match(r"^select", sql_content.lower()):
            if re.search(r"limit\s+(\d+)$", sql_content.lower()) is None:
                if re.search(r"limit\s+\d+\s*,\s*(\d+)$",
                             sql_content.lower()) is None:
                    sql_content = sql_content + ' limit ' + str(limit_num)

        sql_content = sql_content + ';'

        # Run the query and measure the execution time
        t_start = time.time()
        sql_result = Dao(instance_name=instance_name).mysql_query(
            str(db_name), sql_content, limit_num)
        t_end = time.time()
        cost_time = "%5s" % "{:.4f}".format(t_end - t_start)

        sql_result['cost_time'] = cost_time

        # Data masking: check the config for whether masking is enabled and
        # whether execution may continue if parse-tree analysis fails
        # hit_rule: did the query hit a masking rule? 0 unknown, 1 hit, 2 missed
        hit_rule = 0 if re.match(r"^select", sql_content.lower()) else 2
        masking = 2  # was the result masked correctly? 1 yes, 2 no
        t_start = time.time()
        # Only apply masking to SELECT statements
        if SysConfig().sys_config.get('data_masking') and re.match(
                r"^select", sql_content.lower()):
            try:
                masking_result = datamasking.data_masking(
                    instance_name, db_name, sql_content, sql_result)
                if masking_result['status'] != 0 and SysConfig(
                ).sys_config.get('query_check'):
                    return HttpResponse(json.dumps(masking_result),
                                        content_type='application/json')
                else:
                    hit_rule = masking_result['data']['hit_rule']
                    masking = 1 if hit_rule == 1 else 2
            except Exception:
                logger.error(traceback.format_exc())
                hit_rule = 0
                masking = 2
                if SysConfig().sys_config.get('query_check'):
                    result['status'] = 1
                    result['msg'] = '脱敏数据报错,请联系管理员'
                    return HttpResponse(json.dumps(result),
                                        content_type='application/json')

        t_end = time.time()
        masking_cost_time = "%5s" % "{:.4f}".format(t_end - t_start)

        sql_result['masking_cost_time'] = masking_cost_time

        result['data'] = sql_result

        # Record successful queries in the database
        if sql_result.get('Error'):
            pass
        else:
            query_log = QueryLog()
            query_log.username = user.username
            query_log.user_display = user.display
            query_log.db_name = db_name
            query_log.instance_name = instance_name
            query_log.sqllog = sql_content
            if int(limit_num) == 0:
                limit_num = int(sql_result['effect_row'])
            else:
                limit_num = min(int(limit_num), int(sql_result['effect_row']))
            query_log.effect_row = limit_num
            query_log.cost_time = cost_time
            query_log.priv_check = priv_check
            query_log.hit_rule = hit_rule
            query_log.masking = masking
            # The connection may have timed out during the query; retry once
            try:
                query_log.save()
            except Exception:
                connection.close()
                query_log.save()
    except Exception as e:
        logger.error(traceback.format_exc())
        result['status'] = 1
        result['msg'] = str(e)

    # Return the query result
    try:
        return HttpResponse(json.dumps(result,
                                       cls=ExtendJSONEncoder,
                                       bigint_as_string=True),
                            content_type='application/json')
    except Exception:
        return HttpResponse(json.dumps(result,
                                       default=str,
                                       bigint_as_string=True,
                                       encoding='latin1'),
                            content_type='application/json')