Example #1
def zx_list(xmurl, pagenum):  # execution list query: returns total pages, total count and the current page's rows
    username = login_tool.login_tools().get_username().decode('utf-8')
    xmmc = cache_tool.res_cache(username)
    logger.info(xmmc)
    if xmmc[0] is None:
        xmmc_list = []
    else:
        xmmc_list = re.split(r',', xmmc[0])
    if xmurl in xmmc_list or xmmc[0] is None:
        sql_list = "SELECT zx.c_bh as zxbh,yl.c_ylmc as ylmc,to_char(zx.dt_zxsj, 'yyyy-mm-dd hh24:mi:ss') as zxsj,zx.c_cgl as cgl,zx.c_fgl as fgl,zx.c_tgl as tgl, c_sfci AS zxfs "\
            "FROM db_apitesting.t_at_zxxx zx LEFT JOIN db_apitesting.t_at_ylxx yl ON zx.c_bh_yl = yl.c_bh LEFT JOIN db_apitesting.t_at_xmxx xm ON yl.c_bh_xm = xm.c_bh "\
                "WHERE xm.c_url = '%s' ORDER BY zx.dt_zxsj DESC, zx.c_bh DESC LIMIT %s OFFSET %s;" % (xmurl, 10, (int(pagenum) - 1)*10)
        sql_count = "SELECT count(1) as counts "\
            "FROM db_apitesting.t_at_zxxx zx LEFT JOIN db_apitesting.t_at_ylxx yl ON zx.c_bh_yl = yl.c_bh LEFT JOIN db_apitesting.t_at_xmxx xm ON yl.c_bh_xm = xm.c_bh "\
                "WHERE xm.c_url = '%s';" % xmurl
        try:
            project_dict = {}
            res_list = all_dbc.pg_select_operator(sql_list)
            res_count = all_dbc.pg_select_operator(sql_count)
            project_dict['maxpage'] = math.ceil(res_count[0]['counts']/10)
            project_dict['maxsize'] = res_count[0]['counts']
            project_dict['nowpage'] = int(pagenum)
            project_dict['reslist'] = res_list
            return jsonify(project_dict)
        except Exception as eee:
            logger.error(eee)
            return jsonify({'result': 'fail'})
    else:
        abort(403)
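These examples interpolate request values straight into the SQL text. A minimal sketch of the same list/count pagination using bound parameters instead, assuming a plain psycopg2 connection is available (the all_dbc wrapper used above may not expose one):

import math
import psycopg2
import psycopg2.extras

def zx_list_parameterized(conn, xmurl, pagenum, page_size=10):
    list_sql = (
        "SELECT zx.c_bh AS zxbh, yl.c_ylmc AS ylmc, "
        "to_char(zx.dt_zxsj, 'yyyy-mm-dd hh24:mi:ss') AS zxsj, "
        "zx.c_cgl AS cgl, zx.c_fgl AS fgl, zx.c_tgl AS tgl, c_sfci AS zxfs "
        "FROM db_apitesting.t_at_zxxx zx "
        "LEFT JOIN db_apitesting.t_at_ylxx yl ON zx.c_bh_yl = yl.c_bh "
        "LEFT JOIN db_apitesting.t_at_xmxx xm ON yl.c_bh_xm = xm.c_bh "
        "WHERE xm.c_url = %s ORDER BY zx.dt_zxsj DESC, zx.c_bh DESC "
        "LIMIT %s OFFSET %s;")
    count_sql = (
        "SELECT count(1) AS counts "
        "FROM db_apitesting.t_at_zxxx zx "
        "LEFT JOIN db_apitesting.t_at_ylxx yl ON zx.c_bh_yl = yl.c_bh "
        "LEFT JOIN db_apitesting.t_at_xmxx xm ON yl.c_bh_xm = xm.c_bh "
        "WHERE xm.c_url = %s;")
    offset = (int(pagenum) - 1) * page_size
    with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
        cur.execute(list_sql, (xmurl, page_size, offset))  # values are bound by the driver, not interpolated
        rows = cur.fetchall()
        cur.execute(count_sql, (xmurl,))
        counts = cur.fetchone()['counts']
    return {'maxpage': math.ceil(counts / page_size),
            'maxsize': counts,
            'nowpage': int(pagenum),
            'reslist': rows}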
Example #2
def ci_info(ci_id):
    yl_list_sql = "SELECT c_yl_list AS list, c_api AS api FROM db_apitesting.t_at_ci WHERE c_bh = '%s';" % ci_id
    try:
        yl_list_res = all_dbc.pg_select_operator(yl_list_sql)
    except Exception as eee:
        logger.error('CI详情信息查询失败:' + str(eee))
        logger.exception(eee)
        return jsonify({'result': 'fail'})
    yl_list = re.split(',', str(yl_list_res[0]['list']))
    if len(yl_list) == 0:
        return jsonify({'result': 'fail'})
    else:
        yl_info = str(yl_list)[1:-1]
        yl_info_sql = "SELECT c_bh AS key, c_ylmc AS title FROM db_apitesting.t_at_ylxx WHERE c_bh IN (%s);" % yl_info
        try:
            yl_info_res = all_dbc.pg_select_operator(yl_info_sql)
        except Exception as eee:
            logger.error('CI详情信息查询失败:' + str(eee))
            logger.exception(eee)
            return jsonify({'result': 'fail'})
        res_dic = {}
        res_dic['result'] = 'success'
        res_dic['api_linux'] = com_spl(yl_list_res[0]['api'])
        res_dic['api_docker'] = ''
        res_dic['res_list'] = yl_info_res
        return jsonify(res_dic)
Example #3
def yl_list(xmurl, pagenum):  # case list query: returns total pages, total count and the current page's rows
    sql_list = "select yl.c_bh as ylbh,yl.c_ylmc as ylmc,to_char(yl.dt_scsj, 'yyyy-mm-dd hh24:mi:ss') as scsj,yl.n_zxcs as zxcs,yl.c_sfbj as bjzt "\
    "from db_apitesting.t_at_xmxx xm left join db_apitesting.t_at_ylxx yl on xm.c_bh = yl.c_bh_xm "\
    "where xm.c_url = '%s' ORDER by yl.dt_scsj desc LIMIT %s OFFSET %s;" % (xmurl, 8, (int(pagenum) - 1)*8)
    sql_count = "select count(1) as counts "\
    "from db_apitesting.t_at_xmxx xm left join db_apitesting.t_at_ylxx yl on xm.c_bh = yl.c_bh_xm "\
    "where xm.c_url = '%s';" % xmurl
    try:
        project_dict = {}
        res_count = all_dbc.pg_select_operator(sql_count)
        # Check the maximum page: if the requested page exceeds it, return the last page's data instead
        if math.ceil(res_count[0]['counts'] / 8) < int(pagenum):
            sql_list = "select yl.c_bh as ylbh,yl.c_ylmc as ylmc,to_char(yl.dt_scsj, 'yyyy-mm-dd hh24:mi:ss') as scsj,yl.n_zxcs as zxcs,yl.c_sfbj as bjzt "\
                    "from db_apitesting.t_at_xmxx xm left join db_apitesting.t_at_ylxx yl on xm.c_bh = yl.c_bh_xm "\
                    "where xm.c_url = '%s' ORDER by yl.dt_scsj desc LIMIT %s OFFSET %s;" % (xmurl, 8, (int(math.ceil(res_count[0]['counts']/8)) - 1)*8)
            pagenum = math.ceil(res_count[0]['counts'] / 8)
        res_list = all_dbc.pg_select_operator(sql_list)
        project_dict['maxpage'] = math.ceil(res_count[0]['counts'] / 8)
        project_dict['maxsize'] = res_count[0]['counts']
        project_dict['nowpage'] = int(pagenum)
        xh = 0
        for arr in res_list:
            arr['xh'] = xh
            arr['update'] = 0
            xh = xh + 1
        project_dict['reslist'] = res_list
        return jsonify(project_dict)
    except Exception as eee:
        logger.error(eee)
        return jsonify({'result': 'fail'})
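The overflow handling above rebuilds the whole SQL string just to move the OFFSET; the underlying arithmetic is small enough to isolate. A sketch of that clamping step (clamp_page is a hypothetical helper, not part of the project):

import math

def clamp_page(pagenum, total_rows, page_size=8):
    # Clamp the requested page to the last available page (never below page 1)
    maxpage = max(1, math.ceil(total_rows / page_size))
    page = min(max(1, int(pagenum)), maxpage)
    offset = (page - 1) * page_size
    return page, maxpage, offset

# 17 rows at 8 per page gives 3 pages; page 9 is clamped to page 3, OFFSET 16
print(clamp_page(9, 17))   # (3, 3, 16)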
Example #4
def ci_list(xmurl, pagenum):
    ci_list_sql = "SELECT ci.c_bh AS key,ci.c_api AS url, ci.c_zxcs AS times, ci.c_ylsl AS ylsl, to_char(ci.dt_zxdysj, 'yyyy-mm-dd hh24:mi:ss') AS zxdysj, 0 AS ck, 0 AS bj FROM db_apitesting.t_at_xmxx xm left join db_apitesting.t_at_ci ci on xm.c_bh = ci.c_bh_xm WHERE xm.c_url = '%s' ORDER BY ci.dt_cjsj DESC LIMIT %s OFFSET %s;" % (
        xmurl, 5, (int(pagenum) - 1) * 5)
    ci_list_count_sql = "SELECT count(1) AS counts FROM db_apitesting.t_at_xmxx xm right join db_apitesting.t_at_ci ci on xm.c_bh = ci.c_bh_xm WHERE xm.c_url = '%s';" % xmurl
    try:
        ci_list_res = all_dbc.pg_select_operator(ci_list_sql)
        ci_list_count_res = all_dbc.pg_select_operator(ci_list_count_sql)
    except Exception as eee:
        logger.error('CI列表信息查询失败:' + str(eee))
        logger.exception(eee)
        return jsonify({'result': 'fail'})
    if ci_list_count_res[0]['counts'] == 0:
        return jsonify({
            'maxpage': 0,
            'maxsize': 0,
            'nowpage': 0,
            'reslist': []
        })
    else:
        ci_list = {}
        ci_list['maxpage'] = math.ceil(ci_list_count_res[0]['counts'] / 5)
        ci_list['maxsize'] = ci_list_count_res[0]['counts']
        ci_list['nowpage'] = int(pagenum)
        ci_list['reslist'] = ci_list_res
        return jsonify(ci_list)
Example #5
def zxcs():
    if not request.args or 'ylbh' not in request.args:
        abort(400)
    ylbh = request.args.get('ylbh')
    # Look up the execution parameters for this ylbh
    zxcs_sql_count = "SELECT count(1), c_bh_yl as ylbh FROM db_apitesting.t_at_zxcs WHERE c_bh_yl = '%s' GROUP BY c_bh_yl" % ylbh
    zxcs_sql = "SELECT c_bh as key, c_key as zxcs_key, c_value as zxcs_value FROM db_apitesting.t_at_zxcs WHERE c_bh_yl = '%s' ORDER BY n_xh ASC" % ylbh
    api_count = "SELECT c_api_count AS api_docs FROM db_apitesting.t_at_ylxx WHERE c_bh = '%s';" % ylbh
    try:
        counts = all_dbc.pg_select_operator(zxcs_sql_count)
        list_cs = all_dbc.pg_select_operator(zxcs_sql)
        api = all_dbc.pg_select_operator(api_count)
    except Exception as eee:
        logger.error('查询参数的sql报错了:' + str(eee))
        return jsonify({'result': 'fail', 'msg': str(eee)})
    res_info = {}
    if len(counts) != 1:
        res_info['ylbh'] = ylbh
        if len(api) != 1:
            res_info['api_docs'] = ''
        else:
            res_info['api_docs'] = api[0]['api_docs']
        res_info['maxsize'] = 0
        res_info['reslist'] = []
        return jsonify(res_info)
    else:
        res_info['ylbh'] = counts[0]['ylbh']
        if len(api) != 1:
            res_info['api_docs'] = ''
        else:
            res_info['api_docs'] = api[0]['api_docs']
        res_info['maxsize'] = counts[0]['count']
        res_info['reslist'] = list_cs
        return jsonify(res_info)
Example #6
def add_ci():
    if not request.json or 'xmurl' not in request.json or 'ylinfo' not in request.json:
        abort(400)
    xmurl = request.json['xmurl']
    ylinfo = request.json['ylinfo']
    if len(ylinfo) == 0:
        return jsonify({'result': 'fail'})
    xm_id_sql = "SELECT c_bh AS xm_id FROM db_apitesting.t_at_xmxx WHERE c_url = '%s';" % xmurl
    try:
        xm_id_res = all_dbc.pg_select_operator(xm_id_sql)
    except Exception as eee:
        logger.error('CI项目编号查询失败:' + str(eee))
        logger.exception(eee)
        return jsonify({'result': 'fail'})
    ci_id = base_tool.next_id()
    api_url = '/apitest/ci_control/' + ci_id
    ci_sql = "INSERT INTO db_apitesting.t_at_ci(c_bh, c_api, c_yl_list, dt_zxdysj, c_zxcs, c_bh_xm, c_ylsl, dt_cjsj) VALUES"\
        " ('%s', '%s', '%s', NULL, '0', '%s', '%s', now());" % (ci_id, api_url, ",".join(ylinfo), xm_id_res[0]['xm_id'], len(ylinfo))
    try:
        all_dbc.pg_insert_operator(ci_sql)
    except Exception as eee:
        logger.error('CI项目创建失败:' + str(eee))
        logger.exception(eee)
        return jsonify({'result': 'fail'})
    return jsonify({'result': 'success'})
Example #7
def yl_del(ylbh):  # delete a case record; TODO: the file entity itself is not deleted yet
    sql_path = "SELECT c_bclj as yllj FROM db_apitesting.t_at_ylxx WHERE c_bh = '%s'" % ylbh  # query the file path first
    sql_del_yl = "DELETE FROM db_apitesting.t_at_ylxx WHERE c_bh = '%s'" % ylbh  # delete the row in the ylxx main table
    # sql_del_zx = "DELETE FROM db_apitesting.t_at_zxxx WHERE c_bh_yl = '%s'" % ylbh # delete the zxxx child-table rows
    # sql_del_qq = "DELETE FROM db_apitesting.t_at_qqxx WHERE c_bh_yl = '%s'" % ylbh # delete the qqxx child-table rows
    sql_del_cs = "DELETE FROM db_apitesting.t_at_zxcs WHERE c_bh_yl = '%s'" % ylbh  # delete the zxcs child-table rows
    try:
        yl_path = all_dbc.pg_select_operator(sql_path)
    except Exception as eee:
        logger.error('查询路径报的错:' + str(eee))
        return jsonify({'result': 'fail', 'msg': '用例删除失败'})
    if os.path.exists(yl_path[0]['yllj']):
        try:
            os.remove(yl_path[0]['yllj'])
        except Exception as eee:
            logger.error('删除原用例文件失败:' + str(eee))
            return jsonify({'result': 'fail', 'msg': str(eee)})
    try:
        # for sql in [sql_del_yl, sql_del_qq, sql_del_zx, sql_del_cs]:
        for sql in [sql_del_yl, sql_del_cs]:
            all_dbc.pg_delete_operator(sql)
        return jsonify({'result': 'success'})
    except Exception as eee:
        logger.error(eee)
        return jsonify({'result': 'fail'})
def downloadfile(ylbh):  # download a case file
    sql_filepath = "SELECT c_bclj as lj,c_ylmc as mc FROM db_apitesting.t_at_ylxx WHERE c_bh = '%s';" % ylbh
    logger.info(sql_filepath)
    try:
        filepath = all_dbc.pg_select_operator(sql_filepath)
        new_filepath = filepath[0]['lj']
        p, f = os.path.split(new_filepath)
        if os.path.isfile(new_filepath):
            response = make_response(
                send_from_directory(p, f, as_attachment=True))
            logger.info('编码格式:' +
                        str(chardet.detect(str.encode(filepath[0]['mc']))))
            logger.info(
                '文件名称:' +
                str(filepath[0]['mc'].encode('utf-8').decode('latin-1')))
            response.headers[
                "content-disposition"] = "attachment; filename={}".format(
                    str('测试文件名称.xlsx').encode().decode('latin-1'))
            response.headers[
                "content-type"] = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
            response.headers[
                "Access-Control-Expose-Headers"] = "Content-disposition"  # 解决前端无法访问到返回头中的文件名称的问题
            # response.headers["Content-Disposition"] = "{}".format(filepath[0]['mc'].encode().decode('latin-1'))
            return response
        else:
            return jsonify({'result': 'fail', 'msg': '文件不存在'})
    except Exception as eee:
        logger.error('文件下载报错:' + str(eee))
        return jsonify({'result': 'fail', 'msg': str(eee)})
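downloadfile currently hard-codes an ASCII placeholder filename because the real case name may not be latin-1 safe. A hedged sketch of carrying a UTF-8 filename via the RFC 5987 filename* parameter instead (a standalone helper, not tied to the project's response object):

from urllib.parse import quote

def content_disposition(filename, fallback='download.xlsx'):
    # filename= keeps an ASCII-safe fallback for old clients;
    # filename*= carries the percent-encoded UTF-8 name per RFC 5987/6266
    return "attachment; filename={}; filename*=UTF-8''{}".format(fallback, quote(filename))

# e.g. response.headers["Content-Disposition"] = content_disposition(filepath[0]['mc'] + '.xlsx')
print(content_disposition('测试文件名称.xlsx'))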
def run_times(tjfs):
    # tjfs (grouping mode): 1 = case name, 2 = project name, 3 = dev team name, 4 = test team name
    tjfs = int(tjfs)
    if tjfs == 1:
        run_times_sql = "SELECT ylxx.c_ylmc AS name, Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2) AS x, Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2) AS y, sum(ylxx.n_zxcs) AS z FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl WHERE zxxx.c_fgl NOT IN ('计算中', '异常中断,未计算', '不计算') GROUP BY ylxx.c_ylmc;"
    elif tjfs == 2:
        run_times_sql = "SELECT xmxx.c_xmmc AS name, aaa.avg_num_fg AS x, aaa.avg_num_tg AS y, aaa.times AS z FROM (SELECT ylxx.c_bh_xm AS xmbh, Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2) AS avg_num_fg, Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2) AS avg_num_tg, sum(ylxx.n_zxcs) AS times FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl WHERE zxxx.c_fgl NOT IN ('计算中', '异常中断,未计算', '不计算') GROUP BY ylxx.c_bh_xm) aaa LEFT JOIN db_apitesting.t_at_xmxx xmxx ON aaa.xmbh = xmxx.c_bh;"
    elif tjfs == 3:
        run_times_sql = "SELECT xmxx.c_code_team AS name, Round(AVG(aaa.avg_num_fg), 2) AS x, Round(AVG(aaa.avg_num_tg),2) AS y, sum(aaa.times) AS z FROM (SELECT ylxx.c_bh_xm AS xmbh, Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2) AS avg_num_fg, Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2) AS avg_num_tg, sum(ylxx.n_zxcs) AS times FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl WHERE zxxx.c_fgl NOT IN ('计算中', '异常中断,未计算', '不计算') GROUP BY ylxx.c_bh_xm) aaa LEFT JOIN db_apitesting.t_at_xmxx xmxx ON aaa.xmbh = xmxx.c_bh GROUP BY xmxx.c_code_team;"
    elif tjfs == 4:
        run_times_sql = "SELECT xmxx.c_test_team AS name, Round(AVG(aaa.avg_num_fg), 2) AS x, Round(AVG(aaa.avg_num_tg),2) AS y, sum(aaa.times) AS z FROM (SELECT ylxx.c_bh_xm AS xmbh, Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2) AS avg_num_fg, Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2) AS avg_num_tg, sum(ylxx.n_zxcs) AS times FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl WHERE zxxx.c_fgl NOT IN ('计算中', '异常中断,未计算', '不计算') GROUP BY ylxx.c_bh_xm) aaa LEFT JOIN db_apitesting.t_at_xmxx xmxx ON aaa.xmbh = xmxx.c_bh GROUP BY xmxx.c_test_team;"
    else:
        abort(400)
    try:
        run_times_res = all_dbc.pg_select_operator(run_times_sql)
    except Exception as eee:
        logger.error('执行次数sql报错:' + str(eee))
        logger.exception(eee)
        return jsonify({'result': 'fail', 'msg': str(eee)})
    if tjfs == 3 or tjfs == 4:
        for info_run in run_times_res:
            try:
                name = cache_tool.res_cache(info_run['name'])[0]
            except Exception:
                name = info_run['name']  # fall back to the raw team code if it is not cached
            info_run['name'] = name
    return jsonify(run_times_res)
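The statistics queries strip a trailing '%' with LEFT(c_fgl, -1) and cast to numeric, skipping a few non-numeric status strings. The same parsing expressed in Python, as a small sketch of what those columns hold:

SKIP_VALUES = {'计算中', '异常中断,未计算', '不计算'}

def parse_rate(value):
    # Convert a stored rate such as '85.5%' to a float;
    # return None for the status strings the SQL filters out
    if not value or value in SKIP_VALUES:
        return None
    return float(value.rstrip('%'))

print(parse_rate('85.5%'))   # 85.5
print(parse_rate('计算中'))   # None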
Example #10
def zx_info(zxbh):  # execution detail
    sql_info = "SELECT c_fgl as fgl, c_cgl as cgl, c_tgl as tgl FROM db_apitesting.t_at_zxxx WHERE c_bh = '%s';" % zxbh
    try:
        res_list = all_dbc.pg_select_operator(sql_info)
        return jsonify(res_list)
    except Exception as eee:
        logger.error(eee)
        return jsonify({'result': 'fail'})
def index_rate():
    if not request.json or 'tjfs' not in request.json or 'limit' not in request.json:
        abort(400)
    # tjfs (grouping mode): 1 = case name, 2 = project name, 3 = dev team name, 4 = test team name
    tjfs = request.json['tjfs']
    limit = request.json['limit']
    if not isinstance(limit, int):
        abort(400)
    if tjfs == 1:
        cov_rate_sql = "SELECT ylxx.c_ylmc AS type, Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2) AS value, concat(Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2), '%%') AS sold FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl WHERE zxxx.c_fgl NOT IN ('计算中', '异常中断,未计算', '不计算') GROUP BY ylxx.c_bh, ylxx.c_ylmc ORDER BY value ASC LIMIT %d;" % limit
        pass_rate_sql = "SELECT ylxx.c_ylmc AS type, Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2) AS value, concat(Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2), '%%') AS sold FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl GROUP BY ylxx.c_bh, ylxx.c_ylmc ORDER BY value ASC LIMIT %d;" % limit
    elif tjfs == 2:
        cov_rate_sql = "SELECT xmxx.c_xmmc AS type, aaa.avg_num AS value, aaa.sold_text as sold FROM (SELECT ylxx.c_bh_xm AS xmbh, Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2) AS avg_num, concat(Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2), '%%') AS sold_text FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl WHERE zxxx.c_fgl NOT IN ('计算中', '异常中断,未计算', '不计算') GROUP BY ylxx.c_bh_xm ORDER BY avg_num ASC LIMIT %d) aaa LEFT JOIN db_apitesting.t_at_xmxx xmxx ON aaa.xmbh = xmxx.c_bh ORDER BY aaa.avg_num ASC;" % limit
        pass_rate_sql = "SELECT xmxx.c_xmmc AS type, aaa.avg_num AS value, aaa.sold_text as sold FROM (SELECT ylxx.c_bh_xm AS xmbh, Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2) AS avg_num, concat(Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2), '%%') AS sold_text FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl GROUP BY ylxx.c_bh_xm ORDER BY avg_num ASC LIMIT %d) aaa LEFT JOIN db_apitesting.t_at_xmxx xmxx ON aaa.xmbh = xmxx.c_bh ORDER BY aaa.avg_num ASC;" % limit
    elif tjfs == 3:
        cov_rate_sql = "SELECT ylxx.c_code_team AS type, Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2) AS value, concat(Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2), '%%') AS sold FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl WHERE zxxx.c_fgl NOT IN ('计算中', '异常中断,未计算', '不计算') GROUP BY ylxx.c_code_team ORDER BY value ASC LIMIT %d;" % limit
        pass_rate_sql = "SELECT ylxx.c_code_team AS type, Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2) AS value, concat(Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2), '%%') AS sold FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl GROUP BY ylxx.c_code_team ORDER BY value ASC LIMIT %d;" % limit
    elif tjfs == 4:
        cov_rate_sql = "SELECT ylxx.c_test_team AS type, Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2) AS value, concat(Round(AVG(CAST(LEFT(zxxx.c_fgl, -1) AS numeric)), 2), '%%') AS sold FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl WHERE zxxx.c_fgl NOT IN ('计算中', '异常中断,未计算', '不计算') GROUP BY ylxx.c_test_team ORDER BY value ASC LIMIT %d;" % limit
        pass_rate_sql = "SELECT ylxx.c_test_team AS type, Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2) AS value, concat(Round(AVG(CAST(LEFT(zxxx.c_tgl, -1) AS numeric)), 2), '%%') AS sold FROM db_apitesting.t_at_ylxx ylxx INNER JOIN db_apitesting.t_at_zxxx zxxx ON ylxx.c_bh = zxxx.c_bh_yl GROUP BY ylxx.c_test_team ORDER BY value ASC LIMIT %d;" % limit
    else:
        abort(400)
    try:
        cov_rate_res = all_dbc.pg_select_operator(cov_rate_sql)
        pass_rate_res = all_dbc.pg_select_operator(pass_rate_sql)
    except Exception as eee:
        logger.error('首页统计sql报错:' + str(eee))
        logger.exception(eee)
        return jsonify({'result': 'fail', 'msg': str(eee)})
    if tjfs == 3 or tjfs == 4:
        for info_cov in cov_rate_res:
            try:
                name = cache_tool.res_cache(info_cov['type'])[0]
            except Exception:
                name = info_cov['type']  # fall back to the raw team code if it is not cached
            info_cov['type'] = name
        for info_pass in pass_rate_res:
            try:
                name = cache_tool.res_cache(info_pass['type'])[0]
            except Exception:
                name = info_pass['type']
            info_pass['type'] = name
    return jsonify([cov_rate_res, pass_rate_res])
Example #12
def zx_qqxx_list(zxbh, pagenum):  # request-info list query: returns total pages, total count and the current page's rows
    sql_list = "SELECT n_xh as xh, n_qqmc as mc, c_qqdz as dz, c_xysj as xysj,to_char(zx.dt_zxsj, 'yyyy-mm-dd hh24:mi:ss') as zxsj, n_jkzt as jkzt, c_yzjg as jzjg, "\
        "c_qqcs as qqcs, c_yqfhz as yqfhz, c_sjfhz as sjfhz FROM db_apitesting.t_at_qqxx "\
            "WHERE c_bh_zx = '%s' ORDER BY n_xh ASC LIMIT %s OFFSET %s;" % (zxbh, 10, (int(pagenum) - 1)*10)
    sql_count = "SELECT count(1) as counts FROM db_apitesting.t_at_qqxx "\
            "WHERE c_bh_zx = '%s';" % zxbh
    try:
        project_dict = {}
        res_list = all_dbc.pg_select_operator(sql_list)
        res_count = all_dbc.pg_select_operator(sql_count)
        project_dict['maxpage'] = math.ceil(res_count[0]['counts']/10)
        project_dict['maxsize'] = res_count[0]['counts']
        project_dict['nowpage'] = int(pagenum)
        project_dict['reslist'] = res_list
        return jsonify(project_dict)
    except Exception as eee:
        logger.error(eee)
        return jsonify({'result': 'fail'})
Example #13
def ci_yl_list(xmurl):
    yl_list_sql = "SELECT yl.c_bh AS key, yl.c_ylmc AS title FROM db_apitesting.t_at_xmxx xm left join db_apitesting.t_at_ylxx yl on xm.c_bh = yl.c_bh_xm WHERE xm.c_url = '%s';" % xmurl
    try:
        yl_list_res = all_dbc.pg_select_operator(yl_list_sql)
    except Exception as eee:
        logger.error('CI用例信息查询失败:' + str(eee))
        logger.exception(eee)
        return jsonify({'result': 'fail'})
    return jsonify(yl_list_res)
Example #14
def sql(yl_id):
    ylinfo_sql = "SELECT c_bclj as yllj, c_api_count AS api_docs FROM db_apitesting.t_at_ylxx WHERE c_bh = '%s';" % yl_id
    try:
        ylinfo_res = all_dbc.pg_select_operator(ylinfo_sql)
    except Exception as eee:
        logger.error('CI执行查询用例路径失败:' + str(eee))
        logger.exception(eee)
        return jsonify({'result': 'fail'})
    return jsonify(ylinfo_res)
def uploadfile():  # upload a case file
    if 'file' not in request.files or 'xmdz' not in request.form:
        abort(400)
    xmdz = request.form.get('xmdz')
    file = request.files.get('file')
    sql_xm_bh = "SELECT c_bh, c_code_team AS code, c_test_team AS test FROM db_apitesting.t_at_xmxx WHERE c_url = '%s';" % (
        xmdz)
    try:
        xm_info = all_dbc.pg_select_operator(sql_xm_bh)
        if len(xm_info) == 0:
            return jsonify({'result': 'fail', 'msg': '项目不存在'})
        else:
            xmbh = xm_info[0]['c_bh']
    except Exception as eee:
        logger.error('根据项目地址查询项目编号失败' + str(eee))
        return jsonify({'result': 'fail', 'msg': str(eee)})
    all_name = os.path.splitext(file.filename)
    file_name = all_name[-2]
    Suffix = all_name[-1]
    if Suffix in app.config['ALLOWED_EXTENSIONS']:
        file_uuid = base_tool.next_id()
        newpath = os.path.join(app.config['UPLOAD_FOLDER'], xmdz)
        if not os.path.isdir(newpath):
            os.makedirs(newpath)
        newpath_all = os.path.join(newpath, str(file_uuid + '.xlsx'))
        try:
            file.save(newpath_all)
        except Exception as eee:
            logger.error('文件保存失败: ' + str(eee))
            return jsonify({'result': 'fail', 'msg': str(eee)})
        # Parse the parameters; if parsing fails, do not save the case
        try:
            cs_res = panda_for_web.read_keylist(newpath_all)
        except Exception as eee:
            return jsonify({'result': 'fail', 'msg': '解析参数失败,请检查参数sheet页'})
        ylbh = base_tool.next_id()
        sql_addyl = "INSERT INTO db_apitesting.t_at_ylxx(c_bh, c_bh_xm, c_ylmc, dt_scsj, dt_zxzxsj, n_zxcs, c_bclj, c_edit_key, c_sfbj, dt_gxsj, c_code_team, c_test_team) "\
                    "VALUES ('%s', '%s', '%s', now(), NULL, 0, '%s', '%s', 2, now(), '%s', '%s');" % (ylbh, xmbh, file_name, newpath_all, ylbh, xm_info[0]['code'], xm_info[0]['test'])
        try:
            all_dbc.pg_insert_operator(sql_addyl)
        except Exception as eee:
            logger.error('插入用例信息错误:' + str(eee))
            return jsonify({'result': 'fail'})
        intnum = 1
        for num in cs_res:
            sql_addcs = "INSERT INTO db_apitesting.t_at_zxcs(c_bh, c_bh_yl, c_key, c_value, n_xh) VALUES "\
            "('%s', '%s', '%s', '%s', '%s')" % (base_tool.next_id(), ylbh, num, cs_res[num], intnum)
            try:
                all_dbc.pg_insert_operator(sql_addcs)
            except Exception as eee:
                logger.error('插入参数报错:' + str(eee))
                return jsonify({'result': 'fail', 'msg': '参数插入失败'})
            intnum = intnum + 1
        return jsonify({'result': 'success'})
    else:
        return jsonify({'result': 'fail', 'msg': '文件类型错误,仅支持.xlsx格式'})
Example #16
def update_time_order():
    '''
    Get the 5 records with the most recent update time
    '''
    sql_time_order = "SELECT ylxx.c_bh AS key, xmxx.c_xmmc AS name, ylxx.c_ylmc AS ylmc, to_char(ylxx.dt_gxsj, 'yyyy-mm-dd hh24:mi:ss') AS time FROM db_apitesting.t_at_ylxx ylxx,db_apitesting.t_at_xmxx xmxx WHERE ylxx.c_bh_xm = xmxx.c_bh AND ylxx.dt_gxsj IS NOT NULL ORDER BY ylxx.dt_gxsj DESC LIMIT 5;"
    try:
        project_dict = all_dbc.pg_select_operator(sql_time_order)
        return jsonify(project_dict)
    except Exception as eee:
        logger.error('项目列表接口报错:' + str(eee))
        return jsonify({'result': 'fail', 'msg': str(eee)})
Example #17
def run_time_order():
    '''
    Get the 5 records with the most recent execution time
    '''
    sql_time_order = "SELECT aaa.c_bh AS key, xmxx.c_xmmc AS name, aaa.c_ylmc AS ylmc, aaa.c_tgl AS tgl, aaa.c_fgl AS fgl, aaa.time FROM (SELECT zxxx.c_bh, ylxx.c_bh_xm, ylxx.c_ylmc, zxxx.c_fgl, zxxx.c_tgl, to_char(zxxx.dt_zxsj, 'yyyy-mm-dd hh24:mi:ss') AS time FROM db_apitesting.t_at_zxxx zxxx LEFT JOIN db_apitesting.t_at_ylxx ylxx ON zxxx.c_bh_yl = ylxx.c_bh WHERE zxxx.dt_zxsj IS NOT NULL AND zxxx.n_zt > '0' ORDER BY zxxx.dt_zxsj DESC LIMIT 5) aaa LEFT JOIN db_apitesting.t_at_xmxx xmxx ON aaa.c_bh_xm = xmxx.c_bh ORDER BY aaa.time DESC;"
    try:
        project_dict = all_dbc.pg_select_operator(sql_time_order)
        return jsonify(project_dict)
    except Exception as eee:
        logger.error('项目列表接口报错:' + str(eee))
        return jsonify({'result': 'fail', 'msg': str(eee)})
def project_list():
    username = login_tool.login_tools().get_username().decode('utf-8')
    xmmc = cache_tool.res_cache(username)
    if xmmc[0] is None:
        sql = "SELECT c_bh as xmbh,c_xmmc as xmmc,c_url as xmdz FROM db_apitesting.t_at_xmxx"
        sql_count = "SELECT count(1) as counts FROM db_apitesting.t_at_xmxx"
    else:
        xmmc = xmmc[0].replace(',', '\',\'')
        xmmc = '\'' + str(xmmc) + '\''
        sql = "SELECT c_bh as xmbh,c_xmmc as xmmc,c_url as xmdz FROM db_apitesting.t_at_xmxx WHERE c_url IN (%s)" % xmmc
        sql_count = "SELECT count(1) as counts FROM db_apitesting.t_at_xmxx WHERE c_url IN (%s)" % xmmc
    try:
        project_dict = {}
        res_list = all_dbc.pg_select_operator(sql)
        res_count = all_dbc.pg_select_operator(sql_count)
        project_dict['maxsize'] = res_count[0]['counts']
        project_dict['reslist'] = res_list
        return jsonify(project_dict)
    except Exception as eee:
        logger.error('项目列表接口报错:' + str(eee))
        return jsonify({'result': 'fail'})        
Example #19
def add_cache():
    with app.app_context():
        cache_local.clear()
    user_xm_sql = "SELECT c_username as user, c_xmxx as xm, c_name as name FROM db_apitesting.t_zx_user;"
    user_xm = all_dbc.pg_select_operator(user_xm_sql)
    org_info_sql = "SELECT c_bh AS id, c_mc AS mc, c_pid AS pid FROM db_apitesting.t_at_org;"
    org_info = all_dbc.pg_select_operator(org_info_sql)
    org_pid_sql = "SELECT c_bh AS id, c_pid AS pid FROM db_apitesting.t_at_org ORDER BY c_pid;"
    org_pid = all_dbc.pg_select_operator(org_pid_sql)
    if len(org_pid) != 0:
        org_group = {}
        for info in org_pid:
            if info['pid'] == '':
                org_group[info['id']] = []
            elif info['pid'] not in org_group.keys():
                org_group[info['pid']] = [info['id']]
            else:
                org_group[info['pid']].append(info['id'])
                # org_group[info['pid']] = org_group[info['pid']].append(info['id'])
        # for info in org_group:
        with app.app_context():
            cache_local.set('org_group', org_group)
    if len(org_info) != 0:
        for info in org_info:
            with app.app_context():
                cache_local.set(info['id'], [info['mc'], info['pid']])
    # logger.info(str(user_xm))
    logger.info('··开始加载用户信息缓存···' + str(cache_local))
    if len(user_xm) != 0:
        for info in user_xm:
            with app.app_context():
                cache_local.set(info['user'], [info['xm'], info['name']])
    with app.app_context():
        cache_local.set('localip', get_host_ip())
        logger.info('···用户缓存加载完毕···' + str(cache_local))
        logger.info(cache_local.get('q'))
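The org_group structure built above is a plain parent-to-children index: root rows (empty pid) get an empty entry keyed by their own id, and every other row is appended under its parent. A condensed sketch of the same grouping, assuming rows shaped like the org_pid query result:

from collections import defaultdict

def build_org_group(org_rows):
    # Index organisations by parent id; roots keep an (initially empty) entry of their own
    group = defaultdict(list)
    for row in org_rows:
        if row['pid'] == '':
            group.setdefault(row['id'], [])
        else:
            group[row['pid']].append(row['id'])
    return dict(group)

rows = [{'id': 'a', 'pid': ''}, {'id': 'b', 'pid': 'a'}, {'id': 'c', 'pid': 'a'}]
print(build_org_group(rows))   # {'a': ['b', 'c']}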
Example #20
def sjfb():
    if not request.args or 'zxid' not in request.args:
        abort(400)
    zxid = request.args.get('zxid')
    sql_sjfb = "select n_qqmc as date, c_xysj as actual, 3000 as expected from db_apitesting.t_at_qqxx where c_bh_zx = '%s' order by n_xh asc;" % zxid
    try:
        sql_sjfb_res = all_dbc.pg_select_operator(sql_sjfb)
    except Exception as eee:
        logger.error('查询分布时间的sql错误:' + str(eee))
        return jsonify({'result': 'fail', 'msg': '查询失败'})
    for num in range(len(sql_sjfb_res)):
        if str(sql_sjfb_res[num]['actual']) == '0':
            sql_sjfb_res[num]['actual'] = 0
        else:
            sql_sjfb_res[num]['actual'] = int(
                float((sql_sjfb_res[num]['actual'])[:-1]) * 1000)
    return jsonify(sql_sjfb_res)
Example #21
def view_page(ylbh, ylmc):
    info = {}
    info['ylbh'] = ylbh
    ip = cache_tool.res_cache('localip')
    wjlj = 'http://' + str(ip) + ':8585/downloadfile/' + str(ylbh)
    # wjlj = 'http://' + str(ip) + '/apitest/downloadfile/' + str(ylbh)  # production setting
    edit_key_sql = "SELECT c_edit_key as key FROM db_apitesting.t_at_ylxx WHERE c_bh = '%s'" % (
        ylbh)
    try:
        edit_key = all_dbc.pg_select_operator(edit_key_sql)
    except Exception as eee:
        logger.error('查询edit_key报错:' + str(eee))
        edit_key = [{'key': ''}]  # fall back so the access below does not raise NameError
    logger.warning(wjlj)
    info['lj'] = wjlj
    info['key'] = edit_key[0]['key']
    username = login_tool.login_tools().get_username().decode('utf-8')
    info['name'] = username
    return render_template('view.html', title=ylmc, info=info)
Example #22
def ylzx_info(zxid, pagenum):  # list query: returns total pages, total count and the current page's rows; supports a status filter
    zt = request.args.get('zt')  # 1: all, 2: request succeeded, 3: request failed, 4: verification passed, 5: verification failed
    gjz = request.args.get('gjz')  # search keyword
    if zt == '1':
        tj = ''
    elif zt == '2':
        tj = " AND n_jkzt = '200'"
    elif zt == '3':
        tj = " AND n_jkzt != '200'"
    elif zt == '4':
        tj = " AND c_yzjg != '不通过'"
    elif zt == '5':
        tj = " AND c_yzjg = '不通过'"
    else:
        return jsonify({'result': 'fail', 'msg': '参数不正确'})
    sql_zt = "SELECT n_zt as zt, c_jd as jd FROM db_apitesting.t_at_zxxx WHERE c_bh = '%s';" % zxid
    sql_num_base = "SELECT count(1) as counts FROM db_apitesting.t_at_qqxx WHERE c_bh_zx = '%s'" % zxid
    sql_allnum_base = "SELECT count(1) as counts FROM db_apitesting.t_at_qqxx WHERE c_bh_zx = '%s'" % zxid
    sql_num_base_cg = "SELECT count(1) as counts FROM db_apitesting.t_at_qqxx WHERE c_bh_zx = '%s' AND n_jkzt = '200'" % zxid
    sql_num_base_tg = "SELECT count(1) as counts FROM db_apitesting.t_at_qqxx WHERE c_bh_zx = '%s' AND c_yzjg != '不通过'" % zxid
    sql_base = "SELECT c_bh as key, n_qqmc as name, c_qqdz as url, c_xysj as xysj, to_char(dt_zxsj, 'yyyy-mm-dd hh24:mi:ss') as zxsj, "\
                "n_jkzt as jkzt, c_yzjg as yzjg, c_qqcs as cs, c_yqfhz as yqfhz, c_sjfhz as sjfhz, c_matchinfo as matchinfo, c_ycxx as ycxx "\
                "FROM db_apitesting.t_at_qqxx WHERE c_bh_zx = '%s'" % zxid
    like_text = " and n_qqmc like '%s'" % str("%" + gjz + "%")
    paga = " order by n_xh LIMIT 10 OFFSET " + str((int(pagenum) - 1) * 10)
    if len(gjz) == 0:
        sql_allnum = sql_allnum_base
        sql_num = sql_num_base + tj
        sql_all = sql_base + tj + paga
        sql_num_cg = sql_num_base_cg
        sql_num_tg = sql_num_base_tg
    else:
        sql_allnum = sql_allnum_base + like_text
        sql_num = sql_num_base + tj + like_text
        sql_all = sql_base + tj + like_text + paga
        sql_num_cg = sql_num_base_cg + like_text
        sql_num_tg = sql_num_base_tg + like_text
    try:
        list_allnum = all_dbc.pg_select_operator(sql_allnum)
        list_con = all_dbc.pg_select_operator(sql_num)
        list_all = all_dbc.pg_select_operator(sql_all)
        list_cg = all_dbc.pg_select_operator(sql_num_cg)
        list_tg = all_dbc.pg_select_operator(sql_num_tg)
        zx_nt = all_dbc.pg_select_operator(sql_zt)
    except Exception as eee:
        logger.error(eee)
        return jsonify({'result': 'fail', 'msg': str(eee)})
    project_dict = {}
    project_dict['zt'] = zx_nt[0]['zt']
    project_dict['jd'] = zx_nt[0]['jd']
    project_dict['maxpage'] = math.ceil(list_con[0]['counts'] / 10)
    project_dict['maxsize'] = list_con[0]['counts']
    data_list = [
        list_allnum[0]['counts'], list_cg[0]['counts'],
        int(list_allnum[0]['counts']) - int(list_cg[0]['counts']),
        list_tg[0]['counts'],
        int(list_allnum[0]['counts']) - int(list_tg[0]['counts'])
    ]
    for idx, val in enumerate(data_list):
        if val < 0:
            data_list[idx] = 0
    project_dict['counts'] = data_list
    project_dict['nowpage'] = int(pagenum)
    for info in list_all:
        re_name = ['key', 'cs', 'yqfhz', 'sjfhz', 'matchinfo', 'ycxx']
        re_dict = {}
        for n in re_name:
            re_dict[n] = info[n]
            if n != 'key':
                del info[n]
        info['innerlist'] = [re_dict]
    project_dict['reslist'] = list_all
    return jsonify(project_dict)
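The last loop in ylzx_info reshapes each row so the wide detail columns live in a nested innerlist entry that the front end can expand. That reshaping on its own, as a sketch over plain dicts:

DETAIL_KEYS = ['key', 'cs', 'yqfhz', 'sjfhz', 'matchinfo', 'ycxx']

def split_detail(row):
    # Copy the detail columns into row['innerlist'] and drop them from the
    # top level (the 'key' column is kept in both places)
    detail = {k: row[k] for k in DETAIL_KEYS}
    for k in DETAIL_KEYS:
        if k != 'key':
            del row[k]
    row['innerlist'] = [detail]
    return row

row = {'key': '1', 'name': 'login', 'url': '/login', 'cs': 1,
       'yqfhz': '{}', 'sjfhz': '{}', 'matchinfo': '', 'ycxx': ''}
print(split_detail(row))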
Example #23
def tjxx():
    if not request.args or 'zxid' not in request.args:
        abort(400)
    # Look up the success/pass counters for this execution (zxid)
    zxid = request.args.get('zxid')
    sql_tjxx = "select  c_cg as cg, c_wcg as wcg, c_tg as tg, c_wtg as wtg from db_apitesting.t_at_zxxx where c_bh = '%s';" % zxid
    try:
        sql_tjxx_res = all_dbc.pg_select_operator(sql_tjxx)
    except Exception as eee:
        logger.error('查询通过率和成功率的sql错误:' + str(eee))
        return jsonify({'result': 'fail', 'msg': '查询失败'})
    res_info = [[{
        'item': '调用成功',
        'count': 0,
        'percent': 0
    }, {
        'item': '调用失败',
        'count': 0,
        'percent': 0
    }],
                [{
                    'item': '验证通过',
                    'count': 0,
                    'percent': 0
                }, {
                    'item': '验证失败',
                    'count': 0,
                    'percent': 0
                }]]
    # An empty result means the query found nothing; for now this covers the case where the data has not been generated yet
    if len(sql_tjxx_res) == 0:
        return jsonify(res_info)
    try:
        res_info[0][0]['count'] = int(sql_tjxx_res[0]['cg'])
    except Exception as eee:
        logger.error('统计信息查询结果:' + str(sql_tjxx_res))
        res_info[0][0]['count'] = 0
    try:
        res_info[0][0]['percent'] = round(
            int(sql_tjxx_res[0]['cg']) /
            (int(sql_tjxx_res[0]['cg']) + int(sql_tjxx_res[0]['wcg'])), 4)
    except:
        res_info[0][0]['percent'] = 0
    res_info[0][1]['count'] = int(sql_tjxx_res[0]['wcg'])
    try:
        res_info[0][1]['percent'] = round(
            (1 - int(sql_tjxx_res[0]['cg']) /
             (int(sql_tjxx_res[0]['cg']) + int(sql_tjxx_res[0]['wcg']))), 4)
    except:
        res_info[0][1]['percent'] = 0
    res_info[1][0]['count'] = int(sql_tjxx_res[0]['tg'])
    try:
        res_info[1][0]['percent'] = round(
            int(sql_tjxx_res[0]['tg']) /
            (int(sql_tjxx_res[0]['tg']) + int(sql_tjxx_res[0]['wtg'])), 4)
    except:
        res_info[1][0]['percent'] = 0
    res_info[1][1]['count'] = int(sql_tjxx_res[0]['wtg'])
    try:
        res_info[1][1]['percent'] = round(
            (1 - int(sql_tjxx_res[0]['tg']) /
             (int(sql_tjxx_res[0]['tg']) + int(sql_tjxx_res[0]['wtg']))), 4)
    except:
        res_info[1][1]['percent'] = 0
    return jsonify(res_info)
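Each percent field in tjxx is guarded by a try/except around a possible division by zero. The same guard written explicitly, as a small sketch:

def safe_percent(part, other, digits=4):
    # round(part / (part + other), digits), or 0 when both counters are zero
    total = int(part) + int(other)
    return round(int(part) / total, digits) if total else 0

print(safe_percent('3', '1'))   # 0.75
print(safe_percent('0', '0'))   # 0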
def updatefile():  # upload a replacement case file
    if 'file' not in request.files or 'ylbh' not in request.form:
        abort(400)
    ylbh = request.form.get('ylbh')
    file = request.files.get('file')
    sql_yl_bh = "SELECT c_bclj as yllj FROM db_apitesting.t_at_ylxx WHERE c_bh = '%s'" % (
        ylbh)
    try:
        yl_old = all_dbc.pg_select_operator(sql_yl_bh)
        if len(yl_old) == 0:
            return jsonify({'result': 'fail', 'msg': '用例信息已不存在,请刷新页面'})
        else:
            yllj_old = yl_old[0]['yllj']
    except Exception as eee:
        logger.error('更新用例时,查询失败:' + str(eee))
        return jsonify({'result': 'fail', 'msg': str(eee)})
    # Save the new case file
    # Get the original save path
    p_old, f = os.path.split(yllj_old)
    all_name = os.path.splitext(file.filename)
    # file_name = all_name[-2]
    Suffix = all_name[-1]
    if Suffix in app.config['ALLOWED_EXTENSIONS']:
        # Delete the previous case file
        if os.path.exists(yllj_old):
            try:
                os.remove(yllj_old)
            except Exception as eee:
                logger.error('删除原用例失败:' + str(eee))
                return jsonify({'result': 'fail', 'msg': str(eee)})
        file_uuid = base_tool.next_id()
        newpath = p_old
        if not os.path.isdir(newpath):
            os.makedirs(newpath)
        newpath_all = os.path.join(newpath, str(file_uuid + '.xlsx'))
        try:
            file.save(newpath_all)
        except Exception as eee:
            logger.error('文件保存失败: ' + str(eee))
            return jsonify({'result': 'fail', 'msg': str(eee)})
        # Parse the parameters; if parsing fails, do not save the case
        try:
            cs_res = panda_for_web.read_keylist(newpath_all)
        except Exception as eee:
            return jsonify({'result': 'fail', 'msg': '解析参数失败,请检查参数sheet页'})
        # Delete the previous parameters
        cs_del_sql = "DELETE FROM db_apitesting.t_at_zxcs WHERE c_bh_yl = '%s'" % ylbh
        try:
            all_dbc.pg_delete_operator(cs_del_sql)
        except Exception as eee:
            logger.error('删除参数出错:' + str(eee))
            return jsonify({'result': 'fail', 'msg': '删除参数报错'})
        intnum = 1
        for num in cs_res:
            sql_addcs = "INSERT INTO db_apitesting.t_at_zxcs(c_bh, c_bh_yl, c_key, c_value, n_xh) VALUES "\
            "('%s', '%s', '%s', '%s', '%s')" % (base_tool.next_id(), ylbh, num, cs_res[num], intnum)
            try:
                all_dbc.pg_insert_operator(sql_addcs)
            except Exception as eee:
                logger.error('插入参数报错:' + str(eee))
                return jsonify({'result': 'fail', 'msg': '参数插入失败'})
            intnum = intnum + 1
        try:
            new_edit_key = base_tool.next_id()
            sql_addyl = "UPDATE db_apitesting.t_at_ylxx SET c_bclj = '%s', c_edit_key = '%s', dt_gxsj = now() WHERE c_bh = '%s'" % (
                newpath_all,
                new_edit_key,
                ylbh,
            )
            all_dbc.pg_insert_operator(sql_addyl)
        except Exception as eee:
            logger.error('插入用例信息错误:' + str(eee))
            return jsonify({'result': 'fail'})
        return jsonify({'result': 'success'})
    else:
        return jsonify({'result': 'fail', 'msg': '文件类型错误,仅支持.xlsx格式'})
Example #25
def ci_run(ci_id):
    if not request.json:
        logger.error('CI调用失败,请求参数' + str(request))
        abort(400)
    cs_info = request.json
    yl_list_sql = "SELECT c_yl_list AS list FROM db_apitesting.t_at_ci WHERE c_bh = '%s';" % ci_id
    try:
        yl_list_res = all_dbc.pg_select_operator(yl_list_sql)
    except Exception as eee:
        logger.error('CI执行用例查询失败:' + str(eee))
        logger.exception(eee)
        return jsonify({'result': 'fail'})
    if len(yl_list_res) == 0 or len(yl_list_res[0]['list']) == 0:
        return jsonify({'result': 'fail', 'msg': 'CI设置不存在或绑定用例为空'})
    else:
        yl_list = re.split(',', yl_list_res[0]['list'])
        for yl_id in yl_list:
            ylinfo_sql = "SELECT c_bclj as yllj, c_api_count AS api_docs FROM db_apitesting.t_at_ylxx WHERE c_bh = '%s';" % yl_id
            yl_cs_sql = "SELECT c_key AS key, c_value AS value FROM db_apitesting.t_at_zxcs WHERE c_bh_yl = '%s';" % yl_id
            try:
                ylinfo_res = all_dbc.pg_select_operator(ylinfo_sql)
                yl_cs_res = all_dbc.pg_select_operator(yl_cs_sql)
            except Exception as eee:
                logger.error('CI执行查询用例路径失败:' + str(eee))
                logger.exception(eee)
                return jsonify({'result': 'fail'})
            new_kv = {}
            logger.info(str(yl_cs_res))
            for cs in yl_cs_res:
                new_kv[cs['key']] = cs['value']
            if len(ylinfo_res) != 1:
                return jsonify({'result': 'fail', 'msg': '用例可能已经不存在了'})
            else:
                yl_path = ylinfo_res[0]['yllj']
                api_docs = ylinfo_res[0]['api_docs']
                if str(api_docs) == 'null' or api_docs == '':
                    api_docs = ''
                try:
                    sfjs = cs_info['sfjs']
                    if str(sfjs) == '0':
                        sfjs = True
                    else:
                        sfjs = False
                except:
                    sfjs = False
                try:
                    case_info_all = panda_for_web.read_case(yl_path)
                except Exception as eee:
                    logger.error('用例读取失败:' + str(eee))
                    return jsonify({
                        'result': 'fail',
                        'msg': '用例文件不存在或用例内容不正确,请重新上传'
                    })
                if case_info_all is False:
                    return jsonify({
                        'result': 'fail',
                        'msg': '用例文件不存在或用例内容不正确,请重新上传'
                    })
                case_info_all.insert(0, new_kv)
                try:
                    db_case = db_clints_for_web.db_clints(case_info_all[2])
                except Exception as eee:
                    return jsonify({'result': 'fail', 'msg': str(eee)})
                if db_case is False:
                    return jsonify({
                        'result': 'fail',
                        'msg': '数据库连接创建失败,请检查用例中的数据库配置'
                    })
                logger.info("开始关闭数据链接")
                db_clints_for_web.db_tools().db_close()
                logger.info("完成关闭数据链接")
                zx_num = "UPDATE db_apitesting.t_at_ylxx SET n_zxcs = n_zxcs + 1 WHERE c_bh = '%s';" % yl_id
                try:
                    all_dbc.pg_update_operator(zx_num)
                except Exception as eee:
                    logger.error('更新执行次数失败' + str(eee))
                    return jsonify({'result': 'fail'})
                zxid = base_tool.next_id()
                zxjl_sql = "INSERT INTO db_apitesting.t_at_zxxx(c_bh, c_bh_yl, dt_zxsj, c_fgl, c_cgl, c_tgl, n_zt, c_cg, c_wcg, c_tg, c_wtg, c_sfci) VALUES"\
                            " ('%s', '%s', now(), '0%%', '0%%', '0%%', 0, 0, 0, 0, 0, 0);" % (zxid, yl_id)
                try:
                    all_dbc.pg_insert_operator(zxjl_sql)
                except Exception as eee:
                    logger.error('新增执行信息失败' + str(eee))
                    return jsonify({'result': 'fail'})
                logger.info('···开始调用线程池···')
                logger.info(str(sfjs) + str(zxid) + str(api_docs))
                run_caselist_new.run_caselist(zxid, yl_id, case_info_all,
                                              api_docs, sfjs, cs_info)
                logger.info('···调用线程池完毕···')
        ci_up_sql = "UPDATE db_apitesting.t_at_ci SET c_zxcs = c_zxcs + 1, dt_zxdysj = now() WHERE c_bh = '%s';" % ci_id
        try:
            all_dbc.pg_update_operator(ci_up_sql)
        except Exception as eee:
            logger.error('更新CI信息失败' + str(eee))
            logger.exception(eee)
            return jsonify({'result': 'fail'})
        return jsonify({'result': 'success', 'msg': 'CI执行成功'})
Example #26
def office_update():
    # logger.info(request.json)
    if request.json['status'] == 2:
        logger.info('编辑结束,开始保存用例···')
        file_id = request.json['key']
        file = requests.get(request.json['url'], stream=False)
        sql_yl_bh = "SELECT c_bh as ylbh, c_bclj as yllj FROM db_apitesting.t_at_ylxx WHERE c_edit_key = '%s'" % (
            file_id)
        try:
            yl_old = all_dbc.pg_select_operator(sql_yl_bh)
            yllj_old = yl_old[0]['yllj']
            yl_bh = yl_old[0]['ylbh']
        except Exception as eee:
            logger.error('更新用例时,查询失败:' + str(eee))
            return jsonify({'result': 'fail'})
        # Save the new case file
        # Get the original save path
        p_old, f = os.path.split(yllj_old)
        # Delete the previous case file
        if os.path.exists(yllj_old):
            try:
                os.remove(yllj_old)
            except Exception as eee:
                logger.error('删除原用例失败:' + str(eee))
        file_uuid = base_tool.next_id()
        newpath = p_old
        if not os.path.isdir(newpath):
            os.makedirs(newpath)
        newpath_all = os.path.join(newpath, str(file_uuid + '.xlsx'))
        try:
            with open(newpath_all, "wb") as f:
                f.write(file.content)
        except Exception as eee:
            logger.error('文件保存失败: ' + str(eee))
        # Parse the parameters; if parsing fails, do not save the case
        cs_res = panda_for_web.read_keylist(newpath_all)
        # Delete the previous parameters
        cs_del_sql = "DELETE FROM db_apitesting.t_at_zxcs WHERE c_bh_yl = '%s'" % yl_bh
        try:
            all_dbc.pg_delete_operator(cs_del_sql)
        except Exception as eee:
            logger.error('删除参数出错:' + str(eee))
        intnum = 1
        '''
        TODO: build the SQL statements outside the loop and execute them together
        '''
        for num in cs_res:
            sql_addcs = "INSERT INTO db_apitesting.t_at_zxcs(c_bh, c_bh_yl, c_key, c_value, n_xh) VALUES "\
            "('%s', '%s', '%s', '%s', '%s')" % (base_tool.next_id(), yl_bh, num, cs_res[num], intnum)
            # logger.info(sql_addcs)
            try:
                all_dbc.pg_insert_operator(sql_addcs)
            except Exception as eee:
                logger.error('插入参数报错:' + str(eee))
            intnum = intnum + 1
        try:
            sql_addyl = "UPDATE db_apitesting.t_at_ylxx SET dt_gxsj = now(), c_bclj = '%s', c_sfbj = 2, c_edit_key = '%s' WHERE c_bh = '%s'" % (
                newpath_all, str(base_tool.next_id())[:20], yl_old[0]['ylbh'])
            # logger.info(str(sql_addyl))
            all_dbc.pg_update_operator(sql_addyl)
        except Exception as eee:
            logger.error('插入用例信息错误:' + str(eee))
        logger.info('保存结束')
        return jsonify({"error": 0})
    elif request.json['status'] == 4:
        file_id = request.json['key']
        try:
            sql_upyl = "UPDATE db_apitesting.t_at_ylxx SET c_sfbj = 2 WHERE c_edit_key = '%s'" % (
                file_id)
            all_dbc.pg_update_operator(sql_upyl)
            return jsonify({"error": 0})
        except Exception as eee:
            logger.error('插入用例信息错误:' + str(eee))
            return jsonify({'result': 'fail'})
    else:
        return jsonify({"error": 0})
Example #27
def run_case():
    if not request.json or 'ylbh' not in request.json or 'list' not in request.json:
        abort(400)
    # Look up the case file for this ylbh
    ylbh = request.json['ylbh']
    cs_res = request.json['list']
    sfjs = request.json['sfjs']
    api_docs = request.json['api_docs']
    if sfjs:
        if str(api_docs) != '':
            if str(api_docs[:4]).upper() == 'HTTP':
                pass
            else:
                try:
                    api_docs = int(api_docs)
                except:
                    return jsonify({'result': 'fail', 'msg': '接口地址错误或数量书写不规范'})
        else:
            return jsonify({'result': 'fail', 'msg': '接口地址错误或数量书写不规范'})
    # Delete the previous parameters
    cs_del_sql = "DELETE FROM db_apitesting.t_at_zxcs WHERE c_bh_yl = '%s'" % ylbh
    try:
        all_dbc.pg_delete_operator(cs_del_sql)
    except Exception as eee:
        logger.error('删除参数信息报错:' + str(eee))
        return jsonify({'result': 'fail', 'msg': '删除参数失败'})
    intnum = 1
    new_kv = {}
    for num in range(len(cs_res)):
        sql_addcs = "INSERT INTO db_apitesting.t_at_zxcs(c_bh, c_bh_yl, c_key, c_value, n_xh) VALUES "\
                    "('%s', '%s', '%s', '%s', '%s')" % (base_tool.next_id(), ylbh, cs_res[num]['zxcs_key'], cs_res[num]['zxcs_value'], intnum)
        try:
            all_dbc.pg_insert_operator(sql_addcs)
            new_kv[cs_res[num]['zxcs_key']] = cs_res[num]['zxcs_value']
        except Exception as eee:
            logger.error('插入参数报错:' + str(eee))
            return jsonify({'result': 'fail', 'msg': '参数插入失败'})
        intnum = intnum + 1
    lj_sql = "SELECT c_bclj as yllj FROM db_apitesting.t_at_ylxx WHERE c_bh = '%s';" % ylbh
    try:
        yl_path = all_dbc.pg_select_operator(lj_sql)
        if len(yl_path) == 0:
            return jsonify({'result': 'fail', 'msg': '用例文件不存在,请重新上传'})
    except Exception as eee:
        logger.error('查询用例路径失败:' + str(eee))
        return jsonify({'result': 'fail', 'msg': str(eee)})
    # Read the file; the return format is [parameter dict, [total count, case info], database list]
    try:
        case_info_all = panda_for_web.read_case(yl_path[0]['yllj'])
        # logger.info(case_info_all)
    except Exception as eee:
        logger.error('用例读取失败:' + str(eee))
        return jsonify({'result': 'fail', 'msg': '用例文件不存在或用例内容不正确,请重新上传'})
    if case_info_all is False:
        return jsonify({'result': 'fail', 'msg': '用例文件不存在或用例内容不正确,请重新上传'})
    # Put the parameters at the head of the case info
    case_info_all.insert(0, new_kv)
    # logger.info(case_info_all)
    # Initialize the database connections used by the case
    try:
        db_case = db_clints_for_web.db_clints(case_info_all[2])

    except Exception as eee:
        return jsonify({'result': 'fail', 'msg': str(eee)})
    if db_case is False:
        return jsonify({'result': 'fail', 'msg': '数据库连接创建失败,请检查用例中的数据库配置'})
    logger.info("开始关闭数据链接")
    db_clints_for_web.db_tools().db_close()
    logger.info("完成关闭数据链接")
    # Update the execution count
    zx_num = "UPDATE db_apitesting.t_at_ylxx SET n_zxcs = n_zxcs + 1, c_api_count = '%s' WHERE c_bh = '%s';" % (
        api_docs, ylbh)
    try:
        all_dbc.pg_update_operator(zx_num)
    except Exception as eee:
        logger.error('更新执行次数失败' + str(eee))
        return jsonify({'result': 'fail', 'msg': str(eee)})
    '''
    # Create a child process
    ~~~~~ Creating a process copies the whole current process, including its database connections; when the main
    process closed (cause unknown, possibly a program error) the child's database connections broke ~~~~~
    ~~~~~ Changed to use a reserved thread from the thread pool instead, via the pool decorator provided by
    vthread: the decorated function is called directly from the main process ~~~~~
    p = Process(target=run_caselist_new.run_caselist, args=(zxid, ylbh, case_info_all, ))
    logger.info('Creating child process, main process id: ' + str(os.getpid()))
    try:
        p.start()
    except Exception as eaa:
        logger.error('Child process failed to start: ' + str(eaa))
        return jsonify({'result': 'fail', 'msg': str(eaa)})
    logger.info('Child process status: ' + str(p.is_alive()))
    '''
    # Insert the case execution record
    zxid = base_tool.next_id()
    zxjl_sql = "INSERT INTO db_apitesting.t_at_zxxx(c_bh, c_bh_yl, dt_zxsj, c_fgl, c_cgl, c_tgl, n_zt, c_cg, c_wcg, c_tg, c_wtg, c_sfci) VALUES"\
                " ('%s', '%s', now(), 0, 0, 0, 0, 0, 0, 0, 0, 1);" % (zxid, ylbh)
    try:
        all_dbc.pg_insert_operator(zxjl_sql)
    except Exception as eee:
        logger.error('插入执行信息失败' + str(eee))
        return jsonify({'result': 'fail', 'msg': str(eee)})
    # Start executing the case
    logger.info('···开始调用线程池···')
    run_caselist_new.run_caselist(zxid, ylbh, case_info_all, api_docs, sfjs)
    logger.info('···调用线程池完毕···')
    return jsonify({'result': 'success', 'msg': '成功', 'zxid': zxid})