Esempio n. 1
0
def get_server_list_table(business_type, datum):
    """Build an HtmlTable of per-server daily status (HTTP-200 ratio, records)."""
    table = HtmlTable()
    table.mtitle = u"服务器运行状况"
    table.mheader = [u"驻地", u"业务类型", "IP", u"200占比(%)", u"记录数"]
    table.msub = []

    # Restrict to one business type unless the first ("all") entry is selected.
    sql = "select Area, ISP, Type, IP, 100*Ratio, Records from view_servers_status "
    sql += "where Date='%s'" % datum
    if business_type != business_types[0]:
        sql += " and Type='%s'" % business_type

    logger.debug("Server List SQL - %s" % sql)

    cursor = connection.cursor()
    started = current_time()
    cursor.execute(sql)
    rows = cursor.fetchall()
    logger.info("execute sql:  %s, cost: %s" % (sql,
                                                (current_time() - started)))

    # One table row per server: "<area>_<isp>", type, IP, ratio (2 dp), records.
    table.msub = [["%s_%s" % (r[0], r[1]),
                   "%s" % r[2],
                   "%s" % r[3],
                   "%.2f" % float(r[4]),
                   "%s" % r[5]] for r in rows]

    return table
Esempio n. 2
0
def display_daily_reporter(request, dev=""):
    """Render the daily QoS report page.

    Reads the report filter parameters from the request, builds the two
    query-string suffixes (one per compared version) embedded in the report's
    links, fetches the report tables and renders the template.

    Fix: the first URL suffix previously ended with a stray trailing space,
    producing "...end_date=2016-01-01 " in generated links.
    """
    begin_time = current_time()
    (service_type, device_type, device_types,
            version, versions, version2, versions2, begin_date, end_date) \
        = get_report_filter_param_values(request, "playinfo")
    context = {}
    context['default_service_type'] = service_type
    context['service_types'] = ["B2B", "B2C"]
    context['default_device_type'] = device_type
    context['device_types'] = device_types
    context['default_version'] = version
    context['versions'] = versions
    context['default_version2'] = version2
    context['versions2'] = versions2
    context['default_begin_date'] = str(begin_date)
    context['default_end_date'] = str(end_date)

    # One suffix per compared version.
    urls_suffix = [
        'service_type=%s&device_type=%s&version=%s&begin_date=%s&end_date=%s'
        % (service_type, device_type, version, begin_date, end_date),
        'service_type=%s&device_type=%s&version=%s&begin_date=%s&end_date=%s'
        % (service_type, device_type, version2, begin_date, end_date),
    ]

    tables = get_daily_report_tables(urls_suffix, begin_date, end_date,
                                     service_type, device_type, version,
                                     version2)
    context['has_table'] = True
    context['tables'] = tables

    response = render_to_response('show_daily_report.html', context)
    logger.info("generate report, cost: %s" % (current_time() - begin_time))

    return response
Esempio n. 3
0
def prepare_hourly_ratio_history(ip, code, begin_date, end_date, xalis):
    """Collect hour-by-hour ratio (%) and record-count series for one server/code.

    Returns {0: ratio_strings, 1: record_strings}, each aligned with *xalis*
    (24 slots per day), or None when every fetched value is zero or missing.
    """
    slot_count = len(xalis)
    has_data = False
    cu = connection.cursor()

    def run(sql):
        # Execute and time one query, logging its cost like the rest of the module.
        started = current_time()
        cu.execute(sql)
        rows = cu.fetchall()
        logger.info("execute sql:  %s, cost: %s" % (sql,
                                                    (current_time() - started)))
        return rows

    # --- ratio series ---
    sql = "select Date, Hour, 100*Ratio from view_codeinfo "
    sql += "where IP='%s' and Date>='%s' and Date<='%s' " % (ip, begin_date,
                                                             end_date)
    sql += "and Hour<24 and Code=%d " % (code)

    logger.debug("Ratio SQL - %s" % sql)

    ratio_slots = [0.0] * slot_count
    for day, hour, ratio in run(sql):
        # Slot index: 24 per day past begin_date, plus the hour of day.
        slot = 24 * get_days_offset(begin_date, str(day)) + hour
        ratio_slots[slot] = "%.2f" % float(ratio)
        if ratio > 0:
            has_data = True

    # --- record-count series ---
    sql = "select Date, Hour, sum(Records) from view_codeinfo "
    sql += "where IP='%s' and Date>='%s' and Date<='%s' " % (ip, begin_date,
                                                             end_date)
    sql += "and Hour<24 group by Date, Hour"

    logger.debug("Records SQL - %s" % sql)

    record_slots = [0.0] * slot_count
    for day, hour, records in run(sql):
        slot = 24 * get_days_offset(begin_date, str(day)) + hour
        record_slots[slot] = "%d" % records
        if records > 0:
            has_data = True

    if not has_data:
        return None

    return {0: ['%s' % v for v in ratio_slots],
            1: ['%s' % v for v in record_slots]}
Esempio n. 4
0
def get_multi_qos_data(table,
                       view_types,
                       begin_date,
                       end_date,
                       service_type,
                       beta_ver,
                       master_ver,
                       p95_exception_value,
                       base_radis=1):
    """Average P25..P95 percentiles and mean time per (view type, version).

    Rows whose P95 falls below *p95_exception_value* are treated as bad
    samples and skipped.  Each returned row is
    [label, P25, P50, P75, P90, P95, AverageTime], values divided by the
    sample count and by *base_radis*.
    """
    percentile_fields = ("P25", "P50", "P75", "P90", "P95", "AverageTime")
    versions = [v for v in (beta_ver, master_ver) if len(v) > 0]
    qos_data = []

    for (view, second) in view_types:
        for ver in versions:
            begin_time = current_time()
            row = [0] * 7
            row[0] = u"%s-%s" % (ver, second)

            conditions = Q(ServiceType=service_type) & Q(DeviceType=ver)
            if service_type == "B2C":
                # B2C rows exist per ISP/area; only the aggregate rows count.
                conditions &= Q(ISP='all') & Q(Area='all')
            conditions &= Q(ViewType=view)
            conditions &= Q(Date__gte=begin_date) & Q(Date__lte=end_date)
            conditions &= Q(Hour=24)  # Hour==24 holds the whole-day aggregate

            sample_count = 0
            for item in table.objects.filter(conditions):
                if item.P95 < p95_exception_value:
                    continue  # implausibly small P95: discard this sample
                for col, field in enumerate(percentile_fields):
                    row[col + 1] += getattr(item, field)
                sample_count += 1
            if sample_count > 0:
                for col in range(6):
                    row[col + 1] = row[col + 1] / sample_count / base_radis

            qos_data.append(row)
            logger.info("execute sql: ver: %s, pnvalues, cost: %s" %
                        (ver, (current_time() - begin_time)))

    return qos_data
Esempio n. 5
0
def get_single_qos_data2(view_types, begin_date, end_date, service_type,
                         beta_ver, master_ver):
    """Average the single-value QoS metrics per (metric, version, view type).

    Metrics: first-buffer success ratio, no-stall ratio, stall-time ratio.
    Each returned row starts with "<metric label>-<version>" followed by one
    averaged value (3 dp) per view type, 0 when no samples exist.
    """
    metric_tables = [BestvFbuffer, BestvFluency, BestvFluency]
    metric_fields = ['SucRatio', 'Fluency', 'PRatio']
    metric_labels = [u'首次缓冲成功率', u'一次不卡比例', u'卡用户卡时间比']

    versions = [v for v in (beta_ver, master_ver) if len(v) > 0]
    qos_data = []

    for index, field in enumerate(metric_fields):
        for ver in versions:
            row = ["%s-%s" % (metric_labels[index], ver)]

            conditions = Q(ServiceType=service_type) & Q(DeviceType=ver)
            if service_type == "B2C":
                conditions &= Q(ISP='all') & Q(Area='all')
            conditions &= Q(
                ViewType__in=[view_type[0] for view_type in view_types])
            conditions &= Q(Date__gte=begin_date) & Q(Date__lte=end_date)
            conditions &= Q(Hour=24)

            begin_time = current_time()
            # Bucket the non-zero metric values by view type.
            samples = {}
            for (view, _) in view_types:
                samples[view] = []
            for item in metric_tables[index].objects.filter(conditions):
                value = getattr(item, field)
                if value:
                    samples[item.ViewType].append(value)
            for (view, _) in view_types:
                if samples[view]:
                    row.append(
                        float("%.3f" % (sum(samples[view]) /
                                        len(samples[view]))))
                else:
                    row.append(0)
                logger.info("qos count: ver: %s, get %s, count: %s" %
                            (ver, field, len(samples[view])))
            qos_data.append(row)
            logger.info("execute sql: ver: %s, get %s, cost: %s" %
                        (ver, field, (current_time() - begin_time)))

    return qos_data
Esempio n. 6
0
def get_tplayloading_data(begin_date, end_date, service_type, device_type,
                          version, version2, ttype):
    qos_data = []
    vers = []
    if len(version) > 0:
        vers.append(version)
    if version != version2:
        vers.append(version2)
    view_types = VIEW_TYPES[1:2]
    for (view, second) in view_types:
        for ver in vers:
            begin_time = current_time()
            temp = [0 for i in range(6)]
            temp[0] = u"%s_%s-%s" % (device_type, ver, second)

            try:
                q_conditions = Q(ServiceType=service_type)
                q_conditions = q_conditions & Q(DeviceType=device_type)
                q_conditions = q_conditions & Q(VersionType=ver)
                version_id = VersionInfo.objects.get(q_conditions)

                q_conditions = Q(VersionId=version_id)
                q_conditions = q_conditions & Q(ISP='all') & Q(Area='all')
                q_conditions = q_conditions & Q(Hour=24) & Q(ChokeType=ttype)
                q_conditions = q_conditions & Q(Date__gte=begin_date) & Q(
                    Date__lte=end_date)
                q_conditions = q_conditions & Q(ViewType=view)

                count = 0
                items = TPlayloadingInfo.objects.filter(q_conditions)
                for item in items:
                    if item.Records > 0:
                        temp[1] += item.P25
                        temp[2] += item.P50
                        temp[3] += item.P75
                        temp[4] += item.P90
                        temp[5] += item.P95
                        count += 1
                if count > 0:
                    for i in range(6):
                        temp[i + 1] = temp[i + 1] / count
            except Exception, e:
                logger.info("tplayloading sql query exception, %s" % e)

            qos_data.append(temp)
            logger.info(
                "execute tplayloading sql: ver: %s_%s, pnvalues, cost: %s" %
                (device_type, ver, (current_time() - begin_time)))
Esempio n. 7
0
def get_code_distribute(server_ip, begin_date, end_date):
    """Build the pie-chart item for one server's response-code distribution
    over a date range; returns None when no records exist."""
    sql = "select Code, sum(Records) from view_codeinfo "
    sql += "where IP='%s' " % (server_ip)
    sql += "and Date>='%s' and Date<='%s' " % (begin_date, end_date)
    sql += "and Hour<24 group by Code"

    logger.debug("Code Distribute SQL - %s" % sql)

    cu = connection.cursor()

    begin_time = current_time()
    cu.execute(sql)
    results = cu.fetchall()
    logger.info("execute sql:  %s, cost: %s" % (sql,
                                                (current_time() - begin_time)))

    # [code-as-string, record-count] pairs feed straight into the pie series.
    datas = [['%s' % code, int(records)] for (code, records) in results]
    if not datas:
        return None

    item = {"index": 0, "title": u"服务器响应状态码分布"}

    key_values = [datas]
    keys = [(0, u"占比")]

    series = []
    for (i, desc) in keys:
        series.append('''{
            type: 'pie',
            name: '%s',
            data: %s
        }''' % (desc, json.dumps(key_values[i])))

    item["series"] = ",".join(series)
    return item
Esempio n. 8
0
def process_multi_plot(request, table, title, subtitle, y_title, view_types, pn_types, base_radix=1):
    """Build plot items (hourly or daily percentile curves) for *table*.

    NOTE(review): this block appears truncated here — the ``except`` at the
    bottom closes the ``try``, and the function presumably continues below to
    assemble and return a template context (see the matching fragment later
    in this file).
    """
    begin_time = current_time()
    items = []

    try:
        # Resolve the filter parameters supplied with the request.
        (service_type, device_type, device_types,
            version, versions, begin_date, end_date) = get_filter_param_values(request)
        if device_type == "":
            raise NoDataError("No data between {0} and {1} in tplay_title".format(begin_date, end_date))

        # "All" means no specific version: query on the bare device type.
        if version == "All":
            device_type_full = device_type
        else:
            device_type_full = '{0}_{1}'.format(device_type, version)

        filter_params = FilterParams(table, service_type, device_type_full, begin_date, end_date)

        if begin_date == end_date:
            # A single day selected: plot the 24 hourly values.
            data_by_hour = prepare_pnvalue_hour_data(filter_params, view_types, pn_types, base_radix)

            if data_by_hour is None:
                raise NoDataError("No hour data between {0} and {1}".format(begin_date, end_date))

            # One plot item per view type that actually has data.
            item_idx = 0
            for (view_type_idx, view_des) in view_types:
                if view_type_idx not in data_by_hour:
                    continue

                item = make_plot_item(data_by_hour[view_type_idx], pn_types, item_idx, HOUR_X_AXIS, title,
                                      "%s %s" % (subtitle, view_des), y_title)
                if item:
                    items.append(item)
                    item_idx += 1
        else:
            # A date range: one point per day across the region.
            days_region = get_days_region(begin_date, end_date)
            data_by_day = prepare_pnvalue_daily_data(filter_params, days_region, view_types,
                                                     pn_types, base_radix)
            if data_by_day is None:
                raise NoDataError("No daily data between %s - %s" % (begin_date, end_date))

            # "2016-05-01" -> "0501" tick labels for the x axis.
            format_days_region = ["%s%s" % (i[5:7], i[8:10]) for i in days_region]
            item_idx = 0
            for (view_type_idx, view_des) in view_types:
                if view_type_idx not in data_by_day:
                    continue
                item = make_plot_item(data_by_day[view_type_idx], pn_types, item_idx, format_days_region, title,
                                      "%s %s" % (subtitle, view_des), y_title)
                if item:
                    items.append(item)
                    item_idx += 1

    except Exception, e:
        # Any failure degrades to an empty plot list rather than a 500.
        logger.info("query %s multiQos error: %s" % (table.__name__, e))
Esempio n. 9
0
def show_ms_error(request, dev=""):
    """Render the ms_error table for one day (yesterday by default)."""
    date = request.GET.get("date", str(get_day_of_day(-1)))

    table = HtmlTable()
    table.mtitle = "ms_error信息"
    table.mheader = [
        "响应码", "ClientIP", "省份", "运营商", 'ServerIP', '省份', '运营商', '次数',
        'Content_len', 'url'
    ]
    table.msub = []

    sql = "select Resp, ClientIP, ClientISP, ClientArea, ServIP, \
            ServISP, ServArea, Count, ContentLen, URL  \
            from ms_error_info where Date='%s'" % date

    logger.debug("Server List SQL - %s" % sql)

    cursor = connection.cursor()
    started = current_time()
    cursor.execute(sql)
    rows = cursor.fetchall()
    logger.info("execute sql:  %s, cost: %s" % (sql,
                                                (current_time() - started)))

    subs = []
    for row in rows:
        # First nine columns verbatim; the URL becomes a clickable link.
        sub = list(row[:9])
        sub.append('''<a href="%s" target="_blank">%s</a>''' %
                   (row[9], row[9]))
        subs.append(sub)
    table.msub = subs

    context = {'table': table, 'default_date': date}
    return render_to_response('show_ms_error.html', context)
Esempio n. 10
0
def show_tsdelay(request, dev=""):
    """Render the per-server CDN traffic table for one day.

    Fix: the inner-flow percentage divided by row[3] (total flow)
    unconditionally, raising ZeroDivisionError for servers that reported no
    traffic; such rows now display 0.0%.
    """
    context = {}

    date = request.GET.get("date", str(get_day_of_day(-1)))

    table = HtmlTable()
    table.mtitle = "CDN信息"
    table.mheader = ["ServerIP", "省份", "运营商", '流量(G)', '内网流量占比(%)', '详情']
    table.msub = []

    sql = "select ServIP, ServArea, ServISP, Flow, InnerFlow, ServiceType \
            from ts_delay where Date='%s'" % date

    logger.debug("Server List SQL - %s" % sql)

    mysql_cur = connection.cursor()
    begin_time = current_time()
    mysql_cur.execute(sql)
    results = mysql_cur.fetchall()
    logger.info("execute sql:  %s, cost: %s" % (sql,
                                                (current_time() - begin_time)))
    subs = []
    for row in results:
        sub = []
        for i in range(3):
            sub.append(row[i])
        # Bytes -> gigabytes, 3 decimal places.
        sub.append("%.3f" % (float(row[3]) / 1024 / 1024 / 1024))
        # Inner-flow share of total flow; guard against zero total flow.
        if row[3]:
            sub.append("%.1f" % (float(row[4] * 1.0 / row[3] * 100)))
        else:
            sub.append("%.1f" % 0.0)
        sub.append("<a href='/show_cdn_detail?ip=%s&date=%s&servicetype=%s' \
            target='main'>%s</a>" % (row[0], date, row[5], u"详情"))
        subs.append(sub)
    table.msub = subs

    context['table'] = table
    context['default_date'] = date

    return render_to_response('show_tsdelay.html', context)
Esempio n. 11
0
def show_cdn_detail(request, dev=""):
    """Render the per-province rate/flow map page for one CDN server and day."""
    context = {}
    date = request.GET.get("date", str(get_day_of_day(-1)))
    servicetype = request.GET.get("servicetype", 'B2B')
    servip = request.GET.get("ip", "127.0.0.1")

    sql = "select ServArea, ServISP, Flow, InnerFlow, Info \
            from ts_delay where ServIP='%s' and ServiceType='%s' \
            and Date='%s'" % (servip, servicetype, date)

    mysql_cur = connection.cursor()
    begin_time = current_time()
    mysql_cur.execute(sql)
    results = mysql_cur.fetchall()
    logger.info("execute sql:  %s, cost: %s" % (sql,
                                                (current_time() - begin_time)))
    # No row for this server/date: render the map page empty.
    if len(results) <= 0:
        return render_to_response('bestv_servers_map.html', context)

    area, isp, tflow, _, info = results[0]

    context['title'] = '服务器信息: 速率KBps'
    context['subtitle'] = '%s-%s-%s' % (area, isp, date)
    context['legendTxt'] = servip
    context["pie_contents"] = make_pie_items(info)

    # Map widgets: geography plus per-province rates and flows.
    item = {
        "title": '%s-%s-%s' % (servip, area, isp),
        "geos": get_china_geos(area, isp, info, tflow, 0.02),
        "rates": make_rates(area, isp, info, tflow, 0.02),
        "flows": make_flows(info, tflow, 0.02),
    }
    context['item'] = item
    return render_to_response('bestv_servers_map.html', context)
Esempio n. 12
0
def get_records_data(view_types, begin_date, end_date, service_type, beta_ver,
                     master_ver):
    """Sum play-record counts per version and view type, plus a total column."""
    versions = [v for v in (beta_ver, master_ver) if len(v) > 0]
    qos_data = []

    for ver in versions:
        conditions = Q(ServiceType=service_type) & Q(DeviceType=ver)
        if service_type == "B2C":
            conditions &= Q(ISP='all') & Q(Area='all')
        conditions &= Q(
            ViewType__in=[view_type[0] for view_type in view_types])
        conditions &= Q(Date__gte=begin_date) & Q(Date__lte=end_date)
        conditions &= Q(Hour=24)

        begin_time = current_time()
        # Accumulate record counts per view type.
        counts = {}
        for (view, _) in view_types:
            counts[view] = 0
        for playinfo in BestvPlayinfo.objects.filter(conditions):
            counts[playinfo.ViewType] += playinfo.Records

        row = ["%s" % (ver)]
        for (view, _) in view_types:
            row.append(counts[view])
        row.append(sum(counts.values()))
        qos_data.append(row)
        logger.info("execute sql: ver: %s, get records, cost: %s" %
                    (ver, (current_time() - begin_time)))

    return qos_data
Esempio n. 13
0
def append_to_excel(wb, table, sheet, row_idx):
    """Append *table* (an HtmlTable) to workbook *wb* on a new sheet.

    Writes the table title as a red remark line, a blank row, then the
    header and data rows.

    Fix: the final row index is now returned — matching the
    write_xls/write_remarks_to_xls convention used throughout this file —
    so callers can keep appending below this table.
    """
    # Note: the *sheet* argument is the sheet NAME; it is replaced by the
    # created sheet object below.
    sheet = wb.add_sheet(sheet)

    heading_xf = xlwt.easyxf(
        'borders: left thin, right thin, top thin, bottom thin; font: bold on; pattern: pattern solid, fore_colour bright_green'
    )
    data_xf = xlwt.easyxf(
        'borders: left thin, right thin, top thin, bottom thin; font: name Arial'
    )
    spec_xf = xlwt.easyxf('font: name Arial, colour Red')

    row_idx = write_remarks_to_xls(wb, sheet, row_idx, [table.mtitle], spec_xf)
    row_idx += 1

    row_idx = write_xls(wb, sheet, row_idx, table.mheader, table.msub,
                        heading_xf, data_xf)
    return row_idx
Esempio n. 14
0
            # Emit one plot item per view type present in the daily data.
            item_idx = 0
            for (view_type_idx, view_des) in view_types:
                if view_type_idx not in data_by_day:
                    continue
                item = make_plot_item(data_by_day[view_type_idx], pn_types, item_idx, format_days_region, title,
                                      "%s %s" % (subtitle, view_des), y_title)
                if item:
                    items.append(item)
                    item_idx += 1

    except Exception, e:
        # Any query/plot failure degrades to an empty report rather than a 500.
        logger.info("query %s multiQos error: %s" % (table.__name__, e))

    # Rebuild the template context with the filter defaults and the plots.
    context = dict()
    context['default_service_type'] = service_type
    context['service_types'] = SERVICE_TYPES
    context['default_device_type'] = device_type
    context['device_types'] = device_types
    context['default_version'] = version
    context['versions'] = versions
    context['default_begin_date'] = str(begin_date)
    context['default_end_date'] = str(end_date)
    context['contents'] = items
    # has_data tells the template to render charts instead of the empty notice.
    if len(items) > 0:
        context['has_data'] = True

    logger.info("query %s multiQos, cost: %s" %
                (table.__name__, (current_time() - begin_time)))

    return context
Esempio n. 15
0
def get_server_url_distribute(request, dev=""):
    url_distribute = {}
    try:
        server_ip = request.GET.get('server_ip')
        begin_date = request.GET.get('begin_date')
        end_date = request.GET.get('end_date')
        code = request.GET.get('code')

        url_distribute["mtitle"] = u"Code:%s 对应的URL访问分布情况" % code
        url_distribute["mheader"] = ["URL", "Records", "Ratio(%)"]
        url_distribute["msub"] = []

        # get count of all url records
        sql = "select sum(Records) from view_urlinfo "
        sql += "where IP='%s' and Code=%s " % (server_ip, code)
        sql += "and Date>='%s' and Date<='%s' " % (begin_date, end_date)
        sql += "and Hour<24 and URL!='all' and Records>0"
        logger.debug("Count URL SQL - %s" % sql)

        cu = connection.cursor()

        begin_time = current_time()
        cu.execute(sql)
        results = cu.fetchall()
        logger.info("execute sql:  %s, cost: %s" %
                    (sql, (current_time() - begin_time)))
        total_records = 0
        for row in results:
            if not row[0]:
                log = "There are no URL records of Code %s on Server %s " % (
                    code, server_ip)
                log += "between %s and %s" % (begin_date, end_date)
                raise Exception(log)
            total_records = int(row[0])

        # get count of all url records
        sql = "select URL, sum(Records), 100*sum(Records)/%s " % (
            total_records)
        sql += "from view_urlinfo where IP='%s' and Code=%s " % (server_ip,
                                                                 code)
        sql += "and Date>='%s' and Date<='%s' " % (begin_date, end_date)
        sql += "and Hour<24 and URL!='all' and Records>0 "
        sql += "group by URL order by Records desc"
        logger.debug("URL Distribute SQL - %s" % sql)

        begin_time = current_time()
        cu.execute(sql)
        results = cu.fetchall()
        logger.info("execute sql:  %s, cost: %s" %
                    (sql, (current_time() - begin_time)))

        for row in results:
            try:
                sub = ["%s" % col for col in row]
                url_distribute["msub"].append(sub)
            except Exception, e:
                logger.debug(e)

    except Exception, e:
        logger.debug(e)
        url_distribute["mheader"] = []
Esempio n. 16
0
def generate_report(wb,
                    begin_date,
                    end_date,
                    service_type,
                    device_type,
                    version,
                    version2=""):
    """Write the full version-comparison QoS report into workbook *wb*.

    Sections written in order: description header, record counts, single
    QoS metrics, play-time percentiles, first-buffer percentiles, stuck and
    drag-buffer percentiles, and a red remarks footer.  Timing is printed to
    stdout after each step and logged at the end.
    """
    begin_time = current_time()
    # Resolve the beta/master version pair being compared.
    (beta_ver, master_ver) = get_version_version2(device_type, version,
                                                  version2)

    book = wb
    sheet = book.add_sheet("version-report")
    sheet.col(0).width = 10000

    heading_xf = ezxf('borders: left thin, right thin, top thin, bottom thin; \
        font: bold on; pattern: pattern solid, fore_colour bright_green')
    data_xf = ezxf('borders: left thin, right thin, top thin, bottom thin; \
        font: name Arial')

    rowx = 0

    #
    # step 0: spec
    #
    spec_xf = ezxf('font: name Arial, colour Red')
    spec_data = get_desc_for_daily_report(begin_date, end_date, \
        beta_ver, master_ver)

    # NOTE(review): empty lists are passed as headings and heading_xf here;
    # presumably write_xls skips the heading row in that case — confirm.
    rowx = write_xls(book, sheet, rowx, [], spec_data, [], spec_xf)
    rowx += 2

    #
    # step 1: records
    #
    records_headings = [u'记录数/版本', u'点播', u'回看', u'直播', u'连看', u'总计']
    # prepare data
    records_data = get_records_data(VIEW_TYPES[1:5], begin_date, end_date, service_type, \
        beta_ver, master_ver)
    rowx = write_xls(book, sheet, rowx, records_headings, records_data,
                     heading_xf, data_xf)
    rowx += 2
    print "step 1: ", current_time() - begin_time
    #
    # step 2: single Qos
    #
    single_qos_headings = [u'单指标QoS/版本', u'点播', u'回看', u'直播', u'连看']
    single_qos_data = get_single_qos_data2(VIEW_TYPES[1:5], begin_date,
                                           end_date, service_type, beta_ver,
                                           master_ver)
    rowx = write_xls(book, sheet, rowx, single_qos_headings, single_qos_data,
                     heading_xf, data_xf)
    rowx += 2
    print "step 2: ", current_time() - begin_time

    #
    # step 3: playtm
    #
    playtm_headings = [u'播放时长(分钟)', 'P25', 'P50', 'P75', 'P90', 'P95', u'均值']
    playtm_data = get_playtm_data(begin_date, end_date, service_type, beta_ver,
                                  master_ver)
    rowx = write_xls(book, sheet, rowx, playtm_headings, playtm_data,
                     heading_xf, data_xf)
    rowx += 2
    print "step 3: ", current_time() - begin_time

    #
    # step 4: fbuffer
    #
    fbuffer_headings = [u'首次缓冲时长(秒)', 'P25', 'P50', 'P75', 'P90', 'P95', u'均值']
    fbuffer_data = get_fbuffer_data(begin_date, end_date, service_type,
                                    beta_ver, master_ver)
    rowx = write_xls(book, sheet, rowx, fbuffer_headings, fbuffer_data,
                     heading_xf, data_xf)
    rowx += 2
    print "step 4: ", current_time() - begin_time

    # stuck
    stuck_headings = [u'卡缓冲', 'P25', 'P50', 'P75', 'P90', 'P95']
    stuck_data = get_tplayloading_data(begin_date, end_date, service_type,
                                       device_type, version, version2, STUCK)
    rowx = write_xls(book, sheet, rowx, stuck_headings, stuck_data, heading_xf,
                     data_xf)
    rowx += 2

    # dbuffer
    dbuffer_headings = [u'拖动缓冲', 'P25', 'P50', 'P75', 'P90', 'P95']
    dbuffer_data = get_tplayloading_data(begin_date, end_date, service_type,
                                         device_type, version, version2,
                                         DBUFFER)
    rowx = write_xls(book, sheet, rowx, dbuffer_headings, dbuffer_data,
                     heading_xf, data_xf)
    rowx += 2

    #
    # step 5: remarks
    #
    remark_xf = ezxf('font: name Arial, colour Red')
    remarks = [u'备注: ', u'一次不卡比例:无卡顿播放次数/加载成功的播放次数', u'卡用户卡时间比:卡顿总时长/卡顿用户播放总时长',\
        u'缓冲异常值过滤:如果P95<3秒,则认为数据有问题', u'播放时长异常值过滤:如果P95小于30分钟,则认为数据有问题', \
        u'多天报表的算均值:算均值可能存在差错']
    rowx = write_remarks_to_xls(book, sheet, rowx, remarks, remark_xf)
    rowx += 2
    # NOTE(review): label below says "step 4" but this is step 5 — looks like
    # a copy-paste slip in the debug print.
    print "step 4: ", current_time() - begin_time

    logger.info("generate_report:  %s - %s, cost: %s" %
                (begin_date, end_date, (current_time() - begin_time)))
    print begin_date, end_date, beta_ver, current_time() - begin_time
Esempio n. 17
0
 def wrapper(*args, **kwargs):
     # Time the wrapped call and log its cost at debug level, passing the
     # result through unchanged.
     # NOTE(review): the enclosing decorator (and `func`) are outside this
     # view; consider functools.wraps(func) there to preserve metadata.
     begin_time = current_time()
     result = func(*args, **kwargs)
     cost_time = current_time() - begin_time
     logger.debug("{0}({1},{2}) cost {3}s".format(func.__name__, args, kwargs, cost_time))
     return result