Example No. 1
def mpstat_ana(filename):
    """
    Get the cpu usage from the mpstat summary file

    @param filename: filename of the mpstat summary file
    """
    mpstat_result = open(filename, "r")
    key_value = "%idle"
    index = 0
    result = {}
    for line in mpstat_result.readlines():
        if key_value in line:
            index = line.split().index(key_value) + 1
        else:
            data = line.split()
            if data[0] == "all":
                vcpu = "all"
            else:
                vcpu = "vcpu%s" % data[0]
            cpu_use = "%20.2f" % (100 - utils_test.aton(data[index]))
            result[vcpu] = cpu_use
    return result
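
A minimal usage sketch for mpstat_ana, assuming a summary file whose header row contains "%idle" and whose data rows carry the idle value one field to the right of that header position. The sample content, file path, and expected values below are invented for illustration, and utils_test from the virt-test tree must be importable for the call itself to work.

import tempfile

# Hypothetical summary content shaped to match what mpstat_ana expects:
# a header containing "%idle" and data rows whose first field is the CPU id.
sample = ("CPU   %usr  %sys  %idle\n"
          "all   1.20  0.80  0.50  97.50\n"
          "0     2.00  1.00  0.50  96.50\n")

with tempfile.NamedTemporaryFile("w", suffix=".mpstat", delete=False) as tmp:
    tmp.write(sample)

# Returns "100 - %idle" per CPU as 20-character padded strings, e.g.
# {'all': '                2.50', 'vcpu0': '                3.50'}
print(mpstat_ana(tmp.name))
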
Example No. 2
def mpstat_ana(filename):
    """
    Get the cpu usage from the mpstat summary file

    @param filename: filename of the mpstat summary file
    """
    mpstat_result = open(filename, 'r')
    key_value = "%idle"
    index = 0
    result = {}
    for line in mpstat_result.readlines():
        if key_value in line:
            index = line.split().index(key_value) + 1
        else:
            data = line.split()
            if data[0] == "all":
                vcpu = "all"
            else:
                vcpu = "vcpu%s" % data[0]
            cpu_use = "%20.2f" % (100 - utils_test.aton(data[index]))
            result[vcpu] = cpu_use
    return result
Example No. 3
def result_sum(topdir, params, guest_ver, resultsdir, test):
    case_type = params.get("test")
    unit_std = params.get("unit_std", "M")
    no_table_list = params.get("no_table_list", "").split()
    ignore_cases = params.get("ignore_cases", "").split()
    repeatn = ""
    if "repeat" in test.outputdir:
        repeatn = re.findall(r"repeat\d+", test.outputdir)[0]
    category_key = re.split("/", test.outputdir)[-1]
    category_key = re.split(case_type, category_key)[0]
    category_key = re.sub(r"\.repeat\d+", "", category_key)

    kvm_ver = utils.system_output(params.get('ver_cmd', "rpm -q qemu-kvm"))
    host_ver = os.uname()[2]
    test.write_test_keyval({'kvm-userspace-ver': kvm_ver})
    test.write_test_keyval({'host-kernel-ver': host_ver})
    test.write_test_keyval({'guest-kernel-ver': guest_ver})
    # Find the results files

    results_files = {}
    file_list = [
        'guest_result', 'guest_monitor_result.*sum', 'host_monitor_result.*sum'
    ]
    if params.get("file_list"):
        file_list = params.get("file_list").split()

    for files in os.walk(topdir):
        if files[2]:
            for file in files[2]:
                jump_flag = False
                for ignore_case in ignore_cases:
                    if ignore_case in files[0]:
                        jump_flag = True
                if jump_flag:
                    continue
                file_dir_norpt = re.sub(r"\.repeat\d+", "", files[0])
                if (repeatn in files[0] and category_key in file_dir_norpt
                        and case_type in files[0]):
                    for i, pattern in enumerate(file_list):
                        if re.findall(pattern, file):
                            prefix = re.findall(r"%s\.[\d\w_\.]+" % case_type,
                                                file_dir_norpt)[0]
                            prefix = re.sub(r"\.|_", "--", prefix)
                            if prefix not in results_files:
                                # One result slot per pattern in file_list
                                results_files[prefix] = [None] * len(file_list)
                            tmp_file = utils_misc.get_path(files[0], file)
                            results_files[prefix][i] = tmp_file

    # Start to read results from results file and monitor file
    results_matrix = {}
    no_table_results = {}
    thread_tag = params.get("thread_tag", "thread")
    order_list = []
    for prefix in results_files:
        marks = params.get("marks", "").split()
        case_infos = prefix.split("--")
        case_type = case_infos[0]
        threads = ""
        refresh_order_list = True
        prefix_perf = prefix
        if case_type == "ffsb":
            category = "-".join(case_infos[:-1])
            threads = case_infos[-1]
        elif case_type == "qcow2perf":
            refresh_order_list = False
            if len(case_infos) > 2:
                category = "-".join(case_infos[:-2])
                thread_tag = case_infos[-2]
                threads = " "
                marks[0] = re.sub("TIME", case_infos[-1], marks[0])
            else:
                category = case_infos[-1]
                marks[0] = re.sub("TIME", case_infos[-1], marks[0])
            prefix_perf = "--".join(case_infos[:-1])
        else:
            category = "-".join(case_infos)
        if refresh_order_list:
            order_list = []
        if (category not in results_matrix.keys()
                and category not in no_table_list):
            results_matrix[category] = {}
        if threads:
            if threads not in results_matrix[category].keys():
                results_matrix[category][threads] = {}
                results_matrix["thread_tag"] = thread_tag
            tmp_dic = results_matrix[category][threads]
        elif category not in no_table_list:
            tmp_dic = results_matrix[category]

        result_context_file = open(results_files[prefix][0], 'r')
        result_context = result_context_file.read()
        result_context_file.close()
        for mark in marks:
            mark_tag, mark_key = mark.split(":")
            datas = re.findall(mark_key, result_context)
            if isinstance(datas[0], tuple):
                data = time_ana(datas[0])
            else:
                tmp_data = 0.0
                for data in datas:
                    if re.findall("[bmkg]", data, re.I):
                        data = utils_misc.normalize_data_size(data, unit_std)
                    tmp_data += float(data)
                data = str(tmp_data)
            if data:
                if mark_tag in no_table_list:
                    no_table_results[mark_tag] = utils_test.aton(data)
                    perf_value = no_table_results[mark_tag]
                else:
                    tmp_dic[mark_tag] = utils_test.aton(data)
                    perf_value = tmp_dic[mark_tag]
            else:
                raise error.TestError("Can not get the right data from result."
                                      "Please check the debug file.")
            if mark_tag not in no_table_list and mark_tag not in order_list:
                order_list.append(mark_tag)
            test.write_perf_keyval({"%s-%s" % (prefix_perf, mark_tag): perf_value})
        # Start analyzing the mpstat results
        if params.get('mpstat') == "yes":
            guest_cpu_infos = mpstat_ana(results_files[prefix][1])
            for vcpu in guest_cpu_infos:
                if vcpu != "all":
                    tmp_dic[vcpu] = float(guest_cpu_infos[vcpu])
                    order_list.append(vcpu)
            host_cpu_infos = mpstat_ana(results_files[prefix][2])
            tmp_dic["Hostcpu"] = float(host_cpu_infos["all"])
            order_list.append("Hostcpu")
        # Add some special keys for specific cases
        if case_type == "ffsb":
            tmp_dic["MBps_per_Hostcpu"] = (tmp_dic["Thro-MBps"] /
                                           tmp_dic["Hostcpu"])
            order_list.append("MBps_per_Hostcpu")
        elif case_type == "iozone":
            sum_kbps = 0
            for mark in marks:
                mark_tag, _ = mark.split(":")
                sum_kbps += tmp_dic[mark_tag]
            tmp_dic["SUMKbps_per_Hostcpu"] = sum_kbps / tmp_dic["Hostcpu"]
            order_list.append("SUMKbps_per_Hostcpu")

    sum_marks = params.get("sum_marks", "").split()
    sum_matrix = {}
    order_line = ""
    if results_matrix.get("thread_tag"):
        headline = "%20s|" % results_matrix["thread_tag"]
        results_matrix.pop("thread_tag")
    else:
        headline = ""
    for index, tag in enumerate(order_list):
        headline += "%s|" % format_result(tag)
        order_line += "DATA%d|" % index
    headline = headline.rstrip("|")
    order_line = order_line.rstrip("|")

    result_path = utils_misc.get_path(resultsdir, "%s-result.RHS" % case_type)
    if os.path.isfile(result_path):
        result_file = open(result_path, "r+")
    else:
        result_file = open(result_path, "w")
        result_file.write("### kvm-userspace-version : %s\n" % kvm_ver)
        result_file.write("### kvm-version : %s\n" % host_ver)
        result_file.write("### guest-kernel-version :%s\n" % guest_ver)

    test.write_test_keyval({'category': headline})
    result_file.write("Category:ALL\n")
    matrix_order = params.get("matrix_order", "").split()
    if not matrix_order:
        matrix_order = sorted(results_matrix.keys())
    for category in matrix_order:
        out_loop_line = order_line
        result_file.write("%s\n" % category)
        line = ""
        write_out_loop = True
        result_file.write("%s\n" % headline)
        for item in results_matrix[category]:
            if isinstance(results_matrix[category][item], dict):
                tmp_dic = results_matrix[category][item]
                line = "%s|" % format_result(item)
                for tag in order_list:
                    line += "%s|" % format_result(tmp_dic[tag])
                    if tag in sum_marks:
                        sum_matrix = get_sum_result(sum_matrix, tmp_dic[tag],
                                                    tag)
                result_file.write("%s\n" % line.rstrip("|"))
                write_out_loop = False
            else:
                #line += "%s|" % format_result(results_matrix[category][item])
                re_data = "DATA%s" % order_list.index(item)
                out_loop_line = re.sub(
                    re_data, format_result(results_matrix[category][item]),
                    out_loop_line)
                if tag in sum_marks:
                    sum_matrix = get_sum_result(sum_matrix, tmp_dic[tag], tag)
        if write_out_loop:
            result_file.write("%s\n" % out_loop_line)

    if sum_matrix:
        if case_type == "ffsb":
            sum_matrix["MBps_per_Hostcpu"] = (sum_matrix["Thro-MBps"] /
                                              sum_matrix["Hostcpu"])
            sum_marks.append("MBps_per_Hostcpu")
        result_file.write("Category:SUM\n")
        headline = ""
        line = ""
        if len(sum_matrix) < 4:
            for i in range(4 - len(sum_matrix)):
                headline += "%20s|" % "None"
                line += "%20d|" % 0
        for tag in sum_marks:
            headline += "%20s|" % tag
            line += "%s|" % format_result(sum_matrix[tag])

        result_file.write("%s\n" % headline.rstrip("|"))
        result_file.write("%s\n" % line.rstrip("|"))

    if no_table_results:
        no_table_order = params.get("no_table_order", "").split()
        if not no_table_order:
            no_table_order = sorted(no_table_results.keys())
        for item in no_table_order:
            result_file.write("%s: %s\n" % (item, no_table_results[item]))

    result_file.close()
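
result_sum reads its column definitions from params.get("marks") as space-separated "tag:regex" pairs and matches each regex against the guest result text. A minimal standalone sketch of that step follows; the tag, pattern, and result line are made up for illustration and do not come from a real test configuration.

import re

# Hypothetical mark in the "tag:regex" form that result_sum splits on ':'.
mark = r"Thro-MBps:Throughput\s+(\d+\.\d+)"
result_context = "Total Throughput 512.25 MB/sec"   # invented result line

mark_tag, mark_key = mark.split(":")
datas = re.findall(mark_key, result_context)
print(mark_tag, datas)   # -> Thro-MBps ['512.25']
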
Example No. 4
def ffsb_sum(topdir, prefix, params, guest_ver, resultsdir):
    marks = ["Transactions per Second", "Read Throughput", "Write Throughput"]
    matrix = []
    sum_thro = 0
    sum_hostcpu = 0

    cmd = 'find %s|grep "%s.*guest_results/guest_result"|grep -v prepare|sort' \
          % (topdir, prefix)
    for guest_result_file in commands.getoutput(cmd).split():
        sub_dir = os.path.dirname(guest_result_file)
        content = open(guest_result_file, "r").readlines()
        linestr = []
        readthro = 0
        writethro = 0

        for line in content:
            if marks[0] in line:
                iops = "%8s" % re.split("\s+", line)[0]
            elif marks[1] in line:
                substr = re.findall("\d+(?:\.\d+)*", line)[0]
                readthro = utils_test.aton("%.2f" % float(substr))
            elif marks[2] in line:
                substr = re.findall("\d+(?:\.\d+)*", line)[0]
                writethro = utils_test.aton("%.2f" % float(substr))
                break

        throughput = readthro + writethro
        linestr.append(iops)
        linestr.append(throughput)
        sum_thro += throughput

        filename = glob.glob(os.path.join(sub_dir,
                                          "guest_monitor_result*.sum"))[0]
        sr = open(filename, "r").readlines()
        linestr.append("%8.2f" % (100 - utils_test.aton(sr[1].split()[3])))
        linestr.append("%8.2f" % (100 - utils_test.aton(sr[2].split()[3])))

        filename = glob.glob(os.path.join(sub_dir,
                                          "host_monitor_result*.sum"))[0]
        sr = open(filename, "r").readlines()
        hostcpu = 100 - utils_test.aton(sr[-1].split()[3])
        linestr.append(hostcpu)
        sum_hostcpu += hostcpu
        linestr.append("%.2f" % (throughput / hostcpu))
        matrix.append(linestr)

    headstr = "threads|    IOPS|   Thro(MBps)|   Vcpu1|   Vcpu2|   Hostcpu|" \
              " MBps/Hostcpu%"
    categories = params["categories"].split('|')
    threads = params["threads"].split()
    kvm_ver = commands.getoutput(params.get('ver_cmd', "rpm -q qemu-kvm"))

    fd = open("%s/ffsb-result.RHS" % resultsdir, "w")
    fd.write("#ver# %s\n#ver# host kernel: %s\n#ver# guest kernel:%s\n" %
             (kvm_ver, os.uname()[2], guest_ver))

    desc = """#desc# The Flexible Filesystem Benchmark(FFSB) is a cross-platform
#desc# filesystem performance measurement tool. It uses customizable profiles
#desc# to measure of different workloads, and it supports multiple groups of
#desc# threads across multiple filesystems.
#desc# How to read the results:
#desc# - The Throughput is measured in MBps/sec.
#desc# - IOPS (Input/Output Operations Per Second, pronounced eye-ops)
#desc# - Usage of Vcpu, Hostcpu are all captured
#desc#
"""
    fd.write(desc)
    fd.write("Category:SUM\n   None|    MBps|      Hostcpu|MBps/Hostcpu%\n")
    fd.write("      0|%8.2f|%13.2f|%8.2f\n" % (sum_thro, sum_hostcpu,
                                               (sum_thro / sum_hostcpu)))
    fd.write("Category:ALL\n")
    idx = 0
    for i in range(len(matrix)):
        if i % 3 == 0:
            fd.write("%s\n%s\n" % (categories[idx], headstr))
            idx += 1
        fd.write("%7s|%8s|%13s|%8s|%8s|%10s|%14s\n" %
                 (threads[i % 3], matrix[i][0], matrix[i][1], matrix[i][2],
                  matrix[i][3], matrix[i][4], matrix[i][5]))
    fd.close()
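
The per-line parsing in ffsb_sum can be exercised on its own. The sample lines below are invented to match the three marks and the regexes used above; real FFSB output wording and spacing may differ.

import re

marks = ["Transactions per Second", "Read Throughput", "Write Throughput"]
# Invented guest_result lines shaped for the parsing logic above.
content = ["845.20 Transactions per Second\n",
           "Read Throughput: 25.60MB/sec\n",
           "Write Throughput: 12.40MB/sec\n"]

iops, readthro, writethro = "0", 0.0, 0.0
for line in content:
    if marks[0] in line:
        iops = re.split(r"\s+", line)[0]   # first field holds the TPS value
    elif marks[1] in line:
        readthro = float(re.findall(r"\d+(?:\.\d+)*", line)[0])
    elif marks[2] in line:
        writethro = float(re.findall(r"\d+(?:\.\d+)*", line)[0])

print(iops, readthro + writethro)   # -> 845.20 38.0
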
Example No. 5
            cmd = "mem.static_random_fill()"
    _execute_allocator(cmd, vm, session, fill_timeout)
    time.sleep(120)

    s, sharing_page_2 = commands.getstatusoutput(query_cmd)
    if query_regex:
        sharing_page_2 = re.findall(query_regex, sharing_page_2)[0]

    sharing_page = [sharing_page_0, sharing_page_1, sharing_page_2]
    for index, value in enumerate(sharing_page):
        if re.findall("[A-Za-z]", value):
            # enumerate avoids list.index(), which returns the first match and
            # could update the wrong slot when two readings are equal; "g"
            # values are scaled by 1024 so all entries share the same unit.
            data = value[0:-1]
            unit = value[-1]
            if unit == "g":
                sharing_page[index] = utils_test.aton(data) * 1024
            else:
                sharing_page[index] = utils_test.aton(data)

    fail_type = 0
    if test_type == "disable":
        if int(sharing_page[0]) != 0 and int(sharing_page[1]) != 0:
            fail_type += 1
    else:
        if int(sharing_page[0]) >= int(sharing_page[1]):
            fail_type += 2
        if int(sharing_page[1]) <= int(sharing_page[2]):
            fail_type += 4

    fail = ["Sharing page increased abnormally",
            "Sharing page didn't increase", "Sharing page didn't split"]
Example No. 6
                cmd = "mem.static_random_fill()"
        _execute_allocator(cmd, vm, session, fill_timeout)
        time.sleep(120)

        s, sharing_page_2 = commands.getstatusoutput(query_cmd)
        if query_regex:
            sharing_page_2 = re.findall(query_regex, sharing_page_2)[0]

        sharing_page = [sharing_page_0, sharing_page_1, sharing_page_2]
        for index, value in enumerate(sharing_page):
            if re.findall("[A-Za-z]", value):
                # enumerate avoids list.index(), which returns the first match
                # and could update the wrong slot when two readings are equal;
                # "g" values are scaled by 1024 so all entries share the same unit.
                data = value[0:-1]
                unit = value[-1]
                if unit == "g":
                    sharing_page[index] = utils_test.aton(data) * 1024
                else:
                    sharing_page[index] = utils_test.aton(data)

        fail_type = 0
        if test_type == "disable":
            if int(sharing_page[0]) != 0 and int(sharing_page[1]) != 0:
                fail_type += 1
        else:
            if int(sharing_page[0]) >= int(sharing_page[1]):
                fail_type += 2
            if int(sharing_page[1]) <= int(sharing_page[2]):
                fail_type += 4

        fail = [
            "Sharing page increased abnormally",
Example No. 7
def ffsb_sum(topdir, prefix, params, guest_ver, resultsdir):
    marks = ["Transactions per Second",  "Read Throughput", "Write Throughput"]
    matrix = []
    sum_thro = 0
    sum_hostcpu = 0

    cmd = 'find %s|grep "%s.*guest_results/guest_result"|grep -v prepare|sort' \
          % (topdir, prefix)
    for guest_result_file in commands.getoutput(cmd).split():
        sub_dir = os.path.dirname(guest_result_file)
        content = open(guest_result_file, "r").readlines()
        linestr = []
        readthro = 0
        writethro = 0

        for line in content:
            if marks[0] in line:
                iops = "%8s" % re.split("\s+", line)[0]
            elif marks[1] in line:
                substr = re.findall("\d+(?:\.\d+)*", line)[0]
                readthro = utils_test.aton("%.2f" % float(substr))
            elif marks[2] in line:
                substr = re.findall("\d+(?:\.\d+)*", line)[0]
                writethro = utils_test.aton("%.2f" % float(substr))
                break

        throughput = readthro + writethro
        linestr.append(iops)
        linestr.append(throughput)
        sum_thro += throughput

        filename = glob.glob(os.path.join(sub_dir, "guest_monitor_result*.sum"))[0]
        sr = open(filename, "r").readlines()
        linestr.append("%8.2f" % (100 - utils_test.aton(sr[1].split()[3])))
        linestr.append("%8.2f" % (100 - utils_test.aton(sr[2].split()[3])))

        filename = glob.glob(os.path.join(sub_dir, "host_monitor_result*.sum"))[0]
        sr = open(filename, "r").readlines()
        hostcpu = 100 - utils_test.aton(sr[-1].split()[3])
        linestr.append(hostcpu)
        sum_hostcpu += hostcpu
        linestr.append("%.2f" % (throughput/hostcpu))
        matrix.append(linestr)

    headstr = "threads|    IOPS|   Thro(MBps)|   Vcpu1|   Vcpu2|   Hostcpu|" \
              " MBps/Hostcpu%"
    categories = params.get("categories").split('|')
    threads = params.get("threads").split()
    kvm_ver = commands.getoutput(params.get('ver_cmd', "rpm -q qemu-kvm"))

    fd = open("%s/ffsb-result.RHS" % resultsdir, "w")
    fd.write("#ver# %s\n#ver# host kernel: %s\n#ver# guest kernel:%s\n" % (
             kvm_ver, os.uname()[2], guest_ver))

    desc = """#desc# The Flexible Filesystem Benchmark(FFSB) is a cross-platform
#desc# filesystem performance measurement tool. It uses customizable profiles
#desc# to measure of different workloads, and it supports multiple groups of
#desc# threads across multiple filesystems.
#desc# How to read the results:
#desc# - The Throughput is measured in MBps/sec.
#desc# - IOPS (Input/Output Operations Per Second, pronounced eye-ops)
#desc# - Usage of Vcpu, Hostcpu are all captured
#desc#
"""
    fd.write(desc)
    fd.write("Category:SUM\n   None|    MBps|      Hostcpu|MBps/Hostcpu%\n")
    fd.write("      0|%8.2f|%13.2f|%8.2f\n" % (sum_thro, sum_hostcpu,
                                               (sum_thro/sum_hostcpu)))
    fd.write("Category:ALL\n")
    idx = 0
    for i in range(len(matrix)):
        if i % 3 == 0:
            fd.write("%s\n%s\n" % (categories[idx], headstr))
            idx += 1
        fd.write("%7s|%8s|%13s|%8s|%8s|%10s|%14s\n" % (threads[i%3],
                 matrix[i][0], matrix[i][1], matrix[i][2], matrix[i][3],
                 matrix[i][4], matrix[i][5]))
    fd.close()
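
For reference, a sketch of the params shape that ffsb_sum consumes. Every value below is illustrative; the categories and threads lists have to line up with the result files found under topdir, since the report writes one category header per group of three thread rows.

# Hypothetical configuration values for ffsb_sum.
params = {
    "categories": "large-file|small-file|mixed",   # one header per three rows
    "threads": "1 8 16",                           # cycled per row via i % 3
    "ver_cmd": "rpm -q qemu-kvm",
}
print(params["categories"].split("|"))   # -> ['large-file', 'small-file', 'mixed']
print(params["threads"].split())         # -> ['1', '8', '16']
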
Example No. 8
def result_sum(topdir, params, guest_ver, resultsdir, test):
    case_type = params.get("test")
    unit_std = params.get("unit_std", "M")
    no_table_list = params.get("no_table_list", "").split()
    ignore_cases = params.get("ignore_cases", "").split()
    repeatn = ""
    if "repeat" in test.outputdir:
        repeatn = re.findall(r"repeat\d+", test.outputdir)[0]
    category_key = re.split("/", test.outputdir)[-1]
    category_key = re.split(case_type, category_key)[0]
    category_key = re.sub(r"\.repeat\d+", "", category_key)

    kvm_ver = utils.system_output(params.get("ver_cmd", "rpm -q qemu-kvm"))
    host_ver = os.uname()[2]
    test.write_test_keyval({"kvm-userspace-ver": kvm_ver})
    test.write_test_keyval({"host-kernel-ver": host_ver})
    test.write_test_keyval({"guest-kernel-ver": guest_ver})
    # Find the results files

    results_files = {}
    file_list = ["guest_result", "guest_monitor_result.*sum", "host_monitor_result.*sum"]
    if params.get("file_list"):
        file_list = params.get("file_list").split()

    for files in os.walk(topdir):
        if files[2]:
            for file in files[2]:
                jump_flag = False
                for ignore_case in ignore_cases:
                    if ignore_case in files[0]:
                        jump_flag = True
                if jump_flag:
                    continue
                file_dir_norpt = re.sub(r"\.repeat\d+", "", files[0])
                if repeatn in files[0] and category_key in file_dir_norpt and case_type in files[0]:
                    for i, pattern in enumerate(file_list):
                        if re.findall(pattern, file):
                            prefix = re.findall(r"%s\.[\d\w_\.]+" % case_type, file_dir_norpt)[0]
                            prefix = re.sub(r"\.|_", "--", prefix)
                            if prefix not in results_files:
                                # One result slot per pattern in file_list
                                results_files[prefix] = [None] * len(file_list)
                            tmp_file = utils_misc.get_path(files[0], file)
                            results_files[prefix][i] = tmp_file

    # Start to read results from results file and monitor file
    results_matrix = {}
    no_table_results = {}
    thread_tag = params.get("thread_tag", "thread")
    order_list = []
    for prefix in results_files:
        marks = params.get("marks", "").split()
        case_infos = prefix.split("--")
        case_type = case_infos[0]
        threads = ""
        refresh_order_list = True
        prefix_perf = prefix
        if case_type == "ffsb":
            category = "-".join(case_infos[:-1])
            threads = case_infos[-1]
        elif case_type == "qcow2perf":
            refresh_order_list = False
            if len(case_infos) > 2:
                category = "-".join(case_infos[:-2])
                thread_tag = case_infos[-2]
                threads = " "
                marks[0] = re.sub("TIME", case_infos[-1], marks[0])
            else:
                category = case_infos[-1]
                marks[0] = re.sub("TIME", case_infos[-1], marks[0])
            prefix_perf = "--".join(case_infos[:-1])
        else:
            category = "-".join(case_infos)
        if refresh_order_list:
            order_list = []
        if category not in results_matrix.keys() and category not in no_table_list:
            results_matrix[category] = {}
        if threads:
            if threads not in results_matrix[category].keys():
                results_matrix[category][threads] = {}
                results_matrix["thread_tag"] = thread_tag
            tmp_dic = results_matrix[category][threads]
        elif category not in no_table_list:
            tmp_dic = results_matrix[category]

        result_context_file = open(results_files[prefix][0], "r")
        result_context = result_context_file.read()
        result_context_file.close()
        for mark in marks:
            mark_tag, mark_key = mark.split(":")
            datas = re.findall(mark_key, result_context)
            if isinstance(datas[0], tuple):
                data = time_ana(datas[0])
            else:
                tmp_data = 0.0
                for data in datas:
                    if re.findall("[bmkg]", data, re.I):
                        data = utils_misc.normalize_data_size(data, unit_std)
                    tmp_data += float(data)
                data = str(tmp_data)
            if data:
                if mark_tag in no_table_list:
                    no_table_results[mark_tag] = utils_test.aton(data)
                    perf_value = no_table_results[mark_tag]
                else:
                    tmp_dic[mark_tag] = utils_test.aton(data)
                    perf_value = tmp_dic[mark_tag]
            else:
                raise error.TestError("Can not get the right data from result." "Please check the debug file.")
            if mark_tag not in no_table_list and mark_tag not in order_list:
                order_list.append(mark_tag)
            test.write_perf_keyval({"%s-%s" % (prefix_perf, mark_tag): perf_value})
        # Start analyzing the mpstat results
        if params.get("mpstat") == "yes":
            guest_cpu_infos = mpstat_ana(results_files[prefix][1])
            for vcpu in guest_cpu_infos:
                if vcpu != "all":
                    tmp_dic[vcpu] = float(guest_cpu_infos[vcpu])
                    order_list.append(vcpu)
            host_cpu_infos = mpstat_ana(results_files[prefix][2])
            tmp_dic["Hostcpu"] = float(host_cpu_infos["all"])
            order_list.append("Hostcpu")
        # Add some special keys for specific cases
        if case_type == "ffsb":
            tmp_dic["MBps_per_Hostcpu"] = tmp_dic["Thro-MBps"] / tmp_dic["Hostcpu"]
            order_list.append("MBps_per_Hostcpu")
        elif case_type == "iozone":
            sum_kbps = 0
            for mark in marks:
                mark_tag, _ = mark.split(":")
                sum_kbps += tmp_dic[mark_tag]
            tmp_dic["SUMKbps_per_Hostcpu"] = sum_kbps / tmp_dic["Hostcpu"]
            order_list.append("SUMKbps_per_Hostcpu")

    sum_marks = params.get("sum_marks", "").split()
    sum_matrix = {}
    order_line = ""
    if results_matrix.get("thread_tag"):
        headline = "%20s|" % results_matrix["thread_tag"]
        results_matrix.pop("thread_tag")
    else:
        headline = ""
    for index, tag in enumerate(order_list):
        headline += "%s|" % format_result(tag)
        order_line += "DATA%d|" % index
    headline = headline.rstrip("|")
    order_line = order_line.rstrip("|")

    result_path = utils_misc.get_path(resultsdir, "%s-result.RHS" % case_type)
    if os.path.isfile(result_path):
        result_file = open(result_path, "r+")
    else:
        result_file = open(result_path, "w")
        result_file.write("### kvm-userspace-version : %s\n" % kvm_ver)
        result_file.write("### kvm-version : %s\n" % host_ver)
        result_file.write("### guest-kernel-version :%s\n" % guest_ver)

    test.write_test_keyval({"category": headline})
    result_file.write("Category:ALL\n")
    matrix_order = params.get("matrix_order", "").split()
    if not matrix_order:
        matrix_order = sorted(results_matrix.keys())
    for category in matrix_order:
        out_loop_line = order_line
        result_file.write("%s\n" % category)
        line = ""
        write_out_loop = True
        result_file.write("%s\n" % headline)
        for item in results_matrix[category]:
            if isinstance(results_matrix[category][item], dict):
                tmp_dic = results_matrix[category][item]
                line = "%s|" % format_result(item)
                for tag in order_list:
                    line += "%s|" % format_result(tmp_dic[tag])
                    if tag in sum_marks:
                        sum_matrix = get_sum_result(sum_matrix, tmp_dic[tag], tag)
                result_file.write("%s\n" % line.rstrip("|"))
                write_out_loop = False
            else:
                # line += "%s|" % format_result(results_matrix[category][item])
                re_data = "DATA%s" % order_list.index(item)
                out_loop_line = re.sub(re_data, format_result(results_matrix[category][item]), out_loop_line)
                if item in sum_marks:
                    sum_matrix = get_sum_result(sum_matrix, results_matrix[category][item], item)
        if write_out_loop:
            result_file.write("%s\n" % out_loop_line)

    if sum_matrix:
        if case_type == "ffsb":
            sum_matrix["MBps_per_Hostcpu"] = sum_matrix["Thro-MBps"] / sum_matrix["Hostcpu"]
            sum_marks.append("MBps_per_Hostcpu")
        result_file.write("Category:SUM\n")
        headline = ""
        line = ""
        if len(sum_matrix) < 4:
            for i in range(4 - len(sum_matrix)):
                headline += "%20s|" % "None"
                line += "%20d|" % 0
        for tag in sum_marks:
            headline += "%20s|" % tag
            line += "%s|" % format_result(sum_matrix[tag])

        result_file.write("%s\n" % headline.rstrip("|"))
        result_file.write("%s\n" % line.rstrip("|"))

    if no_table_results:
        no_table_order = params.get("no_table_order", "").split()
        if not no_table_order:
            no_table_order = sorted(no_table_results.keys())
        for item in no_table_order:
            result_file.write("%s: %s\n" % (item, no_table_results[item]))

    result_file.close()
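
Finally, a sketch of the params keys this version of result_sum reads through params.get(). All values are illustrative placeholders rather than a real test configuration.

# Hypothetical configuration values for result_sum.
params = {
    "test": "ffsb",                                  # case_type; also names the *-result.RHS file
    "unit_std": "M",                                 # target unit for normalize_data_size
    "marks": r"Thro-MBps:Throughput\s+(\d+\.\d+)",   # space-separated "tag:regex" pairs
    "sum_marks": "Thro-MBps Hostcpu",                # tags folded into the Category:SUM block
    "mpstat": "yes",                                 # also merge guest/host mpstat summaries
    "thread_tag": "thread",                          # first column header when threads are present
}
print(params.get("marks", "").split())       # each entry defines one result-table column
print(params.get("sum_marks", "").split())   # -> ['Thro-MBps', 'Hostcpu']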