def __match_index(self, file):
    """Parse unixbench screen output.

    For every configured parallelism, find the banner line announcing the
    run ("N CPUs in system; running M parallel copies of tests") and read
    the 13 index values located at fixed offsets below it, storing one
    [attrib, result_dict] pair per parallelism in self.result_list.
    """
    result_dic = {}.fromkeys(unixbench_keys, 0)
    result_lines = lutils.read_all_lines(file)
    for parallel in self.parallels:
        # hoisted: compile the banner pattern once per parallelism
        banner = re.compile(
            r"\d+ CPU\S in system; running %d parallel cop\S+ of tests" % parallel,
            re.I)
        parallel_result_dic = result_dic.copy()
        # BUG FIX: enumerate() yields each line's true position; the original
        # result_lines.index(line) returned the FIRST occurrence of an equal
        # line, mis-addressing the fixed offsets when lines repeat.
        for parallel_index, line in enumerate(result_lines):
            if banner.search(line):
                # index values sit at fixed offsets below the banner line
                paralell_result_list = [
                    self.__get_value(result_lines, parallel_index + offset)
                    for offset in (16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 29)
                ]
                for key, value in zip(
                        tuple(unixbench_keys),
                        tuple([lutils.change_type(i) for i in paralell_result_list])):
                    parallel_result_dic[key] = "%.1f" % value
        parallel_result_attrib = self.create_result_node_attrib(
            "Average", self.times, parallel, self.parallels)
        self.result_list.append([parallel_result_attrib, parallel_result_dic])
def create_result(self):
    """Build self.result_list from the bonnie.out CSV output.

    Every well-formed data row (27 comma-separated fields, excluding the
    header row) becomes one result entry; a final "Average" entry
    summarising all rows is appended at the end.

    Raises:
        IOError: if bonnie.out does not exist under self.tmp_dir.
    """
    sum_dic = {}
    file = os.path.join(self.tmp_dir, "bonnie.out")
    lptlog.debug("读取 %s 文件" % file)
    if not os.path.isfile(file):
        # py2/py3-compatible raise form (was: raise IOError, "...")
        raise IOError("open %s Error" % file)
    results_lines = utils.read_all_lines(file)
    labels = ["name","file_size","putc","putc_cpu","put_block","put_block_cpu","rewrite","rewrite_cpu",
              "getc","getc_cpu","get_block","get_block_cpu","seeks","seeks_cpu","num_files","seq_create",
              "seq_create_cpu","seq_stat","seq_stat_cpu","seq_del","seq_del_cpu","ran_create","ran_create_cpu",
              "ran_stat","ran_stat_cpu","ran_del","ran_del_cpu"
              ]
    # drop the non-numeric columns (name, file_size, num_files);
    # copy first so the labels list itself is not mutated (the original
    # aliased it via "keys = labels" and popped from both)
    keys = list(labels)
    keys.pop(0)
    keys.pop(0)
    keys.pop(12)
    keys = tuple(keys)
    iter = 0
    for line in results_lines:
        fields = line.split(',')
        if len(fields) != 27:
            continue
        if fields[0] == "name":  # header row
            continue
        iter += 1
        lptlog.debug("line.split==27: %s" % line)
        attrib_dic = self.create_result_node_attrib(iter, self.times, 1, [1])
        # drop the same non-numeric columns from the data row
        fields.pop(0)
        fields.pop(0)
        fields.pop(12)
        fields = tuple([utils.change_type(i) for i in fields])
        # BUG FIX: build a fresh dict per row. The original reused one
        # key_dic object for every row, so all result_list entries aliased
        # the same dict and ended up holding only the last row's values.
        key_dic = {}
        for l, v in zip(keys, fields):
            key_dic[l] = "%d" % v
        if not sum_dic:
            sum_dic = key_dic.copy()
        else:
            sum_dic = method.append_sum_dict(sum_dic, key_dic)
        self.result_list.append([attrib_dic, key_dic])
    if sum_dic:
        parallel_average_dic = method.append_average_dict(sum_dic, self.times)
        lptlog.debug("1 并行求平均值:PASS" )
        sum_attrib_dic = self.create_result_node_attrib("Average", self.times, 1, [1])
        self.result_list.append([sum_attrib_dic, parallel_average_dic])
def __match_index(self, file):
    """Parse unixbench screen output.

    Locates the per-parallelism banner line and gathers the 13 benchmark
    index values at fixed offsets below it into self.result_list as one
    [attrib, result_dict] pair per parallelism.
    """
    result_dic = {}.fromkeys(unixbench_keys, 0)
    result_lines = lutils.read_all_lines(file)
    for parallel in self.parallels:
        # compiled once per parallelism instead of re-scanned per line
        banner = re.compile(
            r"\d+ CPU\S in system; running %d parallel cop\S+ of tests" % parallel,
            re.I)
        parallel_result_dic = result_dic.copy()
        # BUG FIX: use enumerate() for the true line index; the original
        # result_lines.index(line) found only the first equal line, which
        # breaks the fixed-offset lookups when identical lines repeat.
        for parallel_index, line in enumerate(result_lines):
            if banner.search(line):
                paralell_result_list = [
                    self.__get_value(result_lines, parallel_index + offset)
                    for offset in (16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 29)
                ]
                for key, value in zip(
                        tuple(unixbench_keys),
                        tuple([lutils.change_type(i) for i in paralell_result_list])):
                    parallel_result_dic[key] = "%.1f" % value
        parallel_result_attrib = self.create_result_node_attrib(
            "Average", self.times, parallel, self.parallels)
        self.result_list.append(
            [parallel_result_attrib, parallel_result_dic])
def __match_index(self, file):
    """Parse x11perf screen output (unixbench-style report format).

    Forces a single-parallel, three-iteration configuration, then reads
    the six metric values at fixed offsets below the banner line into
    self.result_list.
    """
    # NOTE(review): the parser overwrites the instance configuration here;
    # presumably x11perf always runs 1 parallel x 3 times — confirm.
    self.parallels = [1]
    self.times = 3
    result_dic = {}.fromkeys(x11perf_keys, 0)
    result_lines = utils.read_all_lines(file)
    for parallel in self.parallels:
        banner = re.compile(
            r"[\d]+ CPUs in system; running %d parallel copy of tests" % parallel,
            re.I)
        parallel_result_dic = result_dic.copy()
        # BUG FIX: enumerate() gives each line's true index; the original
        # result_lines.index(line) returned only the first equal line.
        for parallel_index, line in enumerate(result_lines):
            if banner.search(line):
                paralell_result_list = [
                    self.__get_value(result_lines, parallel_index + offset)
                    for offset in (10, 11, 12, 13, 14, 15)
                ]
                for key, value in zip(
                        tuple(x11perf_keys),
                        tuple([utils.change_type(i) for i in paralell_result_list])):
                    parallel_result_dic[key] = "%.1f" % value
        parallel_result_attrib = self.create_result_node_attrib(
            "Average", self.times, parallel, self.parallels)
        self.result_list.append(
            [parallel_result_attrib, parallel_result_dic])
def __match_index(self, file):
    """Extract netperf metrics from *file*.

    Reads the five benchmark values of both iterations from fixed
    (line, column) offsets in the output, then appends an 'Average'
    entry computed from the two iterations.

    @return: list of [attrib_dict, value_dict] pairs; [] when *file*
             is missing.
    """
    if not os.path.isfile(file):
        return []
    lptlog.debug("在%s中搜索测试指标" % file)
    results_lines = utils.read_all_lines(file)
    labels = ('TCP_STREAM', 'UDP_STREAM', 'TCP_RR', 'UDP_RR', 'TCP_CRR')
    parallel_template = {'parallels': '1', 'parallel': '1', 'iter': '1', 'times': '2'}

    def pick(spec):
        # spec: (line_number, column) pairs at fixed offsets in the report
        return tuple(results_lines[row].split()[col] for row, col in spec)

    first = pick(((6, 4), (13, 3), (21, 5), (29, 5), (37, 5)))
    second = pick(((45, 4), (52, 3), (60, 5), (68, 5), (76, 5)))

    result_list = []
    for tag, values in (('1', first), ('2', second)):
        attrib = copy.deepcopy(parallel_template)
        attrib['iter'] = tag
        result_list.append([attrib, self.dict_generator(labels, values)])

    # average of the two iterations, appended last
    attrib = copy.deepcopy(parallel_template)
    attrib['iter'] = 'Average'
    result_list.append(
        [attrib, self.dict_average(result_list[0][1], result_list[1][1])])
    return result_list
def __match_index(self, file):
    """Collect iozone metrics from *file*.

    In "speed" mode, 8-column numeric data rows are mapped onto the
    write/rewrite/read/... labels; otherwise the throughput summary
    lines ("Children see throughput for ...") are scanned.

    @return: dict of metric name -> value ([] when *file* is missing)
    @attention: partly adapted from autotest code
    """
    metrics = {}
    if not os.path.isfile(file):
        return []
    lptlog.debug("在%s中搜索测试指标" % file)
    lines = utils.read_all_lines(file)
    if self.testmode == "speed":
        labels = ('write', 'rewrite', 'read', 'reread', 'randread', 'randwrite')
        for row in lines:
            cols = row.split()
            # data rows have exactly 8 whitespace-separated columns
            if len(cols) != 8:
                continue
            lptlog.debug("line.split==8: %s" % row)
            try:
                numbers = tuple(int(c) for c in cols)
            except Exception:
                # non-numeric row (header/footer) — skip it
                continue
            # first two columns are file size / record length; skip them
            for name, value in zip(labels, numbers[2:]):
                metrics["%s" % name] = "%d" % value
    else:
        child_regexp = re.compile(
            r'Children see throughput for[\s]+([\d]+)[\s]+([\S]+|[\S]+[\s][\S]+)[\s]+=[\s]+([\w]+)*'
        )
        for row in lines:
            hit = child_regexp.search(row.strip())
            if not hit:
                continue
            # worker count and section name from the summary line
            workers = int(hit.group(1))
            lptlog.debug("w_count:%s" % workers)
            section = self.__get_section_name(hit.group(2))
            lptlog.debug("section:%s" % section)
            metrics[section] = hit.group(3)
    return metrics
def __match_index(self, file):
    '''Search *file* for iozone test metrics.

    @return: dict of metric name -> value; [] when the file is missing
    @attention: partly adapted from autotest code
    '''
    keylist = {}
    if not os.path.isfile(file):
        return []
    lptlog.debug("在%s中搜索测试指标" % file)
    results_lines = lutils.read_all_lines(file)
    if self.testmode == "speed":
        # only the six basic modes are kept (bkwdread etc. dropped)
        labels = ('write', 'rewrite', 'read', 'reread', 'randread','randwrite')
        for line in results_lines:
            fields = line.split()
            # data rows have exactly 8 whitespace-separated columns
            if len(fields) != 8:
                continue
            lptlog.debug("line.split==8: %s" % line)
            try:
                fields = tuple([int(i) for i in fields])
            except Exception:
                # non-numeric row (header/footer) — skip it
                continue
            # first two columns are file size / record length; skip them
            for l, v in zip(labels, fields[2:]):
                key_name = "%s" % l
                keylist[key_name] = "%d" % v
    else:
        # throughput mode: "Children see throughput for N <section> = X"
        child_regexp = re.compile(r'Children see throughput for[\s]+([\d]+)[\s]+([\S]+|[\S]+[\s][\S]+)[\s]+=[\s]+([\w]+)*')
        section = None
        w_count = 0
        for line in results_lines:
            # NOTE(review): unlike the sibling parser, lines are NOT
            # strip()ed here before matching — confirm this is intended
            match = child_regexp.search(line)
            if match:
                # worker count and section name from the summary line
                w_count = int(match.group(1))
                lptlog.debug("w_count:%s" % w_count)
                section = self.__get_section_name(match.group(2))
                lptlog.debug("section:%s" % section)
                keylist[section] = match.group(3)
    return keylist
def __search_result(self, file):
    """Parse STREAM output and return (copy, scale, add, triad).

    Finds the line starting with "Copy" and reads the three result lines
    (Scale, Add, Triad) that immediately follow it.

    @return: 4-tuple of converted values, or (0, 0, 0, 0) when no result
             block is found.
    """
    # BUG FIX: was r'Copy*' — the '*' made the final 'y' optional, so any
    # line starting with "Cop" matched. re.match() anchors at line start.
    r = re.compile(r'Copy', re.I)
    lines = lutils.read_all_lines(file)
    # BUG FIX: enumerate() gives the true index of the matched line; the
    # original lines.index(line) returned the first equal line only.
    for index, line in enumerate(lines):
        if r.match(line):
            copy = line.split()[1]
            scale = lines[index + 1].split()[1]
            add = lines[index + 2].split()[1]
            triad = lines[index + 3].split()[1]
            return tuple(map(lutils.change_type, [copy, scale, add, triad]))
    return (0, 0, 0, 0)
def __search_result(self, file):
    """Parse STREAM output and return (copy, scale, add, triad).

    Finds the line starting with "Copy" and reads the three result lines
    (Scale, Add, Triad) that immediately follow it.

    @return: 4-tuple of converted values, or (0, 0, 0, 0) when no result
             block is found.
    """
    # BUG FIX: was r'Copy*' — the '*' made the final 'y' optional, so any
    # line starting with "Cop" matched. re.match() anchors at line start.
    r = re.compile(r'Copy', re.I)
    lines = utils.read_all_lines(file)
    # BUG FIX: enumerate() gives the true index of the matched line; the
    # original lines.index(line) returned the first equal line only.
    for index, line in enumerate(lines):
        if r.match(line):
            copy = line.split()[1]
            scale = lines[index + 1].split()[1]
            add = lines[index + 2].split()[1]
            triad = lines[index + 3].split()[1]
            return tuple(map(utils.change_type, [copy, scale, add, triad]))
    return (0, 0, 0, 0)
def create_result(self): '''创建result_list ''' #labels = ("Throughtput", "clients", "max_latency") labels = ("Throughtput", "max_latency") parallelstring = ",".join(map(str, self.parallels)) r = re.compile( r"Throughput\s+(\d+.\d+)\s+MB/sec\s+(\d+)\s+clients\s+\d+\s+procs\s+max_latency=(\d+.\d+)\s", re.I) for parallel in self.parallels: sum_dic = {} for iter in range(self.times): tmp_result_file = os.path.join( self.tmp_dir, "%s_%s_%s.out" % (self.tool, parallel, iter + 1)) if not os.path.isfile(tmp_result_file): lptlog.warning("%s 不存在" % tmp_result_file) continue result_lines = utils.read_all_lines(tmp_result_file) for line in result_lines: key_dic = {} if r.match(line): m = r.match(line) #result_list = [m.group(1), m.group(2), m.group(3)] result_list = [m.group(1), m.group(3)] result_tuple = tuple( [utils.change_type(i) for i in result_list]) for l, v in zip(labels, result_tuple): key_dic[l] = "%d" % v if not sum_dic: sum_dic = key_dic.copy() else: sum_dic = method.append_sum_dict(sum_dic, key_dic) self.result_list.append([ self.create_result_node_attrib( iter + 1, self.times, parallel, self.parallels), key_dic ]) if sum_dic: parallel_average_dic = method.append_average_dict( sum_dic, self.times) lptlog.debug("%d 并行求平均值:PASS" % parallel) self.result_list.append([ self.create_result_node_attrib("Average", self.times, parallel, self.parallels), parallel_average_dic ])
def __match_index(self, game, file):
    """Collect init/complete times for *game* and return the averaged result.

    Averages the "threads initialised" and "games completed" times (usec)
    found in *file* and also records their sum.

    @return: [attrib_dict, result_dict]; just [] when *file* is missing.
    """
    labels = ('initialised', 'completed', 'total')
    result_list = []
    result_dict = {}
    if not os.path.isfile(file):
        # BUG FIX: the original call lacked the % argument and logged the
        # literal string "%s 不存在"
        lptlog.debug("%s 不存在" % file)
        return result_list
    r_init_time = re.compile(
        r'(?P<thread>\d+) threads initialised in (?P<initialised>\d+) usec'
    )
    r_complete_time = re.compile(
        r"(?P<games>\d+) games completed in (?P<completed>\d+) usec")
    init_time_list = []
    complete_time_list = []
    # gather initialisation and completion times into two lists
    for line in lutils.read_all_lines(file):
        m = r_init_time.match(line)  # match once, reuse (was matched twice)
        if m:
            init_time_list.append(m.group("initialised"))
        m = r_complete_time.match(line)
        if m:
            complete_time_list.append(m.group("completed"))
    # average initialisation time
    init_time_average = lutils.average_list(
        lutils.string_to_float(init_time_list), bits=0)
    # average completion time
    complete_time_average = lutils.average_list(
        lutils.string_to_float(complete_time_list), bits=0)
    sum_time = init_time_average + complete_time_average
    # build the result dict
    for l, v in zip(labels,
                    (init_time_average, complete_time_average, sum_time)):
        result_dict[l] = "%d" % v
    # build the result attributes (game counts are doubled for reporting)
    result_node_attrib = self.create_result_node_attrib(
        "Average", self.times, game * 2, [i * 2 for i in self.games])
    result_list.append(result_dict)
    result_list.insert(0, result_node_attrib)
    return result_list
def __match_index(self, file):
    """Parse fio summary output into a result list.

    Each READ:/WRITE: summary line becomes one [attrib, value_dict]
    entry; after every READ+WRITE pair an 'Average' entry of the two is
    appended. Up to four parallelisms (8 data lines) are handled.

    @return: list of [attrib_dict, value_dict] pairs; [] when *file*
             is missing.
    """
    if not os.path.isfile(file):
        return []
    lptlog.debug("在%s中搜索测试指标" % file)
    results_lines = utils.read_all_lines(file)
    labels = ('io', 'aggrb', 'minb', 'maxb', 'mint','maxt')
    parallel_template = {'parallels': '1,2,3,4', 'parallel': '1', 'iter': '1', 'times': '2'}
    result_list = []
    count = 0
    for line in results_lines:
        # merged the duplicated READ/WRITE branches — only the tag differs
        if 'READ:' in line:
            tag = 'READ'
        elif 'WRITE:' in line:
            tag = 'WRITE'
        else:
            continue
        parallel_dict = copy.deepcopy(parallel_template)
        # BUG FIX: '//' keeps floor-division semantics under Python 3 too
        # ('/' on ints floor-divides only under Python 2)
        parallel_dict['parallel'] = str(count // 2 + 1)
        parallel_dict['iter'] = tag
        result_list.append([parallel_dict, self.dict_generator(labels, line)])
        count = count + 1
        # BUG FIX: average only right after a pair completes; the original
        # loop-level check re-appended the Average entry for every
        # subsequent non-matching line while count stayed even
        if count in [2, 4, 6, 8]:
            dict2 = result_list[-1][1]
            dict1 = result_list[-2][1]
            parallel_dict = copy.deepcopy(parallel_template)
            parallel_dict['parallel'] = str(count // 2)
            parallel_dict['iter'] = 'Average'
            result_list.append([parallel_dict, self.dict_average(dict1, dict2)])
    return result_list
def __match_index(self, file):
    """Parse x11perf screen output (unixbench-style report format).

    Forces a single-parallel, three-iteration configuration, then reads
    the six metric values at fixed offsets below the banner line into
    self.result_list.
    """
    # NOTE(review): the parser overwrites the instance configuration here;
    # presumably x11perf always runs 1 parallel x 3 times — confirm.
    self.parallels = [1]
    self.times = 3
    result_dic = {}.fromkeys(x11perf_keys, 0)
    result_lines = utils.read_all_lines(file)
    for parallel in self.parallels:
        banner = re.compile(
            r"[\d]+ CPUs in system; running %d parallel copy of tests" % parallel,
            re.I)
        parallel_result_dic = result_dic.copy()
        # BUG FIX: enumerate() yields each line's true index; the original
        # result_lines.index(line) found only the first equal line, which
        # breaks the fixed-offset lookups when identical lines repeat.
        for parallel_index, line in enumerate(result_lines):
            if banner.search(line):
                paralell_result_list = [
                    self.__get_value(result_lines, parallel_index + offset)
                    for offset in (10, 11, 12, 13, 14, 15)
                ]
                for key, value in zip(
                        tuple(x11perf_keys),
                        tuple([utils.change_type(i) for i in paralell_result_list])):
                    parallel_result_dic[key] = "%.1f" % value
        parallel_result_attrib = self.create_result_node_attrib(
            "Average", self.times, parallel, self.parallels)
        self.result_list.append([parallel_result_attrib, parallel_result_dic])
def __match_index(self, game, file):
    """Collect init/complete times for *game* and return the averaged result.

    Averages the "threads initialised" and "games completed" times (usec)
    found in *file* and also records their sum.

    @return: [attrib_dict, result_dict]; just [] when *file* is missing.
    """
    labels = ('initialised', 'completed', 'total')
    result_list = []
    result_dict = {}
    if not os.path.isfile(file):
        # BUG FIX: the original call lacked the % argument and logged the
        # literal string "%s 不存在"
        lptlog.debug("%s 不存在" % file)
        return result_list
    r_init_time = re.compile(r'(?P<thread>\d+) threads initialised in (?P<initialised>\d+) usec')
    r_complete_time = re.compile(r"(?P<games>\d+) games completed in (?P<completed>\d+) usec")
    init_time_list = []
    complete_time_list = []
    # gather initialisation and completion times into two lists
    for line in utils.read_all_lines(file):
        m = r_init_time.match(line)  # match once, reuse (was matched twice)
        if m:
            init_time_list.append(m.group("initialised"))
        m = r_complete_time.match(line)
        if m:
            complete_time_list.append(m.group("completed"))
    # average initialisation time
    init_time_average = utils.average_list(utils.string_to_float(init_time_list), bits=0)
    # average completion time
    complete_time_average = utils.average_list(utils.string_to_float(complete_time_list), bits=0)
    sum_time = init_time_average + complete_time_average
    # build the result dict
    for l, v in zip(labels, (init_time_average, complete_time_average, sum_time)):
        result_dict[l] = "%d" % v
    # build the result attributes (game counts are doubled for reporting)
    result_node_attrib = self.create_result_node_attrib(
        "Average", self.times, game * 2, [i * 2 for i in self.games])
    result_list.append(result_dict)
    result_list.insert(0, result_node_attrib)
    return result_list
def create_result(self): '''创建result_list ''' #labels = ("Throughtput", "clients", "max_latency") labels = ("Throughtput", "max_latency") parallelstring = ",".join(map(str, self.parallels)) r = re.compile(r"Throughput\s+(\d+.\d+)\s+MB/sec\s+(\d+)\s+clients\s+\d+\s+procs\s+max_latency=(\d+.\d+)\s", re.I) for parallel in self.parallels: sum_dic = {} for iter in range(self.times): tmp_result_file = os.path.join(self.tmp_dir, "%s_%s_%s.out" % (self.tool, parallel, iter+1)) if not os.path.isfile(tmp_result_file): lptlog.warning("%s 不存在" % tmp_result_file) continue result_lines = utils.read_all_lines(tmp_result_file) for line in result_lines: key_dic = {} if r.match(line): m = r.match(line) #result_list = [m.group(1), m.group(2), m.group(3)] result_list = [m.group(1), m.group(3)] result_tuple = tuple([utils.change_type(i)for i in result_list]) for l, v in zip(labels, result_tuple): key_dic[l] = "%d" % v if not sum_dic: sum_dic = key_dic.copy() else: sum_dic = method.append_sum_dict(sum_dic, key_dic) self.result_list.append([self.create_result_node_attrib(iter+1, self.times, parallel, self.parallels), key_dic]) if sum_dic: parallel_average_dic = method.append_average_dict(sum_dic, self.times) lptlog.debug("%d 并行求平均值:PASS" % parallel) self.result_list.append([self.create_result_node_attrib("Average", self.times, parallel, self.parallels), parallel_average_dic])