def get_fluctuation(self, file_path):
    """Build a fluctuation-statistic table from the csv at *file_path*.

    Returns a list of rows: the first row is the expanded header (every
    non-time column gains a companion "<name>-ratio" column) and every
    data row carries, next to each raw value, its ratio against the
    column average taken from the trailing 'avg' line of the file.
    Returns None when parsing is prohibited.
    """
    if self.prohibition:
        print("Sorry ,the power is illegal !")
        return
    raw_lines = utils.cut_file(file_path)
    # Drop empty / newline-only entries and strip carriage returns.
    lines = [l.replace("\r", "") for l in raw_lines
             if l is not None and l not in ('', '\n')]
    title_line = lines[0]
    # The 'avg' summary line lives among the last five lines of the csv.
    # Initialize to '' (not []) so a missing avg line yields [''] from
    # split() instead of an AttributeError.
    avg_line = ''
    for item in lines[-5:]:
        if 'avg' in item.lower():
            avg_line = item
    avg_list = avg_line.split(",")
    # Expand the header: every non-time column gains a "-ratio" twin.
    new_title = []
    for item in title_line.split(","):
        new_title.append(item)
        if 'time' not in item.lower():
            new_title.append(item + "-ratio")
    file_stat = [new_title]
    for line in lines[1:]:
        # Skip variance summary rows.
        if 'var' in line.lower():
            continue
        line_list = line.split(",")
        new_line = [line_list[0]]  # timestamp column gets no ratio
        for field_num, item in enumerate(line_list[1:], start=1):
            new_line.append(item)
            new_line.append(self.count_ratio(item, avg_list[field_num]))
        file_stat.append(new_line)
    return file_stat
def get_statistic(self, file_path):
    """Aggregate per-VM group statistics from the csv at *file_path*.

    Lines are grouped into named blocks via self.split_file(); within a
    block, rows whose first field matches "<group>/vm..." are summed
    column-wise per group.  For the read/write blocks, columns 2-4 hold
    averages, so they are divided by the per-group row count.

    Returns {block_name: {group: [values...]}}, or None when parsing is
    prohibited.
    """
    if self.prohibition:
        print("Sorry ,the power is illegal !")
        return
    raw_lines = utils.cut_file(file_path)
    lines = [l.replace("\r", "") for l in raw_lines
             if l is not None and l not in ('', '\n')]
    parse_type = utils.get_parse_type(file_path)
    blocks_str = self.split_file(lines, parse_type)
    file_stat = {}
    for block_name, blockData_list in blocks_str.items():
        if blockData_list is None:
            continue
        dic = {}
        num = {}
        # First pass: discover groups and size their accumulators.
        for item in blockData_list:
            line_list = item.split(",")
            match = re.search("(.*)/vm", line_list[0])
            if match:
                group = match.group(1)
                num[group] = 0
                dic[group] = [0.0] * (len(line_list) - 1)
        # Second pass: accumulate column sums per group.
        for item in blockData_list:
            line_list = item.split(",")
            match = re.search("(.*)/vm", line_list[0])
            if match:
                group = match.group(1)
                num[group] += 1
                for i in range(1, len(line_list)):
                    # float() replaces string.atof(), which was removed
                    # in Python 3 (it was a deprecated alias of float).
                    dic[group][i - 1] += round(float(line_list[i]), 2)
        if block_name in ('read_block_str', 'write_block_str'):
            # Columns 2-4 are per-row averages, not totals.
            for group_name, group_data_list in dic.items():
                for col in (2, 3, 4):
                    group_data_list[col] = round(
                        group_data_list[col] / num[group_name], 2)
        file_stat[block_name] = dic
    return file_stat
def log_file_maker(self, file_path):
    """Parse a fio log file and return a LogFile object.

    Pipeline: cut the file into lines, split the report into string
    blocks, convert the string blocks into a block dict, then build a
    LogFile from the read/write/latency-percentile/io-distribution
    blocks together with the file's parse type.
    """
    file_lines = utils.cut_file(file_path)
    block_map = self.get_blocks(self.split_report(file_lines))
    return LogFile(
        file_path,
        utils.get_parse_type(file_path),
        block_map["read"],
        block_map["write"],
        block_map["latPercent"],
        block_map["ioDistribution"],
    )
def get_total(self, file_path):
    """Build a total-statistic table from the csv at *file_path*.

    Data columns are laid out as (time, vm1-read, vm1-write, vm2-read,
    vm2-write, ...).  Each data row is extended with a per-VM total
    (read + write), then a global "Read-total" and "Write-total".
    Returns the table as a list of rows, or None when parsing is
    prohibited.
    """
    if self.prohibition:
        print("Sorry ,the power is illegal !")
        return
    raw_lines = utils.cut_file(file_path)
    lines = [l.replace("\r", "") for l in raw_lines
             if l is not None and l not in ('', '\n')]
    title_list = lines[0].split(",")
    # Discover VM names from the "<name>-read" header columns;
    # time columns are copied through without inspection.
    new_title = []
    vm_name_dic = {}
    for item in title_list:
        new_title.append(item)
        if 'time' in item.lower():
            continue
        match = re.search("(.*)-read", item)
        if match:
            group = match.group(1)
            vm_name_dic[group] = group + '-total'
    for key in sorted(vm_name_dic.keys()):
        new_title.append(vm_name_dic[key])
    new_title.append("Read-total")
    new_title.append("Write-total")
    vm_num = len(vm_name_dic)
    file_stat = [new_title]
    for line in lines[1:]:
        # Skip variance summary rows.
        if 'var' in line.lower():
            continue
        line_list = line.split(",")
        new_line = line_list  # extend the freshly parsed row in place
        # Per-VM total: read column (odd index) + write column (even).
        vm_each_total_dic = {}
        for i in range(1, vm_num + 1):
            vm_each_total_dic[i] = (float(line_list[2 * i - 1])
                                    + float(line_list[2 * i]))
        line_total = 0.0
        line_write_total = 0.0
        for i in range(1, vm_num * 2 + 1):
            line_total += float(line_list[i])
            if i % 2 == 0:
                line_write_total += float(line_list[i])
        line_read_total = line_total - line_write_total
        for key in sorted(vm_each_total_dic.keys()):
            new_line.append(vm_each_total_dic[key])
        new_line.append(line_read_total)
        new_line.append(line_write_total)
        file_stat.append(new_line)
    return file_stat
def data_log_file_maker(self, file_path):
    """Parse a fio data-log into a DataLogFile object.

    Each log line looks like "time_ms,value,direction,...": direction
    '0' marks a read sample, '1' a write sample.  Samples are bucketed
    into 5-second slots (index = round(time / 5000)); when a sample
    would skip more than one slot past the previous one, its timestamp
    is stepped back 500 ms at a time until the slot is adjacent again.
    Within a slot the first sample wins.
    """
    parseType = utils.get_parse_type(file_path)
    lines = utils.cut_file(file_path)
    newlines = [l for l in lines
                if l is not None and l not in ('', '\n')]
    # One slot per non-empty line, plus slot 0 which is never filled.
    num = len(newlines) + 1
    readBlock = [0] * num
    writeBlock = [0] * num
    lastIndex_r = 1
    lastIndex_w = 1
    for line in lines:
        line = ''.join(line.split())  # strip ALL whitespace
        if line == '':
            continue
        lineItems = line.split(",")
        index = int(round(int(lineItems[0]) / 5000.0))
        # Gap repair: shift the timestamp back in 500 ms steps until
        # this slot is at most one past the previous slot.
        repair = 500
        if lineItems[2] == '0':
            while index - lastIndex_r > 1:
                index = int(round((int(lineItems[0]) - repair) / 5000.0))
                repair += 500
            lastIndex_r = index
            if readBlock[index] == 0:
                readBlock[index] = int(lineItems[1])
        elif lineItems[2] == '1':
            while index - lastIndex_w > 1:
                index = int(round((int(lineItems[0]) - repair) / 5000.0))
                repair += 500
            lastIndex_w = index
            if writeBlock[index] == 0:
                writeBlock[index] = int(lineItems[1])
    return DataLogFile(file_path, parseType, readBlock, writeBlock)