Example #1
    def output_statistic_CSV(self, data, CSV_path):
        """Write the parsed statistics blocks to a CSV file."""
        if not data:
            return 

        parse_type = utils.get_parse_type(os.path.basename(CSV_path))
        print("")
        print(os.path.basename(CSV_path),parse_type)
        logging.debug("+ %s" % os.path.basename(CSV_path))
        logging.debug("  %s" % parse_type)

        read_datas = []
        write_datas = []
        ioD_datas = []


        csvfile = open(CSV_path, 'wb')
        writer = csv.writer(csvfile)
        for block_name, blockData_dic in data.items():
            if block_name == 'read_block_str':
                writer.writerow(myconf.FIELDS_READ)
                for name, values in blockData_dic.items():
                    line = [name]
                    line.extend(values)
                    read_datas.append(tuple(line))
                writer.writerows(read_datas)
                writer.writerow([])

            if block_name == 'write_block_str':
                writer.writerow(myconf.FIELDS_WRITE)
                for name, values in blockData_dic.items():
                    line = [name]
                    line.extend(values)
                    write_datas.append(tuple(line))
                writer.writerows(write_datas)
                writer.writerow([])

            if block_name == 'ioDistrbution_block_str':
                writer.writerow(myconf.FIELDS_ioD)
                for name, values in blockData_dic.items():
                    line = [name]
                    line.extend(values)
                    ioD_datas.append(tuple(line))
                writer.writerows(ioD_datas)
                writer.writerow([])

        if parse_type is None:
            print("@csv generated with warn@  " + CSV_path)
            logging.debug("@csv generated with warn@   %s" % CSV_path)
        else:
            print("@csv generated successfully@  " + CSV_path)
            logging.debug("@csv generated successfully@   %s" % CSV_path)

        csvfile.close()
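
The same write pattern (a header row from myconf, one row per group, then a blank separator row) can be shown with nothing but the standard csv module. A minimal, self-contained sketch in Python 3 style; FIELDS_READ, block_stats and stats.csv are made-up placeholders, not names from this project:

import csv

# Hypothetical stand-ins for myconf.FIELDS_READ and one parsed statistics block.
FIELDS_READ = ["name", "iops", "bw_kb", "lat_ms"]
block_stats = {
    "groupA": [1200, 4800, 0.83],
    "groupB": [1100, 4400, 0.91],
}

with open("stats.csv", "w", newline="") as csvfile:  # Python 3: text mode + newline=""
    writer = csv.writer(csvfile)
    writer.writerow(FIELDS_READ)                     # header row
    for name, values in block_stats.items():         # one row per group
        writer.writerow([name] + list(values))
    writer.writerow([])                              # blank separator row
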
Example #2
    def output_batch_CSV(self, logFile_list, CSV_path):
        """Write the batch statistics of a list of log files to a CSV file."""
        parse_type = utils.get_parse_type(os.path.basename(CSV_path))

        print(os.path.basename(CSV_path),parse_type)
        logging.debug("+ %s" % os.path.basename(CSV_path))
        logging.debug("  %s" % parse_type)

        read_datas = []
        write_datas = []
        lat_datas = []
        ioD_datas = []

        for logObj in logFile_list:
            print("  "+logObj.fileName)
            logging.debug("  - %s" % logObj.fileName)
            if myconf.PARSE_TYPE["mix"] == parse_type:
                read_datas.append(self.get_readField_data(logObj))
                write_datas.append(self.get_writeField_data(logObj))

            if myconf.PARSE_TYPE["pure_r"] == parse_type:
                read_datas.append(self.get_readField_data(logObj))
                write_datas = [""]

            if myconf.PARSE_TYPE["pure_w"] == parse_type:
                write_datas.append(self.get_writeField_data(logObj))
                read_datas = [""]
            lat_datas.append(self.get_latField_data(logObj,myconf.FIELDS_LAT))
            ioD_datas.append(self.get_ioDField_data(logObj,myconf.FIELDS_ioD))

        csvfile = open(CSV_path, 'wb')
        writer = csv.writer(csvfile)

        writer.writerow(myconf.FIELDS_READ)
        writer.writerows(read_datas)
        writer.writerow("")
        writer.writerow(myconf.FIELDS_WRITE)
        writer.writerows(write_datas)
        writer.writerow("")
        writer.writerow(myconf.FIELDS_LAT)
        writer.writerows(lat_datas)
        writer.writerow("")
        writer.writerow(myconf.FIELDS_ioD)
        writer.writerows(ioD_datas)

        if parse_type is None:
            print("@csv generated with warn@  " + CSV_path)
            logging.debug("@csv generated with warn@   %s" % CSV_path)
        else:
            print("@csv generated successfully@  " + CSV_path)
            logging.debug("@csv generated successfully@   %s" % CSV_path)

        csvfile.close()
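
The three branches above differ only in which row lists get filled for a given parse type. A hedged, standalone sketch of that dispatch; the PARSE_TYPE table below and the row-builder callables are assumptions, the real keys live in myconf:

# Hypothetical PARSE_TYPE table mirroring the myconf lookup used above.
PARSE_TYPE = {"mix": "mix", "pure_r": "read", "pure_w": "write"}

def collect_rows(parse_type, log_objs, read_row, write_row):
    """Fill read/write row lists depending on the detected parse type."""
    read_rows, write_rows = [], []
    for obj in log_objs:
        if parse_type == PARSE_TYPE["mix"]:
            read_rows.append(read_row(obj))
            write_rows.append(write_row(obj))
        elif parse_type == PARSE_TYPE["pure_r"]:
            read_rows.append(read_row(obj))
        elif parse_type == PARSE_TYPE["pure_w"]:
            write_rows.append(write_row(obj))
    # Keep a single empty row for the unused direction, as the method above does.
    return read_rows or [""], write_rows or [""]
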
Example #3
    def get_statistic(self, file_path):
        """Collect statistics from the given CSV file_path."""

        if self.prohibition:
            print("Sorry, you are not permitted to do this!")
            return

        raw_lines = utils.cut_file(file_path)

        lines = []
        for l in raw_lines:
            if l and l != '\n':
                lines.append(l.replace("\r", ""))

        parse_type = utils.get_parse_type(file_path)
        blocks_str = self.split_file(lines,parse_type)

        file_stat = {}
        for block_name, blockData_list in blocks_str.items():
            if blockData_list is None:
                continue
            dic = {}
            num = {}
            for item in blockData_list:
                line_list = item.split(",")
                match = re.search("(.*)/vm", line_list[0])
                if match:
                    group = match.group(1)
                    num[group] = 0
                    dic[group] = [0.0] * (len(line_list) - 1)

            for item in blockData_list:
                line_list = item.split(",")
                match = re.search("(.*)/vm", line_list[0])
                if match:
                    group = match.group(1)
                    num[group] += 1
                    for i in range(1, len(line_list)):
                        dic[group][i - 1] += round(float(line_list[i]), 2)

            if block_name in ('read_block_str', 'write_block_str'):
                for group_name, group_data_list in dic.items():
                    group_data_list[2] = round(group_data_list[2] / num[group_name], 2)
                    group_data_list[3] = round(group_data_list[3] / num[group_name], 2)
                    group_data_list[4] = round(group_data_list[4] / num[group_name], 2)
                    #print("######", round(group_data_list[4] / num[group_name], 2))

            file_stat[block_name] = dic

        return file_stat
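
The accumulation step is easier to follow on a toy input: every line whose first field matches "(.*)/vm" contributes its numeric columns to a per-group running sum, and selected columns are then divided by the number of lines seen for that group. A self-contained sketch with invented lines; the real column meanings come from myconf:

import re

# Invented block lines in the "<group>/vmN,<col1>,<col2>,<col3>" shape the regex expects.
block_lines = [
    "hostA/vm1,100.0,2.5,0.8",
    "hostA/vm2,120.0,2.7,0.9",
    "hostB/vm1,90.0,3.1,1.1",
]

sums, counts = {}, {}
for item in block_lines:
    fields = item.split(",")
    match = re.search(r"(.*)/vm", fields[0])
    if not match:
        continue
    group = match.group(1)
    counts[group] = counts.get(group, 0) + 1
    acc = sums.setdefault(group, [0.0] * (len(fields) - 1))
    for i, value in enumerate(fields[1:]):
        acc[i] += round(float(value), 2)

# Average the latency-style columns (here: columns 1 and 2) per group,
# mirroring the read/write block handling above.
for group, acc in sums.items():
    for col in (1, 2):
        acc[col] = round(acc[col] / counts[group], 2)

print(sums)   # {'hostA': [220.0, 2.6, 0.85], 'hostB': [90.0, 3.1, 1.1]}
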
Example #4
    def log_file_maker(self, file_path):
        """Core fio-log parsing logic.
        1. Cut the file into lines.
        2. Parse the lines into several string blocks.
        3. Put the string blocks into a dict of blocks.
        4. Generate a LogFile object with its attributes.
        5. Return the LogFile object.
        """

        lines = utils.cut_file(file_path)
        blocks_str = self.split_report(lines)
        blocks = self.get_blocks(blocks_str)
        parse_type = utils.get_parse_type(file_path)
        log_file = LogFile(file_path, parse_type, blocks["read"], blocks["write"],
                           blocks["latPercent"], blocks["ioDistribution"])
        return log_file
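
The LogFile class itself is not among these examples; a minimal stand-in that matches the constructor call above might look like the following (all attribute names are guesses taken from the call sites, not the project's real definition):

class LogFile(object):
    """Minimal stand-in matching the constructor call above; the real class
    (with accessors such as get_fileName and get_readBlock) lives elsewhere."""

    def __init__(self, file_path, parse_type, read_block, write_block,
                 lat_percent_block, io_distribution_block):
        self.fileName = file_path
        self.parseType = parse_type
        self.readBlock = read_block
        self.writeBlock = write_block
        self.latPercentBlock = lat_percent_block
        self.ioDistributionBlock = io_distribution_block

    def get_fileName(self):
        return self.fileName
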
Example #5
    def getTimeAxis(self, dataLogFile_list):
        """Return the shortest usable sample count across all data-log files."""

        parserT = utils.get_parse_type(dataLogFile_list[0].get_fileName())

        if myconf.PARSE_TYPE["mix"] == parserT or myconf.PARSE_TYPE["pure_r"] == parserT:
            timeArea = self.getNzeroLines(dataLogFile_list[0].get_readBlock())
            for dataLogObj in dataLogFile_list:
                listlen = self.getNzeroLines(dataLogObj.get_readBlock())
                if listlen < timeArea:
                    timeArea = listlen
            return timeArea
        elif myconf.PARSE_TYPE["pure_w"] == parserT:
            timeArea = self.getNzeroLines(dataLogFile_list[0].get_writeBlock())
            for dataLogObj in dataLogFile_list:
                listlen = self.getNzeroLines(dataLogObj.get_writeBlock())
                if listlen < timeArea:
                    timeArea = listlen
            return timeArea
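
Since both branches just take the smallest usable sample count over the list, the core of getTimeAxis reduces to a min(). A sketch under the assumption that getNzeroLines returns the number of usable samples in a block; the helper itself is not shown in these examples:

def common_time_axis(blocks, usable_len):
    """Shortest usable sample count across all blocks, so every series
    fits the same time axis; usable_len stands in for getNzeroLines."""
    return min(usable_len(block) for block in blocks)

# Toy usage with plain lists and a trivial usable_len:
n = common_time_axis([[1, 2, 3, 0], [4, 5, 0, 0]],
                     lambda b: sum(1 for v in b if v != 0))
print(n)  # 2 -- the second series only has two non-zero samples
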
Example #6
    def output_batch_data_CSV(self, dataLogFile_list, CSV_path):
        """Write the time-series data of a batch of data logs to a CSV file."""

        timeNum = self.getTimeAxis(dataLogFile_list)
        parserType = utils.get_parse_type(CSV_path)
        colNum = len(dataLogFile_list)
        dataLines = []
        fields = ["time"]

        
        statisticsMin = ["Min"]
        statisticsMax = ["Max"]
        statisticsAvg = ["Avg"]
        statisticsVar = ["Var"]

        if myconf.PARSE_TYPE["mix"] == parserType:
            for i in range(1, timeNum):
                line = []
                line.append(i*5000)
                for obj in dataLogFile_list:
                    name = obj.get_fileName()
                    if 0 == fields.count(os.path.basename(os.path.dirname(name)) + "-read"):
                        fields.append(os.path.basename(os.path.dirname(name)) + "-read")
                    if 0 == fields.count(os.path.basename(os.path.dirname(name)) + "-write"):
                        fields.append(os.path.basename(os.path.dirname(name)) + "-write")
                    line.append(obj.get_readBlock()[i])
                    line.append(obj.get_writeBlock()[i])
                dataLines.append(line)
            
            for obj in dataLogFile_list:
                obj.init_statics(timeNum)
                statisticsMin.append(obj.get_min_r())
                statisticsMax.append(obj.get_max_r())
                statisticsAvg.append(obj.get_avg_r())
                statisticsVar.append(obj.get_var_r())

                statisticsMin.append(obj.get_min_w())
                statisticsMax.append(obj.get_max_w())
                statisticsAvg.append(obj.get_avg_w())
                statisticsVar.append(obj.get_var_w())            

        if myconf.PARSE_TYPE["pure_r"] == parserType:
            for i in range(1, timeNum):
                line = []
                line.append(i*5000)
                for obj in dataLogFile_list:
                    name = obj.get_fileName()
                    if 0 == fields.count(os.path.basename(os.path.dirname(name)) + "-read"):
                        fields.append(os.path.basename(os.path.dirname(name)) + "-read")                    
                    line.append(obj.get_readBlock()[i])
                dataLines.append(line)
            for obj in dataLogFile_list:
                obj.init_statics(timeNum)
                statisticsMin.append(obj.get_min_r())
                statisticsMax.append(obj.get_max_r())
                statisticsAvg.append(obj.get_avg_r())
                statisticsVar.append(obj.get_var_r())

        if myconf.PARSE_TYPE["pure_w"] == parserType:
            for i in range(1, timeNum):
                line = []
                line.append(i*5000)
                for obj in dataLogFile_list:
                    name = obj.get_fileName()
                    if 0 == fields.count(os.path.basename(os.path.dirname(name)) + "-write"):
                        fields.append(os.path.basename(os.path.dirname(name)) + "-write")
                    line.append(obj.get_writeBlock()[i])
                dataLines.append(line)
            for obj in dataLogFile_list:
                obj.init_statics(timeNum)   
                statisticsMin.append(obj.get_min_w())
                statisticsMax.append(obj.get_max_w())
                statisticsAvg.append(obj.get_avg_w())
                statisticsVar.append(obj.get_var_w())

        #print(os.path.basename(CSV_path),parserType)
        logging.debug("+ %s" % os.path.basename(CSV_path))
        logging.debug("  %s" % parserType)

        csvfile = open(CSV_path, 'wb')
        writer = csv.writer(csvfile)

        #print(fields)
        writer.writerow(fields)

        for line in dataLines:
            writer.writerow(line)

        writer.writerow(statisticsMin)
        writer.writerow(statisticsMax)
        writer.writerow(statisticsAvg)
        writer.writerow(statisticsVar)


        if parserType is None:
            print("@csv generated with warn@  " + CSV_path)
            logging.debug("@csv generated with warn@   %s" % CSV_path)
        else:
            print("@csv generated successfully@  " + CSV_path)
            logging.debug("@csv generated successfully@   %s" % CSV_path)

        csvfile.close()
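
The summary rows at the bottom (Min/Max/Avg/Var per column) can be reproduced with the standard statistics module. A self-contained sketch with invented bandwidth columns and the same fixed 5000 ms sampling interval; the column names and values are made up, the real ones come from the data-log file paths:

import csv
import statistics

# Invented per-column bandwidth samples; each key becomes a CSV column.
columns = {
    "vm1-read": [5100, 5200, 4900, 5050],
    "vm2-read": [4800, 4700, 4950, 4900],
}

with open("batch_data.csv", "w", newline="") as csvfile:
    writer = csv.writer(csvfile)
    writer.writerow(["time"] + list(columns))
    for i in range(len(next(iter(columns.values())))):
        writer.writerow([i * 5000] + [vals[i] for vals in columns.values()])
    writer.writerow(["Min"] + [min(v) for v in columns.values()])
    writer.writerow(["Max"] + [max(v) for v in columns.values()])
    writer.writerow(["Avg"] + [round(statistics.mean(v), 2) for v in columns.values()])
    writer.writerow(["Var"] + [round(statistics.pvariance(v), 2) for v in columns.values()])
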
Example #7
    def data_log_file_maker(self, file_path):
        """Core data-log parsing logic.
        1. Cut the file into lines.
        2. Trace the lines in the log.
        3. Generate the readBlock and writeBlock.
        4. Generate a DataLogFile object with its attributes.
        """
        #print(file_path)
        parseType = utils.get_parse_type(file_path)
        lines = utils.cut_file(file_path)

        newlines = []
        for l in lines:
            if l and l != '\n':
                newlines.append(l)

        #tailLine = ''
        #i = 0
        #while '' == tailLine:
        #    i -= 1
        #    tailLine = ''.join(lines[i].split())   
        #tailLineItems = tailLine.split(",")
        #num = int(round(int(tailLineItems[0])/5000.0)) + 1
        num = len(newlines) + 1
        readBlock = [0] * num
        writeBlock = [0] * num

        lastIndex_r = 1
        lastIndex_w = 1

        for line in lines:
            line = ''.join(line.split())
            #print("###@ line:%s" % line)
            if not line:
                continue
            lineItems = line.split(",")
            index = int(round(int(lineItems[0])/5000.0))
            
            # Ensure index - lastIndex == 1; back off by `repair` ms to close gaps.
            repair = 500
            if lineItems[2] == '0':
                while index - lastIndex_r > 1:
                    index = int(round((int(lineItems[0]) - repair) / 5000.0))
                    repair += 500
                lastIndex_r = index
                if readBlock[index] == 0:
                    readBlock[index] = int(lineItems[1])


            if lineItems[2] == '1':
                while index - lastIndex_w > 1:
                    index = int(round((int(lineItems[0]) - repair) / 5000.0))
                    repair += 500
                lastIndex_w = index
                if writeBlock[index] == 0:
                    writeBlock[index] = int(lineItems[1])

            #if '0' == lineItems[2]:
            #    if 0 == readBlock[index]:
            #        readBlock[index] = int(lineItems[1])
            #elif '1' == lineItems[2]:
            #    if 0 == writeBlock[index]:
            #        writeBlock[index] = int(lineItems[1])

        data_log_file = DataLogFile(file_path,parseType,readBlock,writeBlock)

        return data_log_file
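
A worked example of the bucket mapping helps: each fio bandwidth-log line is "time_ms, value, direction" (0 = read, 1 = write), and the timestamp is rounded to a 5-second bucket index. The sketch below uses invented lines and omits the 500 ms "repair" back-off for clarity:

# Invented fio bandwidth-log lines: "time_ms, value, direction" (0 = read, 1 = write).
sample_lines = [
    "5004, 5120, 0",
    "5004, 4096, 1",
    "10012, 5300, 0",
    "10012, 4200, 1",
]

num_buckets = len(sample_lines) + 1
readBlock = [0] * num_buckets
writeBlock = [0] * num_buckets

for raw in sample_lines:
    time_ms, value, direction = (field.strip() for field in raw.split(","))
    index = int(round(int(time_ms) / 5000.0))       # 5-second bucket index
    block = readBlock if direction == "0" else writeBlock
    if block[index] == 0:                           # keep the first sample per bucket
        block[index] = int(value)

print(readBlock)   # [0, 5120, 5300, 0, 0]
print(writeBlock)  # [0, 4096, 4200, 0, 0]
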