def buildTitle( self, clientIndex, statType, typeCount, pairs ):
    """
    @summary : Builds the title printed on top of the graphic.

               The title combines the stat type, the entity (client or
               group) name, the timespan, the end time, a one-line
               explanation and the MAX / MEAN / MIN values found in the
               data used to build the graphic.

    @param clientIndex : Index of the client in self.clientNames /
                         self.means.
    @param statType    : Stat type, expressed in the working language.
    @param typeCount   : Index of the stat type in self.means[clientIndex].
    @param pairs       : (time, value) couples the graphic is drawn from.

    @return : The title string.
    """

    # Comparisons against _("latency") must use the WORKING-language
    # translator, since statType is still in the working language here.
    _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, self.workingLanguage )

    maximum = self.getMaxPairValue( pairs )
    minimum = self.getMinPairValue( pairs )

    isLatency = ( statType == _("latency") )

    # Latencies are shown with 2 decimals; every other type as an integer.
    if maximum is not None:
        maximum = "%.2f" % maximum if isLatency else int(maximum)

    if minimum is not None:
        minimum = "%.2f" % minimum if isLatency else int(minimum)

    # From here on, everything shown to the user is in the output language.
    _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, self.outputLanguage )

    if isLatency:
        explanation = _("With values rounded for every minutes.")
    else:
        explanation = _("With the total value of every minutes.")

    statType = LanguageTools.translateTerm(statType, self.workingLanguage, self.outputLanguage, CURRENT_MODULE_ABS_PATH)
    statType = statType[0].upper() + statType[1:]

    if self.groupName == "":
        entityName = self.clientNames[clientIndex]
    else:
        entityName = self.groupName

    # "\\n" is kept escaped on purpose: gnuplot interprets it as a newline.
    title = _("%s for %s for a span of %s hours ending at %s")\
            %( statType, entityName, self.timespan, self.currentTime) + "\\n%s\\n\\n" %explanation +_("MAX: ") + str(maximum) + " " +\
            _("MEAN: ") + "%3.2f"%(self.means[clientIndex][typeCount]) + " " + _("MIN: ") +str(minimum)

    return title
def getFormatedProductTypesForLabel(self):
    """
    @summary : Returns the product type in a format that can be
               displayed on one of the graphic's labels.

    @note    : If the product type is "*" or _("All"), it is translated
               into the output language.

    @return  : The product type, left-justified on 25 characters, with
               bracket and quote characters stripped out.
    """

    _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, self.workingLanguage )

    firstProductType = self.productTypes[0]

    if firstProductType == _("All") or firstProductType == "*":
        label = LanguageTools.translateTerm(_("All"), self.workingLanguage, self.outputLanguage, CURRENT_MODULE_ABS_PATH)
    else:
        label = firstProductType

    # Strip list-repr artefacts, then pad to a fixed-width label.
    cleanedLabel = str(label).replace('[', '').replace(']', '').replace("'", "")

    return "%-25s" % cleanedLabel
def getPairs( self, clientCount , statType, typeCount ):
    """
    @summary : Creates the data couples used to draw the graphic.
               Couples are a combination of the data previously gathered
               and the time at which the data was produced.

               As a side effect, also refreshes the per-client aggregates:
               nbFiles, nbErrors, nbFilesOverMaxLatency, totalNumberOfBytes,
               means, minimums, maximums, timeOfMax, filesWhereMaxOccured
               and ratioOverLatency.

    @note    : One point per pair will generally be drawn on the graphic,
               but certain graph types might combine a few pairs before
               drawing only one point for the entire combination.

    @warning : If an illegal stat type is found, errors are logged and the
               affected entries fall back to 0.0 values.

    @todo    : Using dictionaries instead of arrays might speed things up
               a bit.

    @param clientCount : Index of the client in self.stats.
    @param statType    : Stat type in the working language; translated to
                         English internally for data lookups.
    @param typeCount   : Index of the stat type in the per-client arrays.

    @return : List of [secondsSinceEpoch, value] pairs, one per time
              interval (empty when there are no time separators).
    """

    if self.logger != None:
        _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, self.outputLanguage )
        self.logger.debug( _("Call to getPairs received.") )

    # k keeps its last loop value after the loop; it is reused below to
    # compute the mean, so it must exist even if the loop never runs.
    k = 0
    pairs = []
    total = 0
    self.nbFiles[clientCount] = 0
    self.nbErrors[clientCount] = 0
    self.nbFilesOverMaxLatency[clientCount] = 0
    # N separators delimit N-1 intervals.
    nbEntries = len( self.stats[clientCount].statsCollection.timeSeperators )-1

    # Collected data is keyed by the English stat-type name.
    translatedStatType = LanguageTools.translateTerm(statType, self.workingLanguage, "en", CURRENT_MODULE_ABS_PATH)

    if nbEntries !=0:

        total = 0
        # Sentinels: a huge int so any real value becomes the new minimum
        # (reset to None at the end if untouched), and None so any real
        # value becomes the new maximum.
        self.minimums[clientCount][typeCount] = 100000000000000000000 #huge integer
        self.maximums[clientCount][typeCount] = None
        self.filesWhereMaxOccured[clientCount][typeCount] = ""
        self.timeOfMax[clientCount][typeCount] = ""

        for k in range( 0, nbEntries ):

            try :

                # An entry with no means has no data for this interval.
                if len( self.stats[clientCount].statsCollection.fileEntries[k].means ) >=1 :

                    # Special accumulation for each type.
                    if translatedStatType == "latency":
                        self.nbFilesOverMaxLatency[clientCount] = self.nbFilesOverMaxLatency[ clientCount ] + self.stats[clientCount].statsCollection.fileEntries[k].filesOverMaxLatency
                    elif translatedStatType == "bytecount":
                        self.totalNumberOfBytes[clientCount] = self.totalNumberOfBytes[clientCount] + self.stats[clientCount].statsCollection.fileEntries[k].totals[translatedStatType]
                    elif translatedStatType == "errors":
                        # Calculate the total number of errors.
                        self.nbErrors[clientCount] = self.nbErrors[clientCount] + self.stats[clientCount].statsCollection.fileEntries[k].totals[translatedStatType]

                    # Add to pairs: errors/bytecount plot totals,
                    # filecount plots the file counter, latency plots means.
                    if translatedStatType == "errors" or translatedStatType == "bytecount":
                        pairs.append( [StatsDateLib.getSecondsSinceEpoch(self.stats[clientCount].statsCollection.timeSeperators[k]), self.stats[clientCount].statsCollection.fileEntries[k].totals[translatedStatType]] )
                    elif translatedStatType == "filecount":
                        pairs.append( [StatsDateLib.getSecondsSinceEpoch(self.stats[clientCount].statsCollection.timeSeperators[k]), self.stats[clientCount].statsCollection.fileEntries[k].nbFiles ] )
                    else:#latency uses means
                        pairs.append( [ StatsDateLib.getSecondsSinceEpoch(self.stats[clientCount].statsCollection.timeSeperators[k]), self.stats[clientCount].statsCollection.fileEntries[k].means[translatedStatType]] )

                    # Track min/max (and where/when the max occurred).
                    if translatedStatType == "filecount":
                        if self.stats[clientCount].statsCollection.fileEntries[k].nbFiles > self.maximums[clientCount][typeCount] :
                            self.maximums[clientCount][typeCount] = self.stats[clientCount].statsCollection.fileEntries[k].nbFiles
                            self.timeOfMax[clientCount][typeCount] = self.stats[clientCount].statsCollection.fileEntries[k].startTime
                        elif self.stats[clientCount].statsCollection.fileEntries[k].nbFiles < self.minimums[clientCount][typeCount] :
                            self.minimums[clientCount][typeCount] = self.stats[clientCount].statsCollection.fileEntries[k].nbFiles
                    elif( self.stats[clientCount].statsCollection.fileEntries[k].maximums[translatedStatType] > self.maximums[clientCount][typeCount] ) :
                        self.maximums[clientCount][typeCount] = self.stats[clientCount].statsCollection.fileEntries[k].maximums[translatedStatType]
                        self.timeOfMax[clientCount][typeCount] = self.stats[clientCount].statsCollection.fileEntries[k].timesWhereMaxOccured[translatedStatType]
                        self.filesWhereMaxOccured[clientCount][typeCount] = self.stats[clientCount].statsCollection.fileEntries[k].filesWhereMaxOccured[translatedStatType]
                    elif self.stats[clientCount].statsCollection.fileEntries[k].minimums[translatedStatType] < self.minimums[clientCount][typeCount] :
                        # A bytecount of 0 means "no data", not a real minimum.
                        if not ( translatedStatType == "bytecount" and self.stats[clientCount].statsCollection.fileEntries[k].minimums[translatedStatType] == 0 ):
                            self.minimums[clientCount][typeCount] = self.stats[clientCount].statsCollection.fileEntries[k].minimums[translatedStatType]

                    self.nbFiles[clientCount] = self.nbFiles[clientCount] + self.stats[clientCount].statsCollection.fileEntries[k].nbFiles

                else:
                    # No data for this interval: plot a 0.0 placeholder.
                    pairs.append( [ StatsDateLib.getSecondsSinceEpoch(self.stats[clientCount].statsCollection.timeSeperators[k]), 0.0 ] )

            except KeyError, instance:
                # Unknown stat type in the collected data: log (in the
                # working language) and fall back to a 0.0 placeholder so
                # pairs keeps one entry per interval.
                _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, self.workingLanguage )
                self.logger.error( _("Error in getPairs.") )
                self.logger.error( _("The %s stat type was not found in previously collected data.") %statType )
                pairs.append( [ StatsDateLib.getSecondsSinceEpoch(self.stats[clientCount].statsCollection.timeSeperators[k]), 0.0 ] )
                pass

            # Exactly one pair was appended this iteration, so pairs[k]
            # is always valid here.
            total = total + pairs[k][1]

        self.means[clientCount][typeCount] = (total / (k+1) )

        if self.nbFiles[clientCount] != 0 :
            # Percentage of files whose latency exceeded the maximum.
            self.ratioOverLatency[clientCount] = float( float(self.nbFilesOverMaxLatency[clientCount]) / float(self.nbFiles[clientCount]) ) *100.0

        # Sentinel untouched: no minimum was ever found.
        if self.minimums[clientCount][typeCount] == 100000000000000000000 :
            self.minimums[clientCount][typeCount] = None

    return pairs
def buildImageName( self ):
    """
    @summary : Builds and returns the absolute file name under which the
               graphic will be saved.

               If the destination folder does not exist, it is created
               (mode 0777). If the resulting base name exceeds the file
               system's maximum name length, the entity, machine and
               product parts are truncated to fit.

    @return : The absolute file name.
    """

    statsPaths = StatsPaths()
    statsPaths.setPaths(self.outputLanguage)

    _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, self.workingLanguage )

    # Entity shown in the file name: the group name when one is set,
    # otherwise the client name(s) joined with dashes.
    # BUGFIX: the original tested "len(self.clientNames) == 0" and then
    # indexed clientNames[0], which always raised IndexError when taken.
    if self.groupName != "":
        entityName = self.groupName
    else:
        entityName = "-".join( self.clientNames )

    date = self.currentTime.replace( "-","" ).replace( " ", "_")

    if self.productTypes[0] == _("All") or self.productTypes[0] == "*":
        formattedProductName = LanguageTools.translateTerm(_("All"), self.workingLanguage, self.outputLanguage, CURRENT_MODULE_ABS_PATH)
    else:
        # All product names joined with '_' (no trailing separator).
        formattedProductName = "_".join( [ str(product) for product in self.productTypes ] )

    _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, self.outputLanguage )

    folder = statsPaths.STATSGRAPHS + _("others/gnuplot/%.50s/") %( entityName )

    translatedStatsTypes = [ LanguageTools.translateTerm(statsType, self.workingLanguage, self.outputLanguage, CURRENT_MODULE_ABS_PATH)\
                             for statsType in self.statsTypes ]

    fileName = folder + _("%s_%s_%s_%s_%shours_on_%s_for_%s_products.png") %( self.fileType, entityName, date, translatedStatsTypes,\
               self.timespan, self.machines, formattedProductName )
    fileName = fileName.replace( '[', '').replace(']', '').replace(" ", "").replace( "'","" )

    if not os.path.isdir( folder ):
        os.makedirs( folder, 0o777 )  # 0o777: octal literal valid on py2.6+ and py3
        os.chmod( folder, 0o777 )

    # Guard against base names longer than the file system allows.
    maxNameLength = os.statvfs( folder )[statvfs.F_NAMEMAX]
    if len( os.path.basename(fileName) ) > maxNameLength:
        # 30 chars of fixed text plus the date, stat types and timespan
        # cannot be shrunk; split what is left between the three variable
        # parts (entity, machines, products).
        maximumLength = maxNameLength - ( 30 + len(date) + len( str(translatedStatsTypes)) + len( str( self.timespan ) ) )
        maxIndyLength = maximumLength // 3  # floor division: int on py2 and py3

        fileName = folder + ( "%s_%." + str( maxIndyLength )+ _("s_%s_%s_%shours_on_%.") + str( maxIndyLength ) + \
                   _("s_for_%.") + str( maxIndyLength ) + _("s_products.png") ) \
                   %( self.fileType, entityName, date, translatedStatsTypes, self.timespan,\
                   self.machines, formattedProductName )

    return fileName
def updateGroupedRoundRobinDatabases( infos, logger = None ):
    """
    @summary : Updates the round-robin databases used to store the merged
               data of a group.

    @param infos  : Object describing the group (group name, clients,
                    machines, fileTypes, endTime).
    @param logger : Optional logger; update failures are logged, never
                    raised.
    """

    endTime = StatsDateLib.getSecondsSinceEpoch( infos.endTime )

    # The group's "errors" database carries the reference last-update
    # time stamp for the whole group.
    tempRRDFileName = RrdUtilities.buildRRDFileName( _("errors"), clients = infos.group, machines = infos.machines, fileType = infos.fileTypes[0] )
    startTime = RrdUtilities.getDatabaseTimeOfUpdate( tempRRDFileName, infos.fileTypes[0] )

    if startTime == 0 :
        # Never updated before: start from today's midnight.
        startTime = StatsDateLib.getSecondsSinceEpoch( StatsDateLib.getIsoTodaysMidnight( infos.endTime ) )

    timeSeperators = getTimeSeperatorsBasedOnAvailableMemory( StatsDateLib.getIsoFromEpoch( startTime ), StatsDateLib.getIsoFromEpoch( endTime ), infos.clients, infos.fileTypes[0], infos.machines )

    for i in xrange(0, len( timeSeperators ), 2 ): # time separators always come in pairs
        startTime = StatsDateLib.getSecondsSinceEpoch( timeSeperators[i] )
        dataPairs = getPairs( infos.clients, infos.machines, infos.fileTypes[0], timeSeperators[i], timeSeperators[i+1], infos.group, logger )

        for dataType in dataPairs:

            translatedDataType = LanguageTools.translateTerm(dataType, 'en', LanguageTools.getMainApplicationLanguage(), CURRENT_MODULE_ABS_PATH)

            rrdFileName = RrdUtilities.buildRRDFileName( dataType = translatedDataType, clients = infos.group, groupName = infos.group, machines = infos.machines,fileType = infos.fileTypes[0], usage = "group" )

            if not os.path.isfile( rrdFileName ):
                createRoundRobinDatabase( rrdFileName, startTime, dataType )

            if endTime > startTime :
                # Skip pairs older than the database's last update.
                # BUGFIX: test the bound BEFORE indexing; the original
                # order raised IndexError when every pair predated
                # startTime.
                j = 0
                while j < len( dataPairs[ dataType ] ) and dataPairs[ dataType ][j][0] < startTime:
                    j = j + 1

                for k in range ( j, len( dataPairs[ dataType ] ) ):
                    try:
                        rrdtool.update( rrdFileName, '%s:%s' %( int( dataPairs[ dataType ][k][0] ), dataPairs[ dataType ][k][1] ) )
                    except:
                        # Best effort: rrdtool rejects samples that are not
                        # newer than the database's last update; log and
                        # keep going.
                        if logger != None:
                            try:
                                logger.warning( "Could not update %s. Last update was more recent than %s " %( rrdFileName,int( dataPairs[ dataType ][k][0] ) ) )
                            except:
                                pass
            else:
                if logger != None :
                    try:
                        logger.warning( _( "This database was not updated since it's last update was more recent than specified date : %s" ) %rrdFileName )
                    except:
                        pass

    RrdUtilities.setDatabaseTimeOfUpdate( tempRRDFileName, infos.fileTypes[0], endTime )
def updateRoundRobinDatabases( client, machines, fileType, endTime, logger = None ):
    """
    @summary : Updates every round-robin database linked to a certain
               client.

    @note    : Database types are linked to the file type associated with
               the client.

    @param client   : Client name.
    @param machines : Machines the client's data comes from.
    @param fileType : File type associated with the client.
    @param endTime  : ISO end time of the span to update.
    @param logger   : Optional logger; update failures are logged, never
                      raised.
    """

    # The client's "errors" database carries the reference last-update
    # time stamp.
    tempRRDFileName = RrdUtilities.buildRRDFileName( dataType = _("errors"), clients = [client], machines = machines, fileType = fileType)
    startTime = RrdUtilities.getDatabaseTimeOfUpdate( tempRRDFileName, fileType )

    if startTime == 0 :
        # Never updated before: start from today's midnight.
        startTime = StatsDateLib.getSecondsSinceEpoch( StatsDateLib.getIsoTodaysMidnight( endTime ) )

    endTime = StatsDateLib.getSecondsSinceEpoch( endTime )

    timeSeperators = getTimeSeperatorsBasedOnAvailableMemory(StatsDateLib.getIsoFromEpoch( startTime ), StatsDateLib.getIsoFromEpoch( endTime ), [client], fileType, machines )

    for i in xrange( len(timeSeperators) -1 ) :

        dataPairs = getPairs( [client], machines, fileType, timeSeperators[i], timeSeperators[i+1] , groupName = "", logger = logger )

        for dataType in dataPairs:

            translatedDataType = LanguageTools.translateTerm(dataType, 'en', LanguageTools.getMainApplicationLanguage(), CURRENT_MODULE_ABS_PATH)

            rrdFileName = RrdUtilities.buildRRDFileName( dataType = translatedDataType, clients = [client], machines = machines, fileType = fileType )

            if not os.path.isfile( rrdFileName ):
                createRoundRobinDatabase( databaseName = rrdFileName , startTime= startTime, dataType = dataType )

            if endTime > startTime :
                # Skip pairs older than the database's last update.
                # BUGFIX: bound-check j before indexing; the original
                # loop raised IndexError when every pair predated
                # startTime.
                j = 0
                while j < len( dataPairs[ dataType ] ) and dataPairs[ dataType ][j][0] < startTime:
                    j = j + 1

                for k in range ( j, len( dataPairs[ dataType ] ) ):
                    try:
                        rrdtool.update( rrdFileName, '%s:%s' %( int( dataPairs[ dataType ][k][0] ), dataPairs[ dataType ][k][1] ) )
                    except:
                        # Best effort: rrdtool rejects samples that are
                        # not newer than the last update; log and go on.
                        if logger != None:
                            try:
                                logger.warning( "Could not update %s. Last update was more recent than %s " %( rrdFileName,int( dataPairs[ dataType ][k][0] ) ) )
                            except:
                                pass

                if logger != None :
                    try:
                        logger.info( _( "Updated %s db for %s in db named : %s" ) %( dataType, client, rrdFileName ) )
                    except:
                        pass
            else:
                if logger != None :
                    try:
                        logger.warning( _( "This database was not updated since it's last update was more recent than specified date : %s" ) %rrdFileName )
                    except:
                        pass

    # NOTE(review): this stamps the LAST per-type database, whereas the
    # grouped variant stamps the "errors" reference database
    # (tempRRDFileName) — confirm which is intended.
    RrdUtilities.setDatabaseTimeOfUpdate( rrdFileName, fileType, endTime )
def updateGroupedRoundRobinDatabases(infos, logger=None):
    """
    @summary : Updates the round-robin databases used to store the merged
               data of a group.

    @param infos  : Object describing the group (group name, clients,
                    machines, fileTypes, endTime).
    @param logger : Optional logger; update failures are logged, never
                    raised.

    @note : This module defines this function twice; this later
            definition is the one in effect at import time.
    """
    endTime = StatsDateLib.getSecondsSinceEpoch(infos.endTime)

    # The group's "errors" database carries the reference last-update
    # time stamp for the whole group.
    tempRRDFileName = RrdUtilities.buildRRDFileName(
        _("errors"), clients=infos.group, machines=infos.machines,
        fileType=infos.fileTypes[0])
    startTime = RrdUtilities.getDatabaseTimeOfUpdate(tempRRDFileName,
                                                     infos.fileTypes[0])
    if startTime == 0:
        # Never updated before: start from today's midnight.
        startTime = StatsDateLib.getSecondsSinceEpoch(
            StatsDateLib.getIsoTodaysMidnight(infos.endTime))

    timeSeperators = getTimeSeperatorsBasedOnAvailableMemory(
        StatsDateLib.getIsoFromEpoch(startTime),
        StatsDateLib.getIsoFromEpoch(endTime), infos.clients,
        infos.fileTypes[0], infos.machines)

    for i in xrange(0, len(timeSeperators), 2):
        # Time separators always come in pairs.
        startTime = StatsDateLib.getSecondsSinceEpoch(timeSeperators[i])
        dataPairs = getPairs(infos.clients, infos.machines,
                             infos.fileTypes[0], timeSeperators[i],
                             timeSeperators[i + 1], infos.group, logger)

        for dataType in dataPairs:

            translatedDataType = LanguageTools.translateTerm(
                dataType, 'en', LanguageTools.getMainApplicationLanguage(),
                CURRENT_MODULE_ABS_PATH)

            rrdFileName = RrdUtilities.buildRRDFileName(
                dataType=translatedDataType, clients=infos.group,
                groupName=infos.group, machines=infos.machines,
                fileType=infos.fileTypes[0], usage="group")

            if not os.path.isfile(rrdFileName):
                createRoundRobinDatabase(rrdFileName, startTime, dataType)

            if endTime > startTime:
                # Skip pairs older than the database's last update.
                # BUGFIX: test the bound BEFORE indexing; the original
                # order raised IndexError when every pair predated
                # startTime.
                j = 0
                while j < len(dataPairs[dataType]) and \
                        dataPairs[dataType][j][0] < startTime:
                    j = j + 1

                for k in range(j, len(dataPairs[dataType])):
                    try:
                        rrdtool.update(
                            rrdFileName,
                            '%s:%s' % (int(dataPairs[dataType][k][0]),
                                       dataPairs[dataType][k][1]))
                    except:
                        # Best effort: rrdtool rejects samples that are
                        # not newer than the last update; log and go on.
                        if logger != None:
                            try:
                                logger.warning(
                                    "Could not update %s. Last update was more recent than %s "
                                    % (rrdFileName,
                                       int(dataPairs[dataType][k][0])))
                            except:
                                pass
            else:
                if logger != None:
                    try:
                        logger.warning(
                            _("This database was not updated since it's last update was more recent than specified date : %s"
                              ) % rrdFileName)
                    except:
                        pass

    RrdUtilities.setDatabaseTimeOfUpdate(tempRRDFileName,
                                         infos.fileTypes[0], endTime)
def updateRoundRobinDatabases(client, machines, fileType, endTime, logger=None):
    """
    @summary : Updates every round-robin database linked to a certain
               client.

    @note : Database types are linked to the file type associated with
            the client.

    @note : This module defines this function twice; this later
            definition is the one in effect at import time.

    @param client   : Client name.
    @param machines : Machines the client's data comes from.
    @param fileType : File type associated with the client.
    @param endTime  : ISO end time of the span to update.
    @param logger   : Optional logger; update failures are logged, never
                      raised.
    """
    # The client's "errors" database carries the reference last-update
    # time stamp.
    tempRRDFileName = RrdUtilities.buildRRDFileName(dataType=_("errors"),
                                                    clients=[client],
                                                    machines=machines,
                                                    fileType=fileType)
    startTime = RrdUtilities.getDatabaseTimeOfUpdate(tempRRDFileName,
                                                     fileType)
    if startTime == 0:
        # Never updated before: start from today's midnight.
        startTime = StatsDateLib.getSecondsSinceEpoch(
            StatsDateLib.getIsoTodaysMidnight(endTime))

    endTime = StatsDateLib.getSecondsSinceEpoch(endTime)

    timeSeperators = getTimeSeperatorsBasedOnAvailableMemory(
        StatsDateLib.getIsoFromEpoch(startTime),
        StatsDateLib.getIsoFromEpoch(endTime), [client], fileType, machines)

    for i in xrange(len(timeSeperators) - 1):

        dataPairs = getPairs([client], machines, fileType,
                             timeSeperators[i], timeSeperators[i + 1],
                             groupName="", logger=logger)

        for dataType in dataPairs:

            translatedDataType = LanguageTools.translateTerm(
                dataType, 'en', LanguageTools.getMainApplicationLanguage(),
                CURRENT_MODULE_ABS_PATH)

            rrdFileName = RrdUtilities.buildRRDFileName(
                dataType=translatedDataType, clients=[client],
                machines=machines, fileType=fileType)

            if not os.path.isfile(rrdFileName):
                createRoundRobinDatabase(databaseName=rrdFileName,
                                         startTime=startTime,
                                         dataType=dataType)

            if endTime > startTime:
                # Skip pairs older than the database's last update.
                # BUGFIX: bound-check j before indexing; the original
                # loop raised IndexError when every pair predated
                # startTime.
                j = 0
                while j < len(dataPairs[dataType]) and \
                        dataPairs[dataType][j][0] < startTime:
                    j = j + 1

                for k in range(j, len(dataPairs[dataType])):
                    try:
                        rrdtool.update(
                            rrdFileName,
                            '%s:%s' % (int(dataPairs[dataType][k][0]),
                                       dataPairs[dataType][k][1]))
                    except:
                        # Best effort: rrdtool rejects samples that are
                        # not newer than the last update; log and go on.
                        if logger != None:
                            try:
                                logger.warning(
                                    "Could not update %s. Last update was more recent than %s "
                                    % (rrdFileName,
                                       int(dataPairs[dataType][k][0])))
                            except:
                                pass

                if logger != None:
                    try:
                        logger.info(
                            _("Updated %s db for %s in db named : %s") %
                            (dataType, client, rrdFileName))
                    except:
                        pass
            else:
                if logger != None:
                    try:
                        logger.warning(
                            _("This database was not updated since it's last update was more recent than specified date : %s"
                              ) % rrdFileName)
                    except:
                        pass

    # NOTE(review): this stamps the LAST per-type database, whereas the
    # grouped variant stamps the "errors" reference database
    # (tempRRDFileName) — confirm which is intended.
    RrdUtilities.setDatabaseTimeOfUpdate(rrdFileName, fileType, endTime)