def getSeperatorsForHourlyTreatments( startTime, endTime, currentFreeMemory, fileSizesPerHour, usage= "rrd" ):
    """
        @summary : returns a list of time seperators based on a list of file
                   and the current amount of free memory. Each seperator
                   represents the time associated with a certain hourly file.
                   Each seperator will represent the maximum amount of files
                   that can be treated at the same time without busting the
                   current memory.

        @attention: List fo files MUST refer to hourly files.

        @param startTime: Startime in iso format of the interval to work with.
        @param endTime: End time in iso format of the interval to work with.
        @param currentFreeMemory: Maximum amout of memory to use per seperation.
        @param fileSizesPerHour: size of the file(s) to be treated at every hour.
        @param usage: "rrd" shifts in-between separators by one minute;
                      any other value inserts epoch values instead.

        @return: Returns the time seperators.
    """

    currentTotalFileSizes = 0
    currentTime = StatsDateLib.getSecondsSinceEpoch(startTime)
    seperators = [startTime]

    if fileSizesPerHour[0] < currentFreeMemory:

        # Walk hour by hour; every time the accumulated sizes would exceed
        # the available memory, close off a batch by emitting a separator.
        for fileSizePerHour in fileSizesPerHour :

            currentTotalFileSizes = currentTotalFileSizes + fileSizePerHour

            if currentFreeMemory < currentTotalFileSizes:
                seperators.append( StatsDateLib.getIsoFromEpoch(currentTime))
                currentTotalFileSizes = 0

            # NOTE(review): reconstructed placement — the hour advance is
            # taken to be once per file, so separators track hourly files.
            currentTime = currentTime + StatsDateLib.HOUR

    else:
        raise Exception( "Cannot build seperators. First file will not even fit within current available memory." )

    # Always terminate the list with the requested end of interval.
    if seperators[len(seperators) -1 ] != endTime :
        seperators.append( endTime )

    if len(seperators) > 2 : #If any "in between seperators were added"

        i = 1
        currentLength = len(seperators) -1

        while i < currentLength: #add 1 minute

            # Each in-between separator is doubled : the end of one batch
            # and the (slightly offset) start of the next one.
            if usage == "rrd":
                seperators.insert(i+1, StatsDateLib.getIsoFromEpoch( (StatsDateLib.getSecondsSinceEpoch(seperators[i]) + StatsDateLib.MINUTE)))
            else:
                seperators.insert( i+1, StatsDateLib.getSecondsSinceEpoch(seperators[i]) )

            currentLength = currentLength + 1
            i = i + 2

    return seperators
def main():
    """
        @summary: Small test case to see if everything works fine
    """

    statsConfig = StatsConfigParameters()
    statsConfig.getAllParameters()
    machineconfig = MachineConfigParameters()
    machineconfig.getParametersFromMachineConfigurationFile()

    # Work on a 7-day window ending two hours ago, rounded to the hour.
    currentTimeEpochFormat = time.time() - (120 * 60)
    endTime = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch(currentTimeEpochFormat))
    startTime = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch(currentTimeEpochFormat - (StatsDateLib.DAY * 7)))
    print startTime, endTime

    groupName = statsConfig.groupParameters.groups[0]
    clients = statsConfig.groupParameters.groupsMembers[groupName]
    machines = statsConfig.groupParameters.groupsMachines[groupName]
    fileType = statsConfig.groupParameters.groupFileTypes[groupName]

    # One separator per hour over the whole week (last one dropped,
    # endTime bounds the interval instead).
    seperators = [startTime]
    seperators.extend( StatsDateLib.getSeparatorsWithStartTime( startTime=startTime, width=StatsDateLib.DAY * 7, interval=StatsDateLib.HOUR)[:-1])

    listOfFiles = PickleMerging.createMergedPicklesList( startTime, endTime, clients, groupName, fileType, machines, seperators)
    listOfFileSizes = MemoryManagement.getListOfFileSizes(listOfFiles)
    currentFreeMemory = MemoryManagement.getCurrentFreeMemory(0.55555)

    if MemoryManagement.getTotalSizeListOfFiles( listOfFiles) > currentFreeMemory:
        seperators = MemoryManagement.getSeperatorsForHourlyTreatments( startTime, endTime, currentFreeMemory, listOfFileSizes)
        print seperators
    else:
        # NOTE(review): getTotalSizeListOfFiles is called unqualified here
        # but as MemoryManagement.getTotalSizeListOfFiles above — verify it
        # is imported directly, otherwise this branch raises NameError.
        print "We have %s bytes free and the pickles require %s bytes" % ( currentFreeMemory, getTotalSizeListOfFiles(listOfFiles))
        print "we have enough memory to merge all these pickles."
def main(): """ @summary : This program is to be used to backup rrd databases and their corresponding time of update files. Backing up rrd databases at various point in time is a recommended paractice in case newly entered data is not valid. """ setGlobalLanguageParameters() currentTime = time.time() currentTime = StatsDateLib.getIsoFromEpoch( currentTime ) currentTime = StatsDateLib.getIsoWithRoundedSeconds( currentTime ) currentTime = currentTime.replace(" ", "_") backupsToKeep = 20 if len( sys.argv ) == 2: try: backupsToKeep = int( sys.argv[1] ) except: print _( "Days to keep value must be an integer. For default 20 backups value, type nothing." ) sys.exit() backupDatabaseUpdateTimes( currentTime, backupsToKeep ) backupDatabases( currentTime, backupsToKeep )
def setMonths( self ):
    """
        Sets self.months to the epoch times of the first day of the last
        5 months (current month included), oldest first.

        @note : Fix against the original version : when the month wrapped
                into the previous year the year was computed as
                currentDate.year - i, which is wrong for i >= 2 (e.g. with
                current month February and i = 3 it produced November of
                year - 3 instead of year - 1). Since i < 12 the wrap can
                cross at most one year boundary, so the correct value is
                simply currentDate.year - 1.
    """

    currentTime = time.time()
    currentTime = StatsDateLib.getIsoFromEpoch( currentTime )
    # Day forced to 1 so the date is valid regardless of month length.
    currentDate = datetime.date( int(currentTime[0:4]), int(currentTime[5:7]), 1 )

    months = []

    for i in range(0,5):

        if currentDate.month - i < 1 :
            month = currentDate.month - i + 12
            year = currentDate.year - 1   # wrapped into the previous year
        else :
            month = currentDate.month - i
            year = currentDate.year

        newdate = StatsDateLib.getSecondsSinceEpoch( "%s-%s-01 00:00:00" %( year,month ) )
        months.append( newdate )

    months.reverse()  # oldest month first

    self.months = months
def printPickledTimes( pickledTimes ):
    """
        @summary: Prints out all the pickled times found.

        @param pickledTimes: Dictionary containing containing the
                             name -> timeOfUpdate relationships.
    """

    currentTime = time.time()
    currentTime = StatsDateLib.getIsoFromEpoch(currentTime)

    # Display entries in alphabetical order of name.
    keys = pickledTimes.keys()
    keys.sort()

    os.system( 'clear' )

    print "######################################################################"
    print "# List of current times of updates. #"
    print "# Times were found at : %-43s #" %currentTime
    print "# On the machine named : %-43s #"%LOCAL_MACHINE
    for key in keys:
        print("#%32s : %33s#") %( key, pickledTimes[key] )
    print "# #"
    print "######################################################################"
def getStartEndOfWebPage():
    """
        @summary : Returns the time of the first graphics to be shown on the
                   web page and the time of the last graphic to be displayed.

        @return : start, end tuple in iso format.
    """

    currentTime = StatsDateLib.getIsoFromEpoch( time.time() )
    # Day forced to 1 so the date is valid regardless of month length.
    currentDate = datetime.date( int(currentTime[0:4]), int(currentTime[5:7]), 1 )

    nbMonthsToRevwind = NB_MONTHS_DISPLAYED - 1

    if currentDate.month - (nbMonthsToRevwind%12) < 1 :
        # Month wraps into a previous year.
        month = currentDate.month - (nbMonthsToRevwind%12)+12
        # Whenever the outer condition holds, month <= nbMonthsToRevwind%12
        # <= nbMonthsToRevwind, so this inner condition holds too and
        # 'year' is always assigned on this branch.
        if currentDate.month -nbMonthsToRevwind < 1:
            # Number of whole years rewound, derived from the month deficit.
            year = currentDate.year - int( abs(math.floor( float( ( currentDate.month - nbMonthsToRevwind ) / 12 ) ) ) )
    else :
        month = currentDate.month - nbMonthsToRevwind
        year = currentDate.year

    start = "%s-%s-%s 00:00:00" %( year,month,"01" )
    end = StatsDateLib.getIsoTodaysMidnight( currentTime )

    return start, end
def printPickledTimes(pickledTimes): """ @summary: Prints out all the pickled times found. @param pickledTimes: Dictionary containing containing the name -> timeOfUpdate relationships. """ currentTime = time.time() currentTime = StatsDateLib.getIsoFromEpoch(currentTime) keys = pickledTimes.keys() keys.sort() os.system("clear") print "######################################################################" print "# List of current times of updates. #" print "# Times were found at : %-43s #" % currentTime print "# On the machine named : %-43s #" % LOCAL_MACHINE for key in keys: print ("#%32s : %33s#") % (key, pickledTimes[key]) print "# #" print "######################################################################"
def setMonths( self ):
    """
        @Summary : Sets the months value to an array containing the last X
                   months in "since epoch" numbers based on the globally
                   set NB_MONTHS_DISPLAYED value.
    """

    currentTime = time.time()
    currentTime = StatsDateLib.getIsoFromEpoch( currentTime )
    currentDate = datetime.date( int(currentTime[0:4]), int(currentTime[5:7]), 1 ) # day always = 1 in case currentDate.day > 28

    months = []

    for i in range(0,NB_MONTHS_DISPLAYED):

        if currentDate.month - (i%12) < 1 :
            month = currentDate.month - (i%12)+12
            if currentDate.month -i < 1:
                # Whole years rewound, derived from the month deficit.
                year = currentDate.year - int( abs(math.floor( float( ( currentDate.month - i ) / 12 ) ) ) )
        else :
            # NOTE(review): for i >= 13 this branch can yield a negative
            # month (month - i without any wrap) — this only looks safe
            # while NB_MONTHS_DISPLAYED stays small; confirm intended range.
            month = currentDate.month - i
            year = currentDate.year

        months.append( StatsDateLib.getSecondsSinceEpoch( "%s-%s-%s 00:00:00" %(year,month,"01") ) )

    months.reverse()  # oldest month first

    self.months = months

    print months
def addOptions( parser ):
    """
        @summary: This method is used to add all available options to the option parser.

        @param parser: optparse.OptionParser instance to populate.
    """

    parser.add_option("-c", "--clients", action="store", type="string", dest="clients", default="ALL",
                      help=_("Clients' names"))

    parser.add_option("-d", "--daily", action="store_true", dest = "daily", default=False,
                      help=_("Create csv file containing daily data.") )

    # Default end time is "now"; overriding it is meant for testing.
    parser.add_option( "--date", action="store", type="string", dest="date",
                       default=StatsDateLib.getIsoFromEpoch( time.time() ),
                       help=_("Decide end time of graphics. Usefull for testing.") )

    parser.add_option("-f", "--fileType", action="store", type="string", dest="fileType", default='tx',
                      help=_("Type of log files wanted.") )

    parser.add_option( "--fixedPrevious", action="store_true", dest="fixedPrevious", default=False,
                       help=_("Do not use floating weeks|days|months|years. Use previous fixed interval found.") )

    parser.add_option( "--fixedCurrent", action="store_true", dest="fixedCurrent", default=False,
                       help=_("Do not use floating weeks|days|months|years. Use current fixed interval found.") )

    parser.add_option( "--includeGroups", action="store_true", dest="includeGroups", default=False,
                       help=_("Include groups of all the specified machines or clusters." ) )

    parser.add_option( "-l", "--language", action="store", type="string", dest="outputLanguage", default="",
                       help = _("Language in which you want the casv file to be created in." ) )

    parser.add_option( "--machines", action="store", type="string", dest="machines", default=LOCAL_MACHINE,
                       help =_("Machines for wich you want to collect data." ) )

    parser.add_option("--machinesAreClusters", action="store_true", dest = "machinesAreClusters", default=False,
                      help=_("Specified machines are clusters.") )

    parser.add_option("-m", "--monthly", action="store_true", dest = "monthly", default=False,
                      help=_("Create csv file containing monthly data." ) )

    parser.add_option("--turnOffLogging", action="store_true", dest = "turnOffLogging", default=False,
                      help=_("Turn off the logger") )

    parser.add_option("-w", "--weekly", action="store_true", dest = "weekly", default=False,
                      help=_("Create csv file containing weekly data." ) )

    parser.add_option("-y", "--yearly", action="store_true", dest = "yearly", default=False,
                      help=_("Create csv file containing yearly data." ) )
def getStartAndEndTimeForPickleRecollection(): """ @summary : Gets the start time and the endTime of the pickle recollection from the user's input. @return : Returns the startTime and endTime. """ startTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss) : ") while not StatsDateLib.isValidIsoDate( startTime ): print "Error. The entered date must be of the iso format." startTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss) : ") endTime= raw_input( "Enter the endTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : ") while( str(endTime).lower() != "now" and not StatsDateLib.isValidIsoDate( endTime ) and ( StatsDateLib.isValidIsoDate( endTime ) and endTime<= startTime ) ) : if StatsDateLib.isValidIsoDate( endTime ) and endTime<= startTime : print "Error. End time must be after startTime( %s ). " elif StatsDateLib.isValidIsoDate( endTime ): print "Error. The entered date must be of the iso format." endTime= raw_input( "Enter the endTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : ") if endTime == "now" : endTime = StatsDateLib.getIsoFromEpoch( time.time() ) return startTime, endTime
def addOptions(parser):
    """
        This method is used to add all available options to the option parser.

        @param parser: optparse.OptionParser instance to populate.
    """

    parser.add_option("-c", "--clients", action="store", type="string", dest="clients", default=_("ALL"),
                      help=_("Clients for wich we need to tranfer the data."))

    # Default end of update is "now".
    parser.add_option("-e", "--end", action="store", type="string", dest="end",
                      default=StatsDateLib.getIsoFromEpoch(time.time()),
                      help=_("Decide ending time of the update."))

    parser.add_option("-f", "--fileTypes", action="store", type="string", dest="fileTypes", default="",
                      help=_("Specify the data type for each of the clients."))

    parser.add_option( "-g", "--group", action="store", type="string", dest="group", default="",
                       help= _("Transfer the combined data of all the specified clients/sources into a grouped database." ))

    parser.add_option("-m", "--machines", action="store", type="string", dest="machines", default=LOCAL_MACHINE,
                      help=_("Specify on wich machine the clients reside."))

    parser.add_option("-p", "--products", action="store", type="string", dest="products", default=_("ALL"),
                      help=_("Specify wich product you are interested in."))
def askUserAboutUpdatingLogs(infos):
    """
        @Summary : Asks user about whether or not he wants to update the
                   log files on his machine.

        @param infos: Object carrying picklesRecollectionStartTime, used to
                      tailor the warning text to the recollection window.

        @returns True or False
    """

    updateLofFiles = False

    os.system("clear")
    showPresentation()

    print ""
    print ""
    print "***************** Important note *****************"
    print "Collection or recollection of pickle files "
    print "is closely linked to the log files found on this machine."

    # Pick the warning text depending on whether the recollection reaches
    # the current hour or ends before it.
    if StatsDateLib.getIsoWithRoundedHours( infos.picklesRecollectionStartTime ) != StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch(time.time())):
        print "Data recollection is set to take place up to the current hour."
        print "For the end of the recollection it is recommended that log file be updated."
        print "However, if the recollection spans over a long while and that the log file currently "
        print "on this machine are 'old' and cover the start of the recollection,"
        print "updating log files might cause you to loose some or all of those old files."
    else:
        print "Data recollection is set to end PRIOR to the current hour."
        print "In this case, log file updates are usually useless."
        print "In the case where the span between the start of the recollection "
        print "is longer than the span covered by the currently accessible log files, "
        print "usefull log files will be lsot by updating them."
        print "However the opposite could also be true. If problems occured and "
        print "databases are seriously outdated, updating them will be the only solution "
        print "capable of making some or all the needed log file data accessible for pickling."

    print ""
    print "***Please review log files prior to specifying whether or not you want to update them or not.***"
    print ""

    # NOTE(review): 'input' shadows the builtin of the same name; harmless
    # inside this function but worth renaming eventually.
    input = raw_input("Do you want to update log files ? ( y or n ) : ")
    while (str(input).lower() != 'n' and str(input).lower() != 'y'):
        print "Please enter one of the following choices : y/Y or n/N."
        input = raw_input("Do you want to update log files ? ( y or n ) : ")

    if str(input).lower() == 'y':
        print "Log files will be updated."
        updateLofFiles = True
    else:
        print "Log files will not be updated."

    return updateLofFiles
def getLastUpdate( machine, client, fileType, currentDate, paths, collectUpToNow = False ): """ @summary : Reads and returns the client's or source's last update. @return : The client's or sources last update. """ times = {} lastUpdate = {} fileName = "%s%s_%s_%s" %( paths.STATSPICKLESTIMEOFUPDATES, fileType, client, machine ) if os.path.isfile( fileName ): try : fileHandle = open( fileName, "r" ) lastUpdate = pickle.load( fileHandle ) fileHandle.close() except: print _("problematic file in loading : %s") %fileName lastUpdate = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch(currentDate ) - StatsDateLib.HOUR) ) pass fileHandle.close() else:#create a new pickle file.Set start of the pickle as last update. if not os.path.isdir( os.path.dirname( fileName ) ) : os.makedirs( os.path.dirname( fileName ) ) fileHandle = open( fileName, "w" ) lastUpdate = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch(currentDate ) - StatsDateLib.HOUR) ) pickle.dump( lastUpdate, fileHandle ) fileHandle.close() return lastUpdate
def main():
    """
        @summary: Small test case to see if everything works fine
    """

    statsConfig = StatsConfigParameters()
    statsConfig.getAllParameters()
    machineconfig = MachineConfigParameters()
    machineconfig.getParametersFromMachineConfigurationFile()

    # Work on a 7-day window ending two hours ago, rounded to the hour.
    currentTimeEpochFormat = time.time() -(120*60)
    endTime = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( currentTimeEpochFormat ) )
    startTime = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( currentTimeEpochFormat -( StatsDateLib.DAY*7 ) ) )
    print startTime, endTime

    groupName = statsConfig.groupParameters.groups[0]
    clients = statsConfig.groupParameters.groupsMembers[ groupName ]
    machines = statsConfig.groupParameters.groupsMachines[ groupName ]
    fileType = statsConfig.groupParameters.groupFileTypes[ groupName ]

    # One separator per hour over the whole week (last one dropped,
    # endTime bounds the interval instead).
    seperators = [startTime]
    seperators.extend( StatsDateLib.getSeparatorsWithStartTime( startTime = startTime , width=StatsDateLib.DAY*7, interval=StatsDateLib.HOUR )[:-1])

    listOfFiles = PickleMerging.createMergedPicklesList( startTime, endTime, clients, groupName, fileType, machines, seperators )
    listOfFileSizes = MemoryManagement.getListOfFileSizes(listOfFiles)
    currentFreeMemory = MemoryManagement.getCurrentFreeMemory(0.55555)

    if MemoryManagement.getTotalSizeListOfFiles( listOfFiles ) > currentFreeMemory:
        seperators = MemoryManagement.getSeperatorsForHourlyTreatments( startTime, endTime, currentFreeMemory, listOfFileSizes )
        print seperators
    else:
        # NOTE(review): getTotalSizeListOfFiles is called unqualified here
        # but as MemoryManagement.getTotalSizeListOfFiles above — verify it
        # is imported directly, otherwise this branch raises NameError.
        print "We have %s bytes free and the pickles require %s bytes" %( currentFreeMemory, getTotalSizeListOfFiles( listOfFiles ) )
        print "we have enough memory to merge all these pickles."
def buildThisHoursFileName(client="someclient", offset=0, currentTime="", fileType="tx", machine="someMachineName"):
    """
        @summary : Builds a filename using current currentTime.

        @Note : The format will be something like this :
                StatsPaths.STATSPICKLES/clientName/date/TXorRX//machine_hour
                Ex : StatsPaths.STATSPICKLES/clientName/20060707/tx/machinex_12:00:00

                offset can be used to find a file from an hour close to the
                current one.

                tempcurrentTime can also be used to build a filename from
                another hour.

        @warning :To be used only with pickles created hourly.
    """

    timeFolder = ""

    # Empty currentTime means "use the system clock"; otherwise convert
    # the given iso date to seconds since epoch.
    if currentTime == "":
        currentTime = time.time()
    else:
        currentTime = StatsDateLib.getSecondsSinceEpoch(currentTime)

    currentTime = currentTime + (offset * StatsDateLib.HOUR)
    splitTime = time.gmtime(currentTime)

    # Build the "YYYYMMDD" folder name, zero-padding month and day.
    for i in range(3):
        if int(splitTime[i]) < 10:
            timeFolder = timeFolder + "0" + str(splitTime[i])
        else:
            timeFolder = timeFolder + str(splitTime[i])

    hour = StatsDateLib.getHoursFromIso( StatsDateLib.getIsoFromEpoch(currentTime))

    # Clamp client and machine names to the filesystem's max name length.
    maxLt = (os.statvfs(STATSPATHS.STATSPICKLES)[statvfs.F_NAMEMAX])

    fileName = ("%s" + "%." + str(maxLt) + "s/%s/%s/%." + str(maxLt) + "s_%s") % (STATSPATHS.STATSPICKLES, client, timeFolder, fileType, str(machine), str(hour))

    return fileName
def getDirListToKeep( daysToKeep = 21 ):
    """
        @summary : Gets the list of directories to keep. Based on daysToKeep parameter.

        @param daysToKeep : Number of past days to keep.

        @return : List of directories to keep.
    """

    now = time.time()
    oneDayInSeconds = 60 * 60 * 24

    # One "YYYYMMDD" folder name per day, today first.
    return [ StatsDateLib.getIsoFromEpoch( now - dayIndex * oneDayInSeconds ).split()[0].replace( '-', '' )
             for dayIndex in range( daysToKeep ) ]
def getStartEndOfWebPage():
    """
        Returns the time of the first graphics to be shown on the web page
        and the time of the last graphic to be displayed.
    """

    now = StatsDateLib.getIsoFromEpoch(time.time())

    # Rewind to the first displayed year, then snap both ends to midnight.
    daysToRewind = (NB_YEARS_DISPLAYED - 1) * 365
    earliest = StatsDateLib.rewindXDays(now, daysToRewind)

    start = StatsDateLib.getIsoTodaysMidnight(earliest)
    end = StatsDateLib.getIsoTodaysMidnight(now)

    return start, end
def getStartEndOfWebPage():
    """
        @summary : Returns the time of the first graphics to be shown on the
                   web page and the time of the last graphic to be displayed.

        @return : Start,end tuple both in ISO format.
    """

    now = StatsDateLib.getIsoFromEpoch(time.time())

    # Rewind to the first displayed day, then snap both ends to midnight.
    earliestDay = StatsDateLib.rewindXDays(now, NB_DAYS_DISPLAYED - 1)

    start = StatsDateLib.getIsoTodaysMidnight(earliestDay)
    end = StatsDateLib.getIsoTodaysMidnight(now)

    return start, end
def getStartEndOfWebPage():
    """
        @summary : Returns the time of the first graphics to be shown on the
                   web page and the time of the last graphic to be displayed.

        @return : Start,end tuple both in ISO format.
    """

    currentTime = StatsDateLib.getIsoFromEpoch( time.time() )

    # Rewind to the first displayed day, then snap both ends to midnight.
    start = StatsDateLib.rewindXDays( currentTime, NB_DAYS_DISPLAYED - 1 )
    start = StatsDateLib.getIsoTodaysMidnight( start )
    end = StatsDateLib.getIsoTodaysMidnight( currentTime )

    return start, end
def main(): """ @summary : This program is to be used to backup rrd databases and their corresponding time of update files. Backing up rrd databases at various point in time is a recommended paractice in case newly entered data is not valid. """ setGlobalLanguageParameters() timeToRestore = "2006-10-23 09:00:00" currentTime = time.time() currentTime = StatsDateLib.getIsoFromEpoch(currentTime) currentTime = StatsDateLib.getIsoWithRoundedSeconds(currentTime) currentTime = currentTime.replace(" ", "_") generalParameters = StatsConfigParameters() generalParameters.getAllParameters() if len(sys.argv) == 2: print sys.argv #try: timeToRestore = sys.argv[1] t = time.strptime( timeToRestore, '%Y-%m-%d %H:%M:%S') #will raise exception if format is wrong. split = timeToRestore.split() timeToRestore = "%s_%s" % (split[0], split[1]) # except: # print 'Date must be of the following format "YYYY-MM-DD HH:MM:SS"' # print "Program terminated." # sys.exit() restoreDatabaseUpdateTimes(timeToRestore, currentTime, generalParameters.nbDbBackupsToKeep) restoreDatabases(timeToRestore, currentTime, generalParameters.nbDbBackupsToKeep) else: print _("You must specify a date.") print _("Date must be of the folowing format YYYY-MM-DD HH:MM:SS") print _("Program terminated.")
def getStartEndOfWebPage():
    """
        Returns the time of the first graphics to be shown on the web page
        and the time of the last graphic to be displayed.
    """

    currentTime = StatsDateLib.getIsoFromEpoch( time.time() )

    # Rewind to the first displayed year, then snap both ends to midnight.
    start = StatsDateLib.rewindXDays( currentTime, ( NB_YEARS_DISPLAYED - 1 ) * 365 )
    start = StatsDateLib.getIsoTodaysMidnight( start )
    end = StatsDateLib.getIsoTodaysMidnight( currentTime )

    return start, end
def filterClientsNamesUsingWilcardFilters(currentTime, timespan, clientNames, machines, fileTypes):
    """
        Expands wildcard ('?' or '*') client names into the concrete
        client/source names that ran during the given period.

        @param currentTime: currentTime specified in the parameters.
        @param timespan: Time span specified within the parameters.
        @param clientNames:List of client names found in the parameters.
        @param machines: Machines to search for running clients/sources.
        @param fileTypes: One file type per client, or a single type
                          applied to all of them.

        @return: List of client names with wildcards expanded.
    """

    newClientNames = []

    end = currentTime
    start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch(currentTime) - 60 * 60 * timespan)

    if len(clientNames) >= len(fileTypes) or len(fileTypes) == 1:

        # A single file type is replicated so each client has one.
        if len(fileTypes) == 1:
            for i in range(1, len(clientNames)):
                fileTypes.append(fileTypes[0])

        # map(None, a, b) is the Python 2 equivalent of zip_longest:
        # pairs padded with None when lengths differ.
        for clientName, fileType in map(None, clientNames, fileTypes):

            if '?' in clientName or '*' in clientName:
                pattern = clientName
                rxHavingRun, txHavingRun = GeneralStatsLibraryMethods.getRxTxNamesHavingRunDuringPeriod( start, end, machines, pattern)

                if fileType == "rx":
                    namesHavingrun = rxHavingRun
                else:
                    namesHavingrun = txHavingRun

                newClientNames.extend(namesHavingrun)
            else:
                newClientNames.append(clientName)

    return newClientNames
def main():
    """
        @summary : This program is to be used to backup rrd databases and
                   their corresponding time of update files. Backing up rrd
                   databases at various point in time is a recommended
                   paractice in case newly entered data is not valid.
    """

    setGlobalLanguageParameters()

    timeToRestore = "2006-10-23 09:00:00"  # default restore point

    # Build a filename-friendly timestamp such as "2006-10-23_09:00:00".
    currentTime = time.time()
    currentTime = StatsDateLib.getIsoFromEpoch( currentTime )
    currentTime = StatsDateLib.getIsoWithRoundedSeconds( currentTime )
    currentTime = currentTime.replace(" ", "_")

    generalParameters = StatsConfigParameters()
    generalParameters.getAllParameters()

    if len( sys.argv ) == 2:

        print sys.argv

        # NOTE(review): the try/except validating the date format is
        # commented out below, so a badly formatted date raises an
        # unhandled ValueError from strptime instead of printing the
        # intended message — confirm whether this was deliberate.
        #try:
        timeToRestore = sys.argv[1]
        t = time.strptime( timeToRestore, '%Y-%m-%d %H:%M:%S' )#will raise exception if format is wrong.
        split = timeToRestore.split()
        timeToRestore = "%s_%s" %( split[0], split[1] )
        # except:
        #     print 'Date must be of the following format "YYYY-MM-DD HH:MM:SS"'
        #     print "Program terminated."
        #     sys.exit()

        restoreDatabaseUpdateTimes( timeToRestore, currentTime, generalParameters.nbDbBackupsToKeep )
        restoreDatabases( timeToRestore, currentTime, generalParameters.nbDbBackupsToKeep )

    else:
        print _( "You must specify a date." )
        print _( "Date must be of the folowing format YYYY-MM-DD HH:MM:SS" )
        print _( "Program terminated." )
def addOptions( parser ):
    """
        This method is used to add all available options to the option parser.

        @param parser: optparse.OptionParser instance to populate.
    """

    parser.add_option("-c", "--clients", action="store", type="string", dest="clients", default=_("ALL"),
                      help=_("Clients' names") )

    parser.add_option( "--copy", action="store_true", dest = "copy", default=False,
                       help=_("Create a copy file for the generated image.") )

    parser.add_option("-d", "--daily", action="store_true", dest = "daily", default=False,
                      help=_("Create daily graph(s).") )

    # Default end time is "now"; overriding it is meant for testing.
    parser.add_option( "--date", action="store", type="string", dest="date",
                       default=StatsDateLib.getIsoFromEpoch( time.time() ),
                       help=_("Decide end time of graphics. Usefull for testing.") )

    parser.add_option("-f", "--fileType", action="store", type="string", dest="fileType", default='tx',
                      help=_("Type of log files wanted."))

    parser.add_option( "--fixedPrevious", action="store_true", dest="fixedPrevious", default=False,
                       help=_("Do not use floating weeks|days|months|years. Use previous fixed interval found."))

    parser.add_option( "--fixedCurrent", action="store_true", dest="fixedCurrent", default=False,
                       help=_("Do not use floating weeks|days|months|years. Use current fixed interval found."))

    parser.add_option( "--havingRun", action="store_true", dest="havingRun", default=False,
                       help=_("Do not use only the currently running client/sources. Use all that have run between graphic(s) start and end instead."))

    parser.add_option("-i", "--individual", action="store_true", dest = "individual", default=False,
                      help=_("Dont combine data from specified machines. Create graphs for every machine independently") )

    parser.add_option( "-l", "--language", action="store", type="string", dest="outputLanguage", default="",
                       help = _("Language in which you want the graphic(s) details to be printed in.." ) )

    parser.add_option("-m", "--monthly", action="store_true", dest = "monthly", default=False,
                      help=_("Create monthly graph(s).") )

    parser.add_option( "--machines", action="store", type="string", dest="machines", default=LOCAL_MACHINE,
                       help = _("Machines for wich you want to collect data." ) )

    parser.add_option("-s", "--span", action="store",type ="int", dest = "timespan", default=None,
                      help=_("timespan( in hours) of the graphic.") )

    parser.add_option("-t", "--types", type="string", dest="types", default=_("All"),
                      help=_("Types of data to look for.") )

    parser.add_option("--totals", action="store_true", dest = "totals", default=False,
                      help=_("Create graphics based on the totals of all the values found for all specified clients or for a specific file type( tx, rx )."))

    parser.add_option("--turnOffLogging", action="store_true", dest = "turnOffLogging", default=False,
                      help=_("Turn off the logger"))

    parser.add_option("-w", "--weekly", action="store_true", dest = "weekly", default=False,
                      help=_("Create weekly graph(s)."))

    parser.add_option("-y", "--yearly", action="store_true", dest = "yearly", default=False,
                      help=_("Create yearly graph(s)."))
def buildThisHoursFileName( client = "someclient", offset = 0, currentTime = "", fileType = "tx", machine = "someMachineName" ):
    """
        @summary : Builds a filename using current currentTime.

        @Note : The format will be something like this :
                StatsPaths.STATSPICKLES/clientName/date/TXorRX//machine_hour
                Ex : StatsPaths.STATSPICKLES/clientName/20060707/tx/machinex_12:00:00

                offset can be used to find a file from an hour close to the
                current one.

                tempcurrentTime can also be used to build a filename from
                another hour.

        @warning :To be used only with pickles created hourly.
    """

    # Empty currentTime means "use the system clock"; otherwise convert
    # the given iso date to seconds since epoch.
    if currentTime == "":
        epochTime = time.time()
    else:
        epochTime = StatsDateLib.getSecondsSinceEpoch( currentTime )

    epochTime = epochTime + ( offset * StatsDateLib.HOUR )

    # Build the "YYYYMMDD" folder name, zero-padding each field.
    splitTime = time.gmtime( epochTime )
    timeFolder = ""
    for field in splitTime[:3]:
        timeFolder = timeFolder + ( "%02d" % int( field ) )

    hour = StatsDateLib.getHoursFromIso( StatsDateLib.getIsoFromEpoch( epochTime ) )

    # Clamp client and machine names to the filesystem's max name length.
    maxLt = os.statvfs( STATSPATHS.STATSPICKLES )[statvfs.F_NAMEMAX]
    template = "%s" + "%." + str( maxLt ) + "s/%s/%s/%." + str( maxLt ) + "s_%s"

    return template %( STATSPATHS.STATSPICKLES, client, timeFolder, fileType, str(machine), str(hour) )
def getParametersFromMonitoringConfigurationFile( self ):
    """
        @summary : Gather all the parameters from the
                   StatsPaths.STATSETC/monitoringConf file.

        @return : All collected values in this order emails, machines,
                  files, folders, maxUsages, errorsLogFile, maxSettingsFile.

        @raise Exception : When the configuration file is missing.

        @note : Fixes against the original version : the config file handle
                is now closed via try/finally (it used to be closed in a
                trailing try/except pass, leaking the handle whenever any
                config.get raised); the local no longer shadows the builtin
                'file'.
    """

    statsPaths = StatsPaths()
    statsPaths.setPaths()
    CONFIG = statsPaths.STATSETC + "monitoringConf"

    if not os.path.isfile( CONFIG ):
        raise Exception( "%s configuration file not present. Please restore file prior to running" %CONFIG )

    config = ConfigParser()

    configFile = open( CONFIG )
    try:
        config.readfp( configFile )

        self.emails = config.get( 'statsMonitoring', 'emails' ).split( ";" )
        self.sender = config.get( 'statsMonitoring', 'sender' )
        self.smtpServer = config.get( 'statsMonitoring', 'smtpServer' )
        self.machines = config.get( 'statsMonitoring', 'machines' ).split( ";" )
        self.files = config.get( 'statsMonitoring', 'files' ).split( ";" )
        self.folders = config.get( 'statsMonitoring', 'folders' ).split( ";" )
        self.maxUsages = config.get( 'statsMonitoring', 'maxUsages' ).split( ";" )
        self.errorsLogFile = config.get( 'statsMonitoring', 'errorsLogFile' )
        self.maxSettingsFile = config.get( 'statsMonitoring', 'maxSettingsFile' )
    finally:
        configFile.close()

    # Monitoring window : from the previous monitoring job up to the
    # current hour.
    self.endTime = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( time.time() ) )
    self.startTime = self.getPreviousMonitoringJob( self.endTime )
    self.maximumGaps = self.getMaximumGaps()
    self.updateMachineNamesBasedOnExistingMachineTags()
def getDirListToKeep(daysToKeep=21):
    """
        @summary : Gets the list of directories to keep. Based on daysToKeep parameter.

        @param daysToKeep : Number of past days to keep.

        @return : List of directories to keep.
    """

    dirlist = []

    secondsSinceEpoch = time.time()

    # One "YYYYMMDD" folder name per day, today first.
    for i in range(daysToKeep):
        dirlist.append( StatsDateLib.getIsoFromEpoch(secondsSinceEpoch - (i * 60 * 60 * 24)).split()[0].replace('-', ''))

    return dirlist
def setYears( self ):
    """
        Returns the last 3 year numbers including the current year.
    """

    nowIso = StatsDateLib.getIsoFromEpoch( time.time() )
    # Day forced to 1 so the date is valid regardless of month length.
    nowDate = datetime.date( int(nowIso[0:4]), int(nowIso[5:7]), 1 )

    years = []
    # Oldest year first : offsets 2, 1, 0 from the current year.
    for offset in range( 2, -1, -1 ):
        isoStart = "%s-%s-%s 00:00:00" %( nowDate.year - offset, nowDate.month, nowDate.day )
        years.append( StatsDateLib.getSecondsSinceEpoch( isoStart ) )

    self.years = years
def filterClientsNamesUsingWilcardFilters( currentTime, timespan, clientNames, machines, fileTypes ):
    """
        Expands wildcard ('?' or '*') client names into the concrete
        client/source names that ran during the given period.

        @param currentTime: currentTime specified in the parameters.
        @param timespan: Time span specified within the parameters.
        @param clientNames:List of client names found in the parameters.
        @param machines: Machines to search for running clients/sources.
        @param fileTypes: One file type per client, or a single type
                          applied to all of them.

        @return: List of client names with wildcards expanded.
    """

    newClientNames = []

    end = currentTime
    start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch(currentTime)- 60*60*timespan )

    if len(clientNames) >= len( fileTypes ) or len( fileTypes ) ==1:

        # A single file type is replicated so each client has one.
        if len( fileTypes ) == 1 :
            for i in range(1, len( clientNames ) ):
                fileTypes.append( fileTypes[0])

        # map(None, a, b) is the Python 2 equivalent of zip_longest:
        # pairs padded with None when lengths differ.
        for clientName,fileType in map( None, clientNames, fileTypes ):

            if '?' in clientName or '*' in clientName :
                pattern =clientName
                rxHavingRun,txHavingRun = GeneralStatsLibraryMethods.getRxTxNamesHavingRunDuringPeriod(start, end, machines, pattern)

                if fileType == "rx":
                    namesHavingrun = rxHavingRun
                else:
                    namesHavingrun = txHavingRun

                newClientNames.extend( namesHavingrun )
            else:
                newClientNames.append( clientName )

    return newClientNames
def getMissingWeeksBetweenUpdates(self, update1InIsoFormat, update2InIsoFormat ):
    """
        @summary : Returns the list of week start dates, seven days apart,
                   going from update1InIsoFormat towards
                   update2InIsoFormat.

        @param update1InIsoFormat : Older update date, in iso format.
        @param update2InIsoFormat : Newer update date, in iso format.

        @Note : If update1InIsoFormat = 2008-02-28 15:00:00
                and update2InIsoFormat = 2008-02-28 15:00:00
                this method would return [ 2008-02-28 15:00:00 ]

        NOTE(review): the example above does not match the implementation —
        the trailing [:-1] drops the last generated date, so two equal dates
        return an empty list, and the final week reaching update2InIsoFormat
        is always excluded.  Confirm which behavior callers rely on before
        changing either the code or the note.

        @return : Returns the list of weekly dates (see note above).
    """

    missingWeeks = []

    if update2InIsoFormat > update1InIsoFormat:

        # Walk forward one week (7 days) at a time, collecting each date,
        # until update2InIsoFormat is passed.
        weekInIsoFormat = update1InIsoFormat
        while weekInIsoFormat <= update2InIsoFormat :
            missingWeeks.append( weekInIsoFormat )
            weekInIsoFormat = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( weekInIsoFormat ) + ( StatsDateLib.DAY*7 ) )

    # Drop the last entry (see review note in the docstring).
    return missingWeeks[:-1]
def getStartTimeAndEndTime( self, collectUptoNow = False ):
    """
        @summary : Computes the start and end times of the graphics from
                   self.currentTime and self.timespan.

        @warning : collectUptoNow not yet supported in program !

        @return : ( startTime, endTime ) tuple, both in iso format.
    """

    if collectUptoNow != True :
        # Usual case : round the current time down to the hour.
        endTime = StatsDateLib.getIsoWithRoundedHours( self.currentTime )
    else :
        # "Up to now" support is not implemented yet.
        endTime = self.currentTime

    spanInSeconds = self.timespan * StatsDateLib.HOUR
    startTime = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( endTime ) - spanInSeconds )

    return startTime, endTime
def __init__( self, clients, directories, types, startTimes,collectUpToNow, fileType,\
              currentDate = '2005-06-27 13:15:00', interval = 1, hourlyPickling = True,\
              machine = "" ):
    """
        @summary : Data structure used to contain all necessary info
                   for a call to ClientStatsPickler.

        @param clients        : Clients for which the job is done.
        @param directories    : Directories containing the files to treat.
        @param types          : Data types to collect, ex: latency.
        @param startTimes     : Time of last update.
        @param collectUpToNow : Whether or not data is collected up to now.
        @param fileType       : File type to use, ex: tx, rx.
        @param currentDate    : Time of the update; when empty/None, the
                                system's current time is used instead.
        @param interval       : Interval.
        @param hourlyPickling : Whether or not hourly pickles are created.
        @param machine        : Machine on which the update is made.
    """

    # Fallback date when no currentDate is supplied.
    systemsCurrentDate = StatsDateLib.getIsoFromEpoch( time.time() )

    self.clients = clients              # Client for wich the job is done.
    self.machine = machine              # Machine on wich update is made.
    self.types = types                  # Data types to collect ex:latency.
    self.fileType = fileType            # File type to use ex :tx,rx etc.
    self.directories = directories      # Get the directory containing files.
    self.interval = interval            # Interval.
    self.startTimes = startTimes        # Time of last update....
    self.currentDate = currentDate or systemsCurrentDate # Time of the update.
    self.collectUpToNow = collectUpToNow # Wheter or not we collect up to now.
    self.hourlyPickling = hourlyPickling # Whether or not we create hourly pickles.
    self.endTime = self.currentDate      # Will be currentDate if collectUpToNow
                                         # is true, start of the current hour if not.
def getStartAndEndTimeForPickleRecollection(): """ @summary : Gets the start time and the endTime of the pickle recollection from the user's input. @return : Returns the startTime and endTime. """ startTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss) : " ) while not StatsDateLib.isValidIsoDate(startTime): print "Error. The entered date must be of the iso format." startTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss) : " ) endTime = raw_input( "Enter the endTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : " ) while (str(endTime).lower() != "now" and not StatsDateLib.isValidIsoDate(endTime) and (StatsDateLib.isValidIsoDate(endTime) and endTime <= startTime)): if StatsDateLib.isValidIsoDate(endTime) and endTime <= startTime: print "Error. End time must be after startTime( %s ). " elif StatsDateLib.isValidIsoDate(endTime): print "Error. The entered date must be of the iso format." endTime = raw_input( "Enter the endTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : " ) if endTime == "now": endTime = StatsDateLib.getIsoFromEpoch(time.time()) return startTime, endTime
def getStartTimeAndEndTime(self, collectUptoNow=False):
    """
        @summary : Returns the start and end times of the graphics, the end
                   being self.currentTime and the start lying
                   self.timespan hours earlier.

        @warning : collectUptoNow not yet supported in program !

        @return : ( startTime, endTime ), both in iso format.
    """

    # "Now" support is not implemented; anything else rounds to the hour.
    if collectUptoNow != True:
        endTime = StatsDateLib.getIsoWithRoundedHours(self.currentTime)
    else:
        endTime = self.currentTime

    endTimeInSeconds = StatsDateLib.getSecondsSinceEpoch(endTime)
    startTime = StatsDateLib.getIsoFromEpoch(endTimeInSeconds - self.timespan * StatsDateLib.HOUR)

    return startTime, endTime
def addOptions( parser ):
    """
        @summary : This method is used to add all available options to the
                   option parser for the graphics generator.

        @param parser : optparse.OptionParser instance to populate.
    """

    # Client / data selection options.
    parser.add_option("-c", "--clients", action="store", type="string", dest="clients", default="", help=_("Clients' names") )

    parser.add_option( "--copy", action="store_true", dest = "copy", default=False, help=_("Create a copy file for the generated image.") )

    parser.add_option( "--combineClients", action="store_true", dest = "combineClients", default=False, \
                       help=_("Combine the data of all client into a single graphics for each graphic type.") )

    # Defaults to the current time so normal runs need no -d flag.
    parser.add_option("-d", "--date", action="store", type="string", dest="currentTime", \
                      default=StatsDateLib.getIsoFromEpoch( time.time() ), help=_("Decide current time. Usefull for testing.") )

    parser.add_option("-f", "--fileType", action="store", type="string", dest="fileType", default='tx',\
                      help=_("Type of log files wanted.") )

    parser.add_option( "-g", "--groupName", action="store", type="string", dest="groupName", default="",
                       help=_("Specify a name for the combined graphics of numerous client/sources. Note : requires the --combinedClients options to work." ) )

    parser.add_option( "-m", "--machines", action="store", type="string", dest="machines", default=LOCAL_MACHINE,\
                       help = _("Machines for wich you want to collect data.") )

    parser.add_option("-n", "--collectUpToNow", action="store_true", dest = "collectUpToNow", default=False, \
                      help=_("Collect data up to current second.") )

    parser.add_option("-o", "--outputLanguage", action="store", type="string", dest="outputLanguage",\
                      default=LanguageTools.getMainApplicationLanguage(), help = _("Language in which the graphics are outputted.") )

    parser.add_option("-p", "--products", action="store", type = "string", dest = "productTypes", default=_("All"), \
                      help=_("Specific product types to look for in the data collected.") )

    parser.add_option("-s", "--span", action="store",type ="int", dest = "timespan", default=12, help=_("timespan( in hours) of the graphic.") )

    parser.add_option("-t", "--types", type="string", dest="types", default=_("All"),help=_("Types of data to look for.") )
def updateGroupedRoundRobinDatabases(infos, logger=None):
    """
        @summary : Updates the rrd databases containing the merged data of a
                   group of clients, from the group's last recorded update
                   time up to infos.endTime.

        @param infos : Structure carrying endTime, group, clients, machines
                       and fileTypes for the group to update.
        @param logger : Optional logger; all logging is best-effort.
    """

    endTime = StatsDateLib.getSecondsSinceEpoch(infos.endTime)
    tempRRDFileName = RrdUtilities.buildRRDFileName(
        _("errors"),
        clients=infos.group,
        machines=infos.machines,
        fileType=infos.fileTypes[0])

    # Resume from the last recorded update; 0 means "never updated",
    # in which case we start from today's midnight.
    startTime = RrdUtilities.getDatabaseTimeOfUpdate(tempRRDFileName, infos.fileTypes[0])
    if startTime == 0:
        startTime = StatsDateLib.getSecondsSinceEpoch(
            StatsDateLib.getIsoTodaysMidnight(infos.endTime))

    timeSeperators = getTimeSeperatorsBasedOnAvailableMemory(
        StatsDateLib.getIsoFromEpoch(startTime),
        StatsDateLib.getIsoFromEpoch(endTime), infos.clients,
        infos.fileTypes[0], infos.machines)

    for i in xrange(0, len(timeSeperators), 2):  # seperators come in pairs

        startTime = StatsDateLib.getSecondsSinceEpoch(timeSeperators[i])
        dataPairs = getPairs(infos.clients, infos.machines,
                             infos.fileTypes[0], timeSeperators[i],
                             timeSeperators[i + 1], infos.group, logger)

        for dataType in dataPairs:

            translatedDataType = LanguageTools.translateTerm(
                dataType, 'en', LanguageTools.getMainApplicationLanguage(),
                CURRENT_MODULE_ABS_PATH)

            rrdFileName = RrdUtilities.buildRRDFileName(
                dataType=translatedDataType,
                clients=infos.group,
                groupName=infos.group,
                machines=infos.machines,
                fileType=infos.fileTypes[0],
                usage="group")

            if not os.path.isfile(rrdFileName):
                createRoundRobinDatabase(rrdFileName, startTime, dataType)

            if endTime > startTime:

                # Skip the pairs that predate startTime.
                # BUGFIX: test the bound BEFORE indexing — the original
                # tested "value < startTime" first and raised IndexError
                # when every pair predated startTime.
                j = 0
                while j < len(dataPairs[dataType]) and dataPairs[dataType][j][0] < startTime:
                    j = j + 1

                for k in range(j, len(dataPairs[dataType])):
                    try:
                        rrdtool.update(
                            rrdFileName, '%s:%s' %
                            (int(dataPairs[dataType][k][0]),
                             dataPairs[dataType][k][1]))
                    except:
                        # rrdtool refuses samples older than its last
                        # update; log and carry on with the next sample.
                        if logger != None:
                            try:
                                logger.warning(
                                    "Could not update %s. Last update was more recent than %s "
                                    % (rrdFileName, int(dataPairs[dataType][k][0])))
                            except:
                                pass
                        pass

            else:
                if logger != None:
                    try:
                        logger.warning(
                            _("This database was not updated since it's last update was more recent than specified date : %s"
                              ) % rrdFileName)
                    except:
                        pass

    # Record how far this group's databases have been brought up to date.
    RrdUtilities.setDatabaseTimeOfUpdate(tempRRDFileName, infos.fileTypes[0], endTime)
def updateRoundRobinDatabases(client, machines, fileType, endTime, logger=None):
    """
        @summary : This method updates every database linked to a certain
                   client, from the client's last recorded update time up
                   to endTime.

        @note : Database types are linked to the filetype associated
                with the client.

        @param client   : Client whose databases are updated.
        @param machines : Machines on which the client runs.
        @param fileType : File type (tx, rx) of the client.
        @param endTime  : Iso date up to which the databases are updated.
        @param logger   : Optional logger; all logging is best-effort.
    """

    tempRRDFileName = RrdUtilities.buildRRDFileName(dataType=_("errors"),
                                                    clients=[client],
                                                    machines=machines,
                                                    fileType=fileType)

    # Resume from the last recorded update; 0 means "never updated",
    # in which case we start from today's midnight.
    startTime = RrdUtilities.getDatabaseTimeOfUpdate(tempRRDFileName, fileType)
    if startTime == 0:
        startTime = StatsDateLib.getSecondsSinceEpoch(
            StatsDateLib.getIsoTodaysMidnight(endTime))

    endTime = StatsDateLib.getSecondsSinceEpoch(endTime)

    timeSeperators = getTimeSeperatorsBasedOnAvailableMemory(
        StatsDateLib.getIsoFromEpoch(startTime),
        StatsDateLib.getIsoFromEpoch(endTime), [client], fileType, machines)

    for i in xrange(len(timeSeperators) - 1):

        dataPairs = getPairs([client], machines, fileType, timeSeperators[i],
                             timeSeperators[i + 1], groupName="",
                             logger=logger)

        for dataType in dataPairs:

            translatedDataType = LanguageTools.translateTerm(
                dataType, 'en', LanguageTools.getMainApplicationLanguage(),
                CURRENT_MODULE_ABS_PATH)

            rrdFileName = RrdUtilities.buildRRDFileName(
                dataType=translatedDataType,
                clients=[client],
                machines=machines,
                fileType=fileType)

            if not os.path.isfile(rrdFileName):
                createRoundRobinDatabase(databaseName=rrdFileName,
                                         startTime=startTime,
                                         dataType=dataType)

            if endTime > startTime:

                # Skip the pairs that predate startTime.
                # BUGFIX: bound check added — the original loop had no
                # bound at all and raised IndexError when every pair
                # predated startTime.
                j = 0
                while j < len(dataPairs[dataType]) and dataPairs[dataType][j][0] < startTime:
                    j = j + 1

                for k in range(j, len(dataPairs[dataType])):
                    try:
                        rrdtool.update(
                            rrdFileName, '%s:%s' %
                            (int(dataPairs[dataType][k][0]),
                             dataPairs[dataType][k][1]))
                    except:
                        # rrdtool refuses samples older than its last
                        # update; log and carry on with the next sample.
                        if logger != None:
                            try:
                                logger.warning(
                                    "Could not update %s. Last update was more recent than %s "
                                    % (rrdFileName, int(dataPairs[dataType][k][0])))
                            except:
                                pass
                        pass

                if logger != None:
                    try:
                        logger.info(
                            _("Updated %s db for %s in db named : %s") %
                            (dataType, client, rrdFileName))
                    except:
                        pass

            else:
                if logger != None:
                    try:
                        logger.warning(
                            _("This database was not updated since it's last update was more recent than specified date : %s"
                              ) % rrdFileName)
                    except:
                        pass

    # NOTE(review): the update time is recorded against the LAST rrdFileName
    # built, while the start time above was read from tempRRDFileName — the
    # grouped variant of this function uses tempRRDFileName for both.
    # Confirm this asymmetry is intended before changing it.
    RrdUtilities.setDatabaseTimeOfUpdate(rrdFileName, fileType, endTime)
def addOptions(parser):
    """
        @summary: This method is used to add all available options to the
                  option parser for the csv file generator.

        @param parser : optparse.OptionParser instance to populate.
    """

    parser.add_option("-c", "--clients", action="store", type="string",
                      dest="clients", default="ALL",
                      help=_("Clients' names"))

    # Exactly one of -d / -m / -w / -y is expected by the option validation.
    parser.add_option("-d", "--daily", action="store_true", dest="daily",
                      default=False,
                      help=_("Create csv file containing daily data."))

    # Defaults to the current time so normal runs need no --date flag.
    parser.add_option("--date", action="store", type="string", dest="date",
                      default=StatsDateLib.getIsoFromEpoch(time.time()),
                      help=_("Decide end time of graphics. Usefull for testing."))

    parser.add_option("-f", "--fileType", action="store", type="string",
                      dest="fileType", default='tx',
                      help=_("Type of log files wanted."))

    # --fixedPrevious / --fixedCurrent are mutually exclusive (validated later).
    parser.add_option("--fixedPrevious", action="store_true",
                      dest="fixedPrevious", default=False,
                      help=_("Do not use floating weeks|days|months|years. Use previous fixed interval found."))

    parser.add_option("--fixedCurrent", action="store_true",
                      dest="fixedCurrent", default=False,
                      help=_("Do not use floating weeks|days|months|years. Use current fixed interval found."))

    parser.add_option("--includeGroups", action="store_true",
                      dest="includeGroups", default=False,
                      help=_("Include groups of all the specified machines or clusters."))

    parser.add_option("-l", "--language", action="store", type="string",
                      dest="outputLanguage", default="",
                      help=_("Language in which you want the casv file to be created in."))

    parser.add_option("--machines", action="store", type="string",
                      dest="machines", default=LOCAL_MACHINE,
                      help=_("Machines for wich you want to collect data."))

    parser.add_option("--machinesAreClusters", action="store_true",
                      dest="machinesAreClusters", default=False,
                      help=_("Specified machines are clusters."))

    parser.add_option("-m", "--monthly", action="store_true", dest="monthly",
                      default=False,
                      help=_("Create csv file containing monthly data."))

    parser.add_option("--turnOffLogging", action="store_true",
                      dest="turnOffLogging", default=False,
                      help=_("Turn off the logger"))

    parser.add_option("-w", "--weekly", action="store_true", dest="weekly",
                      default=False,
                      help=_("Create csv file containing weekly data."))

    parser.add_option("-y", "--yearly", action="store_true", dest="yearly",
                      default=False,
                      help=_("Create csv file containing yearly data."))
def getOptionsFromParser(parser):
    """
        @summary: Parses and validates the options found in the parser.
                  Exits the program (with an explanatory message) on any
                  invalid combination of options or malformed date.

        @param parser : optparse.OptionParser already populated by
                        addOptions.

        @return: If information was found to be valid, returns a _CsvInfos
                 structure carrying the computed start/end times, span and
                 machine lists.
    """

    infos = None
    date = []

    (options, args) = parser.parse_args()

    machines = options.machines.replace(' ', '').split(',')
    date = options.date.replace('"', '').replace("'", '')
    fileType = options.fileType.replace("'", '')
    daily = options.daily
    weekly = options.weekly
    monthly = options.monthly
    yearly = options.yearly
    fixedCurrent = options.fixedCurrent
    fixedPrevious = options.fixedPrevious
    turnOffLogging = options.turnOffLogging
    includeGroups = options.includeGroups
    machinesAreClusters = options.machinesAreClusters
    outputLanguage = options.outputLanguage

    # --fixedPrevious and --fixedCurrent are mutually exclusive.
    if fixedPrevious and fixedCurrent:
        print _("Error. Please use only one of the fixed options,either fixedPrevious or fixedCurrent. ")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    # Exactly one of daily / weekly / monthly / yearly must be chosen.
    counter = 0
    specialParameters = [daily, monthly, weekly, yearly]
    for specialParameter in specialParameters:
        if specialParameter:
            counter = counter + 1

    if counter > 1:
        print _("Error. Only one of the daily, weekly and yearly options can be use at a time ")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()
    elif counter == 0:
        print _("Error. Please use either the -d -m -w or -y options. ")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    try:
        # Makes sure date is of valid format.
        # Makes sure only one space is kept between date and hour.
        t = time.strptime(date, '%Y-%m-%d %H:%M:%S')
        split = date.split()
        date = "%s %s" % (split[0], split[1])
    except:
        print _("Error. The date format must be YYYY-MM-DD HH:MM:SS")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    #TODO :fixStartEnd method???
    if fixedPrevious:
        # Previous complete day/week/month/year relative to date.
        if daily:
            span = "daily"
            graphicType = "daily"
            start, end = StatsDateLib.getStartEndFromPreviousDay(date)
        elif weekly:
            span = "weekly"
            graphicType = "weekly"
            start, end = StatsDateLib.getStartEndFromPreviousWeek(date)
        elif monthly:
            span = "monthly"
            graphicType = "monthly"
            start, end = StatsDateLib.getStartEndFromPreviousMonth(date)
        elif yearly:
            span = "yearly"
            graphicType = "yearly"
            start, end = StatsDateLib.getStartEndFromPreviousYear(date)
        # Span in hours of the fixed interval.
        timeSpan = int(
            StatsDateLib.getSecondsSinceEpoch(end) -
            StatsDateLib.getSecondsSinceEpoch(start)) / 3600

    elif fixedCurrent:
        # Current (running) day/week/month/year containing date.
        if daily:
            span = "daily"
            graphicType = "daily"
            start, end = StatsDateLib.getStartEndFromCurrentDay(date)
        elif weekly:
            span = "weekly"
            graphicType = "weekly"
            start, end = StatsDateLib.getStartEndFromCurrentWeek(date)
        elif monthly:
            span = "monthly"
            graphicType = "monthly"
            start, end = StatsDateLib.getStartEndFromCurrentMonth(date)
        elif yearly:
            span = "yearly"
            graphicType = "yearly"
            start, end = StatsDateLib.getStartEndFromCurrentYear(date)
        timeSpan = int(
            StatsDateLib.getSecondsSinceEpoch(end) -
            StatsDateLib.getSecondsSinceEpoch(start)) / 3600

    else:
        # Floating interval : a fixed number of hours ending at date.
        #TODO fix span method???
        if daily:
            timeSpan = 24
            graphicType = "daily"
            span = "daily"
        elif weekly:
            timeSpan = 24 * 7
            graphicType = "weekly"
            span = "weekly"
        elif monthly:
            timeSpan = 24 * 30
            graphicType = "monthly"
            span = "monthly"
        elif yearly:
            timeSpan = 24 * 365
            graphicType = "yearly"
            span = "yearly"

        start = StatsDateLib.getIsoFromEpoch(
            StatsDateLib.getSecondsSinceEpoch(date) - timeSpan * 60 * 60)
        end = date

    if fileType != "tx" and fileType != "rx":
        print _("Error. File type must be either tx or rx.")
        print _('Multiple types are not accepted.')
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()

    if includeGroups == True:
        configParameters = StatsConfigParameters()
        configParameters.getAllParameters()
        groups = configParameters.groupParameters.groups
        machinesToSearch = machines[:]  #Forces a copy and nota reference.

        for machine in machines:
            if machinesAreClusters == True:
                machineConfig = MachineConfigParameters()
                machineConfig.getParametersFromMachineConfigurationFile()
                machinesAssociatedWith = machineConfig.getMachinesAssociatedWith(
                    machine)
                # Flatten the python list repr into a bare comma-less string
                # so it can be compared against the group's machine string.
                machinesToTest = str(machinesAssociatedWith).replace('[','').replace(']', '').replace(',','').replace( "'",'' ).replace('"','' ).\
                                 replace(" ",'').replace('[','').replace(']', '').replace(',','').replace( "'",'' ).replace('"','' ).replace(" ",'')

                for group in groups:
                    groupsMachine = str( configParameters.groupParameters.groupsMachines[group] ).replace('[','').replace(']', '').\
                                    replace(',','').replace( "'",'' ).replace('"','' ).replace(" ",'')
                    #print "machinesToTest %s groupsMachine %s" %(machinesToTest,groupsMachine )
                    if machinesToTest in groupsMachine:
                        if groupsMachine not in machinesToSearch:
                            machinesToSearch.append(groupsMachine)
        #print machines

    # NOTE(review): machinesToSearch is only bound inside the includeGroups
    # branch above — confirm callers always pass --includeGroups, otherwise
    # the call below raises NameError.
    infos = _CsvInfos( start = start , end = end , span = span, timeSpan = timeSpan, fileType = fileType, machinesForLabels = machines,\
                       machinesToSearch = machinesToSearch, machinesAreClusters = machinesAreClusters, dataSource = "databases", outputLanguage = outputLanguage )

    return infos
def main():
    """
        @summary : Gets all the parameters from config file.
                   Updates pickle files.
                   Generates all the required graphics.
                   Generates the required csv files.
                   Updates the different web pages.
                   Updates the desired databases.
                   Uploads graphics to the required machines.
                   Monitors the result of all the activities.
    """

    # Single-instance guard : refuse to run if another pxStatsStartup
    # instance holds the lock.
    if GeneralStatsLibraryMethods.processIsAlreadyRunning(
            "pxStatsStartup") == False:

        setGlobalLanguageParameters()

        GeneralStatsLibraryMethods.createLockFile("pxStatsStartup")

        currentTime = time.time()
        currentTimeInIsoFormat = StatsDateLib.getIsoFromEpoch(currentTime)

        generalParameters = StatsConfigParameters()
        generalParameters.getAllParameters()

        machineParameters = MachineConfigParameters()
        machineParameters.getParametersFromMachineConfigurationFile()

        validateParameters(generalParameters, machineParameters, None)

        # Re-tag files when machine tags changed since the last run.
        tagsNeedingUpdates = getMachinesTagsNeedingUpdates(
            generalParameters, machineParameters)

        if tagsNeedingUpdates == None:  #no previous parameter found
            saveCurrentMachineParameters(machineParameters)
        elif tagsNeedingUpdates != []:
            updateFilesAssociatedWithMachineTags(tagsNeedingUpdates,
                                                 machineParameters)
            saveCurrentMachineParameters(machineParameters)

        # Main update pipeline, in dependency order.
        updatePickles(generalParameters, machineParameters,
                      currentTimeInIsoFormat)

        updateDatabases(generalParameters, machineParameters,
                        currentTimeInIsoFormat)

        backupRRDDatabases(generalParameters, currentTimeInIsoFormat,
                           generalParameters.nbDbBackupsToKeep)

        #updateCsvFiles( )

        getGraphicsForWebPages(generalParameters, currentTimeInIsoFormat)

        updateWebPages(generalParameters)

        #uploadGraphicFiles( generalParameters, machineParameters )

        cleanUp(generalParameters, currentTimeInIsoFormat,
                generalParameters.daysOfPicklesToKeep)

        monitorActivities(generalParameters, currentTimeInIsoFormat)

        # Record this automatic update in the logs.
        updateManager = AutomaticUpdatesManager(
            generalParameters.nbAutoUpdatesLogsToKeep, "pxStatsStartup")
        updateManager.addAutomaticUpdateToLogs(currentTimeInIsoFormat)

        GeneralStatsLibraryMethods.deleteLockFile("pxStatsStartup")

        print _("Finished.")

    else:

        print _(
            "Error. An other instance of pxStatsStartup is allready running.")
        print _("Only one instance of this software can be run at once.")
        print _(
            "Please terminate the other instance or wait for it to end it's execution"
        )
        print _("before running this program again.")
        print _("Program terminated.")
        sys.exit()
def getStartAndEndTimeForDatabaseRecollection(infos): """ @summary : Gets the start time and the endTime of the pickle recollection from the user's input. @param infos : Previously gathered infos. @note : If pickles are to be recollected, infos must contain the pickles recollection start time and end time. @return : Returns the startTime and endTime. """ if infos.pickles == True: isCertainAboutStartTime = False #************************startTime section********* while isCertainAboutStartTime == False: startTime = raw_input( "Enter the startTime of the dataBase recollection ( yyyy-mm-dd hh:mm:ss ) : " ) while not StatsDateLib.isValidIsoDate(startTime): if not StatsDateLib.isValidIsoDate(startTime): print "Error. The entered date must be of the iso format." startTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss ) : " ) if (StatsDateLib.isValidIsoDate(startTime) and startTime > infos.picklesRecollectionStartTime): print "Warning : StartTime of database recollection ( %s ) is after startTime of pickleRecollection( %s ) " % ( startTime, infos.picklesRecollectionStartTime) isCertainAnswer = raw_input( "Are you sure you want to keep this date ? ( y or n ) : ") while (str(isCertainAnswer).lower() != 'y' and str(isCertainAnswer).lower() != 'n'): print "Error.Answer needs to be either y or n." isCertainAnswer = raw_input( "Are you sure you want to keep this date ? ( y or n ) : " ) if str(isCertainAnswer).lower() == 'y': isCertainAboutStartTime = True else: print "A new startTime will be required." else: isCertainAboutStartTime = True #************************endTime section********* isCertainAboutEndTime = False while isCertainAboutEndTime == False: endTime = raw_input( "Enter the endTime of the dataBase recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : " ) while (not StatsDateLib.isValidIsoDate(endTime) and str(endTime).lower() != "now"): if not StatsDateLib.isValidIsoDate(endTime): print "Error. 
The entered date must be of the iso format or now." endTime = raw_input( "Enter the endTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : " ) if (endTime != "now" and StatsDateLib.isValidIsoDate(endTime) and endTime < infos.picklesRecollectionEndTime): print "Warning : endTime of database recollection ( %s ) is before the endTime of pickleRecollection( %s ) " % ( startTime, infos.picklesRecollectionStartTime) isCertainAnswer = raw_input( "Are you sure you want to keep this date ? ( y or n ) : ") while (str(isCertainAnswer).lower() != 'y' and str(isCertainAnswer).lower() != 'n'): print "Error.Answer needs to be either y or n." isCertainAnswer = raw_input( "Are you sure you want to keep this date ? ( y or n ) : " ) if str(isCertainAnswer).lower() == 'y': isCertainAboutEndTime = True else: print "A new endTime will be required." else: isCertainAboutEndTime = True if endTime == "now": endTime = StatsDateLib.getIsoFromEpoch(time.time()) isCertainAboutEndTime = True else: startTime = raw_input( "Enter the startTime of the dataBase recollection ( yyyy-mm-dd hh:mm:ss ) : " ) while not StatsDateLib.isValidIsoDate(startTime): if not StatsDateLib.isValidIsoDate(startTime): print "Error. The entered date must be of the iso format." startTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss) : " ) endTime = raw_input( "Enter the endTime of the dataBase recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : " ) while (str(endTime) != "now" and not StatsDateLib.isValidIsoDate(endTime)): if not StatsDateLib.isValidIsoDate(endTime): print "Error. The entered date must be of the iso format." endTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : " ) if endTime == "now": endTime = StatsDateLib.getIsoFromEpoch(time.time()) return startTime, endTime
def askUserAboutUpdatingLogs( infos ): """ @Summary : Asks user about whether or not he wants to update the log files on his machine. @returns True or False """ updateLofFiles = False os.system( "clear" ) showPresentation() print "" print "" print "***************** Important note *****************" print "Collection or recollection of pickle files " print "is closely linked to the log files found on this machine." if StatsDateLib.getIsoWithRoundedHours( infos.picklesRecollectionStartTime ) != StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( time.time() )) : print "Data recollection is set to take place up to the current hour." print "For the end of the recollection it is recommended that log file be updated." print "However, if the recollection spans over a long while and that the log file currently " print "on this machine are 'old' and cover the start of the recollection," print "updating log files might cause you to loose some or all of those old files." else : print "Data recollection is set to end PRIOR to the current hour." print "In this case, log file updates are usually useless." print "In the case where the span between the start of the recollection " print "is longer than the span covered by the currently accessible log files, " print "usefull log files will be lsot by updating them." print "However the opposite could also be true. If problems occured and " print "databases are seriously outdated, updating them will be the only solution " print "capable of making some or all the needed log file data accessible for pickling." print "" print "***Please review log files prior to specifying whether or not you want to update them or not.***" print "" input = raw_input( "Do you want to update log files ? ( y or n ) : " ) while ( str( input ).lower() != 'n' and str( input ).lower() != 'y' ): print "Please enter one of the following choices : y/Y or n/N." input = raw_input( "Do you want to update log files ? 
( y or n ) : " ) if str( input ).lower() == 'y' : print "Log files will be updated." updateLofFiles = True else: print "Log files will not be updated." return updateLofFiles
def mergePicklesFromSameHour( logger = None , pickleNames = None, mergedPickleName = "",\ clientName = "" , combinedMachineName = "", currentTime = "",\ fileType = "tx" ): """ @summary: This methods receives a list of filenames referring to pickled FileStatsEntries. After the merger pickles get saved since they might be reused somewhere else. @precondition: Pickle should be of the same timespan and bucket width. If not no merging will occur. """ if logger != None: logger.debug(_("Call to mergePickles received.")) logging = True else: logging = False entryList = [] for pickle in pickleNames: #for every pickle we eneed to merge if os.path.isfile(pickle): entryList.append(CpickleWrapper.load(pickle)) else: #Use empty entry if there is no existing pickle of that name endTime = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch(currentTime) + StatsDateLib.HOUR) entryList.append( FileStatsCollector(startTime=currentTime, endTime=endTime, logger=logger, logging=logging)) if logger != None: logger.warning( _("Pickle named %s did not exist. Empty entry was used instead." ) % pickle) #start off with a carbon copy of first pickle in list. 
newFSC = FileStatsCollector( files = entryList[0].files , statsTypes = entryList[0].statsTypes, startTime = entryList[0].startTime,\ endTime = entryList[0].endTime, interval=entryList[0].interval, totalWidth = entryList[0].totalWidth,\ firstFilledEntry = entryList[0].firstFilledEntry, lastFilledEntry = entryList[0].lastFilledEntry,\ maxLatency = entryList[0].maxLatency, fileEntries = entryList[0].fileEntries,logger = logger,\ logging = logging ) if PickleMerging.entryListIsValid(entryList) == True: for i in range(1, len(entryList)): #add other entries for file in entryList[i].files: if file not in newFSC.files: newFSC.files.append(file) for j in range(len(newFSC.fileEntries)): # add all entries newFSC.fileEntries[j].values.productTypes.extend( entryList[i].fileEntries[j].values.productTypes) newFSC.fileEntries[j].files.extend( entryList[i].fileEntries[j].files) newFSC.fileEntries[j].times.extend( entryList[i].fileEntries[j].times) newFSC.fileEntries[j].nbFiles = newFSC.fileEntries[ j].nbFiles + (newFSC.fileEntries[j].nbFiles) for type in newFSC.statsTypes: newFSC.fileEntries[j].values.dictionary[type].extend( entryList[i].fileEntries[j].values.dictionary[type] ) newFSC.fileEntries[j].values.rows = newFSC.fileEntries[ j].values.rows + entryList[i].fileEntries[j].values.rows newFSC = newFSC.setMinMaxMeanMedians( startingBucket=0, finishingBucket=newFSC.nbEntries - 1) else: #Did not merge pickles named. Pickle list was not valid." if logger != None: logger.warning( _("Did not merge pickles named : %s. Pickle list was not valid." ) % pickleNames) logger.warning( _("Filled with empty entries instead.") % pickleNames) newFSC.fileEntries = PickleMerging.fillWithEmptyEntries( nbEmptyEntries=60, entries={}) #prevents us from having ro remerge file later on. temp = newFSC.logger del newFSC.logger CpickleWrapper.save(newFSC, mergedPickleName) try: os.chmod(mergedPickleName, 0777) except: pass #print "saved :%s" %mergedPickleName newFSC.logger = temp return newFSC
def addOptions( parser ):
    """
        @summary : This method is used to add all available options to the
                   option parser for the pickle updater.

        @param parser : optparse.OptionParser instance to populate.
    """

    parser.add_option( "-c", "--clients", action="store", type="string", dest="clients", default=_("All"),
                       help= _("Clients' names") )

    # Defaults to the current time so normal runs need no -d flag.
    parser.add_option( "-d", "--date", action="store", type="string", dest="currentDate", default=StatsDateLib.getIsoFromEpoch( time.time() ),\
                       help= _("Decide current time. Usefull for testing.") )

    parser.add_option( "-i", "--interval", type="int", dest="interval", default=1,
                       help=_("Interval (in minutes) for which a point will be calculated. Will 'smooth' the graph") )

    parser.add_option( "-f", "--fileType", action="store", type="string", dest="fileType", default='tx',
                       help=_("Type of log files wanted.") )

    parser.add_option( "-m", "--machine", action="store", type="string", dest="machine", default=LOCAL_MACHINE,
                       help = _("Machine for wich we are running the update.") )

    parser.add_option( "-n", "--now", action="store_true", dest = "collectUpToNow", default=False,
                       help=_("Collect data up to current second.") )

    parser.add_option( "-t", "--types", type="string", dest="types", default=_("All"),
                       help=_("Types of data to look for.") )
def getStartAndEndTimeForDatabaseRecollection( infos ): """ @summary : Gets the start time and the endTime of the pickle recollection from the user's input. @param infos : Previously gathered infos. @note : If pickles are to be recollected, infos must contain the pickles recollection start time and end time. @return : Returns the startTime and endTime. """ if infos.pickles == True : isCertainAboutStartTime = False #************************startTime section********* while isCertainAboutStartTime == False: startTime = raw_input( "Enter the startTime of the dataBase recollection ( yyyy-mm-dd hh:mm:ss ) : ") while not StatsDateLib.isValidIsoDate( startTime ) : if not StatsDateLib.isValidIsoDate( startTime ): print "Error. The entered date must be of the iso format." startTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss ) : ") if ( StatsDateLib.isValidIsoDate( startTime ) and startTime > infos.picklesRecollectionStartTime ) : print "Warning : StartTime of database recollection ( %s ) is after startTime of pickleRecollection( %s ) " %( startTime, infos.picklesRecollectionStartTime) isCertainAnswer = raw_input( "Are you sure you want to keep this date ? ( y or n ) : ") while( str(isCertainAnswer).lower() != 'y' and str(isCertainAnswer).lower() != 'n'): print "Error.Answer needs to be either y or n." isCertainAnswer = raw_input( "Are you sure you want to keep this date ? ( y or n ) : ") if str(isCertainAnswer).lower() == 'y': isCertainAboutStartTime = True else: print "A new startTime will be required." else: isCertainAboutStartTime = True #************************endTime section********* isCertainAboutEndTime = False while isCertainAboutEndTime == False: endTime = raw_input( "Enter the endTime of the dataBase recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : ") while ( not StatsDateLib.isValidIsoDate( endTime ) and str(endTime).lower() != "now" ): if not StatsDateLib.isValidIsoDate( endTime ): print "Error. 
The entered date must be of the iso format or now." endTime = raw_input( "Enter the endTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : ") if ( endTime != "now" and StatsDateLib.isValidIsoDate( endTime ) and endTime < infos.picklesRecollectionEndTime ) : print "Warning : endTime of database recollection ( %s ) is before the endTime of pickleRecollection( %s ) " %( startTime, infos.picklesRecollectionStartTime) isCertainAnswer = raw_input( "Are you sure you want to keep this date ? ( y or n ) : ") while( str(isCertainAnswer).lower() != 'y' and str(isCertainAnswer).lower() != 'n'): print "Error.Answer needs to be either y or n." isCertainAnswer = raw_input( "Are you sure you want to keep this date ? ( y or n ) : ") if str(isCertainAnswer).lower() == 'y': isCertainAboutEndTime = True else: print "A new endTime will be required." else: isCertainAboutEndTime = True if endTime == "now" : endTime = StatsDateLib.getIsoFromEpoch( time.time() ) isCertainAboutEndTime = True else: startTime = raw_input( "Enter the startTime of the dataBase recollection ( yyyy-mm-dd hh:mm:ss ) : ") while not StatsDateLib.isValidIsoDate( startTime ) : if not StatsDateLib.isValidIsoDate( startTime ): print "Error. The entered date must be of the iso format." startTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss) : ") endTime = raw_input( "Enter the endTime of the dataBase recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : ") while ( str(endTime) != "now" and not StatsDateLib.isValidIsoDate( endTime ) ): if not StatsDateLib.isValidIsoDate( endTime ): print "Error. The entered date must be of the iso format." endTime = raw_input( "Enter the startTime of the pickle recollection ( yyyy-mm-dd hh:mm:ss or 'now' for current time ) : ") if endTime == "now" : endTime = StatsDateLib.getIsoFromEpoch( time.time() ) return startTime, endTime
def updateHourlyPickles( infos, paths, logger = None ):
    """
        @summary : This method is to be used when hourly pickling is done. -1 pickle per hour per client.

                   This method needs will update the pickles by collecting data from the time of the last
                   pickle up to the current date.(System time or the one specified by the user.)

                   If for some reason data wasnt collected for one or more hour since last pickle,pickles
                   for the missing hours will be created and filled with data.

                   If no entries are found for this client in the pickled-times file, we take for granted
                   that this is a new client. In that case data will be collected from the top of the hour
                   up to the time of the call.

                   If new client has been producing data before the day of the first call, user can specify
                   a different time than system time to specify the first day to pickle. He can then call
                   this method with the current system time, and data between first day and current time
                   will be collected so that pickling can continue like the other clients can.

        @param infos : Gathered infos ; read here : clients, machine, endTime, startTimes, types,
                       interval, fileType, currentDate, collectUpToNow.
        @param paths : Paths object forwarded to setLastUpdate.
        @param logger : Optional logger used for warnings.
    """

    sp = StatsPickler( logger = logger )

    pathToLogFiles = GeneralStatsLibraryMethods.getPathToLogFiles( LOCAL_MACHINE, infos.machine )

    # One pass per client ; each client has its own start time in infos.startTimes.
    for i in range( len (infos.clients) ) :

        sp.client = infos.clients[i]

        # Seconds between this client's rounded-down start hour and the global end time.
        width = StatsDateLib.getSecondsSinceEpoch(infos.endTime) - StatsDateLib.getSecondsSinceEpoch( StatsDateLib.getIsoWithRoundedHours(infos.startTimes[i] ) )

        if width > StatsDateLib.HOUR :#In case pickling didnt happen for a few hours for some reason...

            # Build the list of hourly boundaries covering the whole gap.
            hours = [infos.startTimes[i]]
            hours.extend( StatsDateLib.getSeparatorsWithStartTime( infos.startTimes[i], interval = StatsDateLib.HOUR, width = width ))

            for j in range( len(hours)-1 ): #Covers hours where no pickling was done.

                startOfTheHour = StatsDateLib.getIsoWithRoundedHours( hours[j] )
                startTime = startOfTheHour

                endTime = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( StatsDateLib.getIsoWithRoundedHours(hours[j+1] ) ))
                #print " client : %s startTime : %s endTime : %s" %(infos.clients[i], startTime, endTime )

                # Degenerate interval : warn ( best effort ) and still collect below.
                if startTime >= endTime and logger != None :
                    try:
                        logger.warning( _("Startime used in updateHourlyPickles was greater or equal to end time.") )
                    except:
                        pass

                # One pickle file per client / hour / machine / file type.
                sp.pickleName = StatsPickler.buildThisHoursFileName( client = infos.clients[i], currentTime = startOfTheHour, machine = infos.machine, fileType = infos.fileType )

                sp.collectStats( types = infos.types, startTime = startTime , endTime = endTime, interval = infos.interval * StatsDateLib.MINUTE,\
                                 directory = pathToLogFiles, fileType = infos.fileType )

        else:

            # Gap is at most one hour : a single collection pass is enough.
            startTime = infos.startTimes[i]
            endTime = infos.endTime
            startOfTheHour = StatsDateLib.getIsoWithRoundedHours( infos.startTimes[i] )
            #print " client : %s startTime : %s endTime : %s" %(infos.clients[i], startTime, endTime )

            if startTime >= endTime and logger != None :#to be removed
                try:
                    logger.warning( _("Startime used in updateHourlyPickles was greater or equal to end time.") )
                except:
                    pass

            sp.pickleName = StatsPickler.buildThisHoursFileName( client = infos.clients[i], currentTime = startOfTheHour, machine = infos.machine, fileType = infos.fileType )

            sp.collectStats( infos.types, startTime = startTime, endTime = endTime, interval = infos.interval * StatsDateLib.MINUTE, directory = pathToLogFiles, fileType = infos.fileType )

        # Record the time up to which this client's data has now been pickled.
        # NOTE(review): placed inside the per-client loop, matching the per-client
        # arguments it receives — confirm against the original indentation.
        setLastUpdate( machine = infos.machine, client = infos.clients[i], fileType = infos.fileType, currentDate = infos.currentDate, paths = paths, collectUpToNow = infos.collectUpToNow )
def getSeperatorsForHourlyTreatments( startTime, endTime, currentFreeMemory, fileSizesPerHour, usage = "rrd" ):
    """
        @summary : Splits the [ startTime, endTime ] interval into time
                   seperators so that the hourly files regrouped between two
                   consecutive seperators never bust the currently available
                   memory.

        @attention: fileSizesPerHour MUST describe hourly files.

        @param startTime: Start time, iso format, of the interval to work with.
        @param endTime: End time, iso format, of the interval to work with.
        @param currentFreeMemory: Maximum amount of memory to use per seperation.
        @param fileSizesPerHour: Size of the file(s) to be treated at every hour.
        @param usage: "rrd" inserts iso dates shifted by one minute ; any other
                      value inserts the seperator's epoch value instead.

        @return: Returns the time seperators.
    """

    # Bail out right away when even the very first file cannot fit in memory.
    if not fileSizesPerHour[0] < currentFreeMemory:
        raise Exception( "Cannot build seperators. First file will not even fit within current available memory." )

    seperators = [ startTime ]
    hourInEpochFormat = StatsDateLib.getSecondsSinceEpoch( startTime )
    accumulatedSize = 0

    for hourlyFileSize in fileSizesPerHour:
        accumulatedSize = accumulatedSize + hourlyFileSize
        # Close the current seperation as soon as the memory budget is busted.
        if currentFreeMemory < accumulatedSize:
            seperators.append( StatsDateLib.getIsoFromEpoch( hourInEpochFormat ) )
            accumulatedSize = 0
        hourInEpochFormat = hourInEpochFormat + StatsDateLib.HOUR

    if seperators[-1] != endTime:
        seperators.append( endTime )

    # If any in-between seperators were added, pair each of them with the start
    # value of the following seperation.
    if len( seperators ) > 2:
        position = 1
        upperBound = len( seperators ) - 1
        while position < upperBound:
            if usage == "rrd":
                # add 1 minute
                newEntry = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( seperators[position] ) + StatsDateLib.MINUTE )
            else:
                newEntry = StatsDateLib.getSecondsSinceEpoch( seperators[position] )
            seperators.insert( position + 1, newEntry )
            upperBound = upperBound + 1
            position = position + 2

    return seperators
def getOptionsFromParser( parser ): """ @summary: Parses and validates the options found in the parser. @return: If information was found to be valid, return options """ infos = None date = [] ( options, args )= parser.parse_args() machines = options.machines.replace( ' ','').split(',') date = options.date.replace('"','').replace("'",'') fileType = options.fileType.replace("'",'') daily = options.daily weekly = options.weekly monthly = options.monthly yearly = options.yearly fixedCurrent = options.fixedCurrent fixedPrevious = options.fixedPrevious turnOffLogging = options.turnOffLogging includeGroups = options.includeGroups machinesAreClusters = options.machinesAreClusters outputLanguage = options.outputLanguage if fixedPrevious and fixedCurrent: print _("Error. Please use only one of the fixed options,either fixedPrevious or fixedCurrent. " ) print _("Use -h for help.") print _("Program terminated.") sys.exit() counter = 0 specialParameters = [daily, monthly, weekly, yearly] for specialParameter in specialParameters: if specialParameter: counter = counter + 1 if counter > 1 : print _( "Error. Only one of the daily, weekly and yearly options can be use at a time " ) print _( "Use -h for help." ) print _( "Program terminated." ) sys.exit() elif counter == 0: print _( "Error. Please use either the -d -m -w or -y options. " ) print _( "Use -h for help." ) print _( "Program terminated." ) sys.exit() try: # Makes sure date is of valid format. # Makes sure only one space is kept between date and hour. t = time.strptime( date, '%Y-%m-%d %H:%M:%S' ) split = date.split() date = "%s %s" %( split[0], split[1] ) except: print _( "Error. The date format must be YYYY-MM-DD HH:MM:SS" ) print _( "Use -h for help." ) print _( "Program terminated." ) sys.exit() #TODO :fixStartEnd method??? 
if fixedPrevious : if daily : span = "daily" graphicType = "daily" start, end = StatsDateLib.getStartEndFromPreviousDay( date ) elif weekly: span = "weekly" graphicType = "weekly" start, end = StatsDateLib.getStartEndFromPreviousWeek( date ) elif monthly: span = "monthly" graphicType = "monthly" start, end = StatsDateLib.getStartEndFromPreviousMonth( date ) elif yearly: span = "yearly" graphicType = "yearly" start, end = StatsDateLib.getStartEndFromPreviousYear( date ) timeSpan = int( StatsDateLib.getSecondsSinceEpoch( end ) - StatsDateLib.getSecondsSinceEpoch( start ) ) / 3600 elif fixedCurrent: if daily : span = "daily" graphicType = "daily" start, end = StatsDateLib.getStartEndFromCurrentDay( date ) elif weekly: span = "weekly" graphicType = "weekly" start, end = StatsDateLib.getStartEndFromCurrentWeek( date ) elif monthly: span = "monthly" graphicType = "monthly" start, end = StatsDateLib.getStartEndFromCurrentMonth( date ) elif yearly: span = "yearly" graphicType = "yearly" start, end = StatsDateLib.getStartEndFromCurrentYear( date ) timeSpan = int( StatsDateLib.getSecondsSinceEpoch( end ) - StatsDateLib.getSecondsSinceEpoch( start ) ) / 3600 else: #TODO fix span method??? if daily : timeSpan = 24 graphicType = "daily" span = "daily" elif weekly: timeSpan = 24 * 7 graphicType = "weekly" span = "weekly" elif monthly: timeSpan = 24 * 30 graphicType = "monthly" span = "monthly" elif yearly: timeSpan = 24 * 365 graphicType = "yearly" span = "yearly" start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( date ) - timeSpan*60*60 ) end = date if fileType != "tx" and fileType != "rx": print _("Error. File type must be either tx or rx.") print _('Multiple types are not accepted.' 
) print _("Use -h for additional help.") print _("Program terminated.") sys.exit() if includeGroups == True: configParameters = StatsConfigParameters() configParameters.getAllParameters() groups = configParameters.groupParameters.groups machinesToSearch = machines[:]#Forces a copy and nota reference. for machine in machines: if machinesAreClusters == True : machineConfig = MachineConfigParameters() machineConfig.getParametersFromMachineConfigurationFile() machinesAssociatedWith = machineConfig.getMachinesAssociatedWith( machine ) machinesToTest = str(machinesAssociatedWith).replace('[','').replace(']', '').replace(',','').replace( "'",'' ).replace('"','' ).\ replace(" ",'').replace('[','').replace(']', '').replace(',','').replace( "'",'' ).replace('"','' ).replace(" ",'') for group in groups: groupsMachine = str( configParameters.groupParameters.groupsMachines[group] ).replace('[','').replace(']', '').\ replace(',','').replace( "'",'' ).replace('"','' ).replace(" ",'') #print "machinesToTest %s groupsMachine %s" %(machinesToTest,groupsMachine ) if machinesToTest in groupsMachine : if groupsMachine not in machinesToSearch: machinesToSearch.append(groupsMachine) #print machines infos = _CsvInfos( start = start , end = end , span = span, timeSpan = timeSpan, fileType = fileType, machinesForLabels = machines,\ machinesToSearch = machinesToSearch, machinesAreClusters = machinesAreClusters, dataSource = "databases", outputLanguage = outputLanguage ) return infos
def mergePicklesFromSameHour( logger = None , pickleNames = None, mergedPickleName = "",\ clientName = "" , combinedMachineName = "", currentTime = "",\ fileType = "tx" ): """ @summary: This methods receives a list of filenames referring to pickled FileStatsEntries. After the merger pickles get saved since they might be reused somewhere else. @precondition: Pickle should be of the same timespan and bucket width. If not no merging will occur. """ if logger != None : logger.debug( _("Call to mergePickles received.") ) logging = True else: logging = False entryList = [] for pickle in pickleNames:#for every pickle we eneed to merge if os.path.isfile( pickle ): entryList.append( CpickleWrapper.load( pickle ) ) else:#Use empty entry if there is no existing pickle of that name endTime = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( currentTime ) + StatsDateLib.HOUR ) entryList.append( FileStatsCollector( startTime = currentTime, endTime = endTime,logger =logger, logging =logging ) ) if logger != None : logger.warning( _("Pickle named %s did not exist. Empty entry was used instead.") %pickle ) #start off with a carbon copy of first pickle in list. 
newFSC = FileStatsCollector( files = entryList[0].files , statsTypes = entryList[0].statsTypes, startTime = entryList[0].startTime,\ endTime = entryList[0].endTime, interval=entryList[0].interval, totalWidth = entryList[0].totalWidth,\ firstFilledEntry = entryList[0].firstFilledEntry, lastFilledEntry = entryList[0].lastFilledEntry,\ maxLatency = entryList[0].maxLatency, fileEntries = entryList[0].fileEntries,logger = logger,\ logging = logging ) if PickleMerging.entryListIsValid( entryList ) == True : for i in range ( 1 , len( entryList ) ): #add other entries for file in entryList[i].files : if file not in newFSC.files : newFSC.files.append( file ) for j in range( len( newFSC.fileEntries ) ) : # add all entries newFSC.fileEntries[j].values.productTypes.extend( entryList[i].fileEntries[j].values.productTypes ) newFSC.fileEntries[j].files.extend( entryList[i].fileEntries[j].files ) newFSC.fileEntries[j].times.extend( entryList[i].fileEntries[j].times ) newFSC.fileEntries[j].nbFiles = newFSC.fileEntries[j].nbFiles + ( newFSC.fileEntries[ j ].nbFiles) for type in newFSC.statsTypes : newFSC.fileEntries[j].values.dictionary[type].extend( entryList[i].fileEntries[j].values.dictionary[type] ) newFSC.fileEntries[j].values.rows = newFSC.fileEntries[j].values.rows + entryList[i].fileEntries[j].values.rows newFSC = newFSC.setMinMaxMeanMedians( startingBucket = 0 , finishingBucket = newFSC.nbEntries -1 ) else:#Did not merge pickles named. Pickle list was not valid." if logger != None : logger.warning( _("Did not merge pickles named : %s. Pickle list was not valid.") %pickleNames ) logger.warning( _("Filled with empty entries instead.") %pickleNames ) newFSC.fileEntries = PickleMerging.fillWithEmptyEntries( nbEmptyEntries = 60 , entries = {} ) #prevents us from having ro remerge file later on. 
temp = newFSC.logger del newFSC.logger CpickleWrapper.save( newFSC, mergedPickleName ) try: os.chmod( mergedPickleName, 0777 ) except: pass #print "saved :%s" %mergedPickleName newFSC.logger = temp return newFSC
def addOptions( parser ):
    """
        @summary : Adds every option this program supports to the given
                   option parser.
    """
    # Declarative ( flags, settings ) table, kept in help-display order.
    declaredOptions = [
        ( ( "-c", "--combine" ),
          dict( action="store_true", dest="combine", default=False,
                help=_("Combine data from all specified machines.") ) ),
        ( ( "-d", "--date" ),
          dict( action="store", type="string", dest="date",
                default=StatsDateLib.getIsoFromEpoch( time.time() ),
                help=_("Decide current time. Usefull for testing.") ) ),
        ( ( "-i", "--individual" ),
          dict( action="store_true", dest="individual", default=False,
                help=_("Create individual graphics for all specified machines.") ) ),
        ( ( "-l", "--logins" ),
          dict( action="store", type="string", dest="logins", default="pds",
                help=_("Logins to be used to connect to machines.") ) ),
        ( ( "-m", "--machines" ),
          dict( action="store", type="string", dest="machines", default=LOCAL_MACHINE,
                help=_("Machines for wich you want to collect data.") ) ),
        ( ( "-o", "--outputLanguage" ),
          dict( action="store", type="string", dest="outputLanguage",
                default=LanguageTools.getMainApplicationLanguage(),
                help=_("Language in which the graphics are outputted.") ) ),
        ( ( "-s", "--span" ),
          dict( action="store", type="int", dest="timespan", default=24,
                help=_("timespan( in hours) of the graphic.") ) ),
    ]

    for flags, settings in declaredOptions:
        parser.add_option( *flags, **settings )
def addOptions( parser ):
    """
        @summary : Registers on the received parser every command line option
                   understood by this program.
    """
    # Bind once ; every option below goes through the same registration call.
    add = parser.add_option

    add( "-c", "--clients", action="store", type="string", dest="clients",
         default="", help=_("Clients' names") )

    add( "--copy", action="store_true", dest="copy", default=False,
         help=_("Create a copy file for the generated image.") )

    add( "--combineClients", action="store_true", dest="combineClients", default=False,
         help=_("Combine the data of all client into a single graphics for each graphic type.") )

    add( "-d", "--date", action="store", type="string", dest="currentTime",
         default=StatsDateLib.getIsoFromEpoch( time.time() ),
         help=_("Decide current time. Usefull for testing.") )

    add( "-f", "--fileType", action="store", type="string", dest="fileType",
         default='tx', help=_("Type of log files wanted.") )

    add( "-g", "--groupName", action="store", type="string", dest="groupName",
         default="",
         help=_("Specify a name for the combined graphics of numerous client/sources. Note : requires the --combinedClients options to work.") )

    add( "-m", "--machines", action="store", type="string", dest="machines",
         default=LOCAL_MACHINE, help=_("Machines for wich you want to collect data.") )

    add( "-n", "--collectUpToNow", action="store_true", dest="collectUpToNow",
         default=False, help=_("Collect data up to current second.") )

    add( "-o", "--outputLanguage", action="store", type="string", dest="outputLanguage",
         default=LanguageTools.getMainApplicationLanguage(),
         help=_("Language in which the graphics are outputted.") )

    add( "-p", "--products", action="store", type="string", dest="productTypes",
         default=_("All"), help=_("Specific product types to look for in the data collected.") )

    add( "-s", "--span", action="store", type="int", dest="timespan", default=12,
         help=_("timespan( in hours) of the graphic.") )

    add( "-t", "--types", type="string", dest="types", default=_("All"),
         help=_("Types of data to look for.") )
def addOptions( parser ):
    """
        @summary : Registers on the received parser every command line option
                   understood by this program.
    """
    # Bind once ; every option below goes through the same registration call.
    add = parser.add_option

    add( "-c", "--combine", action="store_true", dest="combine", default=False,
         help=_("Combine data from all specified machines.") )

    add( "-d", "--date", action="store", type="string", dest="date",
         default=StatsDateLib.getIsoFromEpoch( time.time() ),
         help=_("Decide current time. Usefull for testing.") )

    add( "-i", "--individual", action="store_true", dest="individual", default=False,
         help=_("Create individual graphics for all specified machines.") )

    add( "-l", "--logins", action="store", type="string", dest="logins",
         default="pds", help=_("Logins to be used to connect to machines.") )

    add( "-m", "--machines", action="store", type="string", dest="machines",
         default=LOCAL_MACHINE, help=_("Machines for wich you want to collect data.") )

    add( "-o", "--outputLanguage", action="store", type="string", dest="outputLanguage",
         default=LanguageTools.getMainApplicationLanguage(),
         help=_("Language in which the graphics are outputted.") )

    add( "-s", "--span", action="store", type="int", dest="timespan", default=24,
         help=_("timespan( in hours) of the graphic.") )