def updateLogFiles():
    """
        @summary : Downloads the log files from the source machines onto the local machine.
    """

    os.system("clear")
    showPresentation()

    print ""
    print ""
    print "Updating log files...This may take a while...."

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()
    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    for tag in configParameters.sourceMachinesTags:
        sourceMachines = machineParameters.getMachinesAssociatedWith(tag)

        for sourceMachine in sourceMachines:
            # Retry up to 3 times in case log files are currently being rotated.
            for i in range(3):
                status, output = commands.getstatusoutput(
                    "rsync -avzr --delete-before -e ssh %s@%s:%s %s%s/ "
                    % (machineParameters.getUserNameForMachine(sourceMachine), sourceMachine,
                       StatsPaths.PXLOG, StatsPaths.STATSLOGS, sourceMachine))
                time.sleep(10)
def getRxTxNames(localMachine, machine):
    """
        Returns a tuple containing the rx names and tx names of the currently
        running sources/clients of the desired machine.
    """

    pxManager = PXManager()

    PXPaths.RX_CONF = GeneralStatsLibraryMethods.getPathToConfigFiles(localMachine, machine, 'rx')
    PXPaths.TX_CONF = GeneralStatsLibraryMethods.getPathToConfigFiles(localMachine, machine, 'tx')
    PXPaths.TRX_CONF = GeneralStatsLibraryMethods.getPathToConfigFiles(localMachine, machine, 'trx')

    pxManager.initNames()  # This method must be called once the paths have been set.

    if localMachine != machine:

        try:
            parameters = MachineConfigParameters()
            parameters.getParametersFromMachineConfigurationFile()
            userName = parameters.getUserNameForMachine(machine)
        except:
            userName = "******"

        GeneralStatsLibraryMethods.updateConfigurationFiles(machine, userName)

    txNames = pxManager.getTxNames()
    rxNames = pxManager.getRxNames()

    return rxNames, txNames
def __generateAllRRDGraphicsForWebPage(self, graphicType, generateTotalsGraphics=True):
    """
        @summary : Generates new RRD graphics for the specified graphic type.

        @param graphicType : One of "daily", "weekly", "monthly" or "yearly".

        @raise Exception : When graphicType is unknown.
    """

    supportedGraphicTypes = {"daily": "-d", "weekly": "-w", "monthly": "-m", "yearly": "-y"}

    if graphicType not in supportedGraphicTypes:
        raise Exception("Unsupported graphicType detected in __generateAllRRDGraphicsForWebPage.")

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()

    machineConfig = MachineConfigParameters()
    machineConfig.getParametersFromMachineConfigurationFile()
    machinePairs = machineConfig.getListOfPairsAssociatedWithListOfTags(configParameters.sourceMachinesTags)

    for machinePair in machinePairs:

        machinePair = str(machinePair).replace("[", "").replace("]", "").replace(" ", "").replace("'", "").replace('"', '')

        # Individual graphics.
        commands.getstatusoutput("%sgenerateRRDGraphics.py %s --copy -f tx --machines '%s' --havingRun --date '%s' --fixedCurrent --language %s"
                                 % (self.paths.STATSBIN, supportedGraphicTypes[graphicType], machinePair, self.timeOfRequest, self.outputLanguage))

        commands.getstatusoutput("%sgenerateRRDGraphics.py %s --copy -f rx --machines '%s' --havingRun --date '%s' --fixedCurrent --language %s"
                                 % (self.paths.STATSBIN, supportedGraphicTypes[graphicType], machinePair, self.timeOfRequest, self.outputLanguage))

        if generateTotalsGraphics == True:

            # Totals graphics.
            commands.getstatusoutput('%sgenerateRRDGraphics.py %s --copy --totals -f "rx" --machines "%s" --havingRun --fixedCurrent --date "%s" --language %s'
                                     % (self.paths.STATSBIN, supportedGraphicTypes[graphicType], machinePair, self.timeOfRequest, self.outputLanguage))

            commands.getstatusoutput('%sgenerateRRDGraphics.py %s --copy --totals -f "tx" --machines "%s" --havingRun --fixedCurrent --date "%s" --language %s'
                                     % (self.paths.STATSBIN, supportedGraphicTypes[graphicType], machinePair, self.timeOfRequest, self.outputLanguage))
def transferLogFiles():
    """
        @summary : Transfers the log files straight from the source machines.
                   Log files will not be transferred if the local machine is
                   not designated as a pickling machine.
    """

    paths = StatsPaths()
    paths.setPaths()

    parameters = StatsConfigParameters()
    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()
    parameters.getAllParameters()

    individualSourceMachines = machineParameters.getMachinesAssociatedWithListOfTags(parameters.sourceMachinesTags)
    individualPicklingMachines = machineParameters.getMachinesAssociatedWithListOfTags(parameters.picklingMachines)

    for sourceMachine, picklingMachine in map(None, individualSourceMachines, individualPicklingMachines):

        if picklingMachine == LOCAL_MACHINE:  # Pickling is to be done here.

            userName = machineParameters.getUserNameForMachine(sourceMachine)
            remoteLogPath = paths.getPXPathFromMachine(paths.PXLOG, sourceMachine, userName)

            print "rsync -avzr --delete-before -e ssh %s@%s:%s %s%s/ " % (userName, sourceMachine, remoteLogPath, paths.STATSLOGS, sourceMachine)

            output = commands.getoutput("rsync -avzr --delete-before -e ssh %s@%s:%s %s%s/ "
                                        % (userName, sourceMachine, remoteLogPath, paths.STATSLOGS, sourceMachine))
            print output
def getGroupSettingsFromConfigurationFile(self):
    """
        Reads all the group settings from the configuration file.
    """

    groupParameters = GroupConfigParameters([], {}, {}, {}, {})

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    paths = StatsPaths()
    paths.setBasicPaths()
    config = paths.STATSETC + "config"

    fileHandle = open(config, "r")

    line = fileHandle.readline()  # Read until the groups section, or EOF.
    while line != "" and "[specialGroups]" not in line:
        line = fileHandle.readline()

    if line != "":  # Read until the next section, or EOF.

        line = fileHandle.readline()
        while line != "" and "[" not in line:

            if line != '\n' and line[0] != '#':

                splitLine = line.split()
                if len(splitLine) == 6:

                    groupName = splitLine[0]
                    if groupName not in groupParameters.groups:

                        groupParameters.groups.append(groupName)
                        groupParameters.groupsMachines[groupName] = []
                        groupParameters.groupFileTypes[groupName] = []
                        groupParameters.groupsMembers[groupName] = []
                        groupParameters.groupsProducts[groupName] = []

                        machines = splitLine[2].split(",")
                        for machine in machines:
                            groupParameters.groupsMachines[groupName].extend(machineParameters.getMachinesAssociatedWith(machine))

                        groupParameters.groupFileTypes[groupName] = splitLine[3]
                        groupParameters.groupsMembers[groupName].extend(splitLine[4].split(","))
                        groupParameters.groupsProducts[groupName].extend(splitLine[5].split(","))

            line = fileHandle.readline()

    fileHandle.close()

    self.groupParameters = groupParameters
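# A hypothetical illustration of the "[specialGroups]" section this parser expects.
# Each non-comment line must contain exactly 6 whitespace-separated fields (the
# second field is read but not used here); every name below is invented:
#
#     [specialGroups]
#     # groupName   tag   machineTags     fileType   members            products
#     groupA        g1    clusterTag1     tx         clientA,clientB    *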
def getDetailedParametersFromMachineConfig(self):
    '''
        @summary : Sets all the detailed parameters based on what is read in
                   the config file and the machine configuration file.

        @note : All other parameters for this object should be set prior to
                calling this method.
    '''

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    self.detailedParameters = DetailedStatsParameters()

    for machineTag, picklingMachine in map(None, self.sourceMachinesTags, self.picklingMachines):

        sourceMachines = machineParameters.getMachinesAssociatedWith(machineTag)
        picklingMachines = machineParameters.getMachinesAssociatedWith(picklingMachine)

        if sourceMachines != []:

            self.detailedParameters.sourceMachinesForTag[machineTag] = []
            self.detailedParameters.picklingMachines[machineTag] = []

            for machine in sourceMachines:
                if machine not in self.detailedParameters.individualSourceMachines:
                    self.detailedParameters.sourceMachinesForTag[machineTag].append(machine)
                    self.detailedParameters.individualSourceMachines.append(machine)
                    self.detailedParameters.sourceMachinesLogins[machine] = machineParameters.getUserNameForMachine(machine)

            for machine in picklingMachines:
                if machine not in self.detailedParameters.sourceMachinesForTag[machineTag]:
                    self.detailedParameters.picklingMachines[machineTag].append(machine)
                    self.detailedParameters.picklingMachinesLogins[machine] = machineParameters.getUserNameForMachine(machine)

    for uploadMachine in self.graphicsUpLoadMachines:

        uploadMachines = machineParameters.getMachinesAssociatedWith(uploadMachine)

        if uploadMachines != []:
            for machine in uploadMachines:
                if machine not in self.detailedParameters.uploadMachines:
                    self.detailedParameters.uploadMachines.append(machine)
                    self.detailedParameters.uploadMachinesLogins[machine] = machineParameters.getUserNameForMachine(machine)

    for dbMachine in self.machinesToBackupInDb:

        dbMachines = machineParameters.getMachinesAssociatedWith(dbMachine)

        if dbMachines != []:
            for machine in dbMachines:
                if machine not in self.detailedParameters.databaseMachines:
                    self.detailedParameters.databaseMachines.append(machine)
def updateMachineNamesBasedOnExistingMachineTags(self):
    """
        @summary : Browses the list of existing machine tags to see whether each
                   specified name is a tag or not. Tags are replaced by the
                   comma-separated list of machines they stand for.
    """

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    for i in range(len(self.machines)):
        if self.machines[i] in machineParameters.getMachineTags():
            self.machines[i] = str(machineParameters.getMachinesAssociatedWith(self.machines[i])).replace("[", "").replace("]", "").replace(" ", "")
def generateTopWebPage(self):
    """
        @summary : Generates the top web page based on the configured source
                   machine tags and the supported languages.
    """

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    supportedLanguages = LanguageTools.getSupportedLanguages()

    self.__createTheWebPage(configParameters.sourceMachinesTags, supportedLanguages)
def updatePickleFiles(infos):
    """
        @summary : Updates the pickle files from the specified start time up to
                   the specified end time.

        @param infos : Parameters gathered at program call.

        @note : If the update does not go up to now, we presume that updating log
                files could cause us to lose precious log files. Therefore log
                files are updated only when the update goes up to now, where
                recent log files are absolutely needed.
    """

    needToUpdateLogFiles = askUserAboutUpdatingLogs(infos)

    if needToUpdateLogFiles == True:
        updateLogFiles()

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()
    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    os.system("clear")
    showPresentation()

    print ""
    print ""
    print "Updating pickles....This may take a while..."
    print ""

    for tag in configParameters.sourceMachinesTags:
        sourceMachines = machineParameters.getMachinesAssociatedWith(tag)

        for sourceMachine in sourceMachines:

            status, output = commands.getstatusoutput("python %spickleUpdater.py -f rx -m %s " % (StatsPaths.STATSBIN, sourceMachine))
            print "Updated rx pickles for : %s" % (sourceMachine)

            status, output = commands.getstatusoutput("python %spickleUpdater.py -f tx -m %s " % (StatsPaths.STATSBIN, sourceMachine))
            print "Updated tx pickles for : %s" % (sourceMachine)
def main():
    """
        @summary : Small test case to see if everything works fine.
    """

    statsConfig = StatsConfigParameters()
    statsConfig.getAllParameters()
    machineconfig = MachineConfigParameters()
    machineconfig.getParametersFromMachineConfigurationFile()

    currentTimeEpochFormat = time.time() - (120 * 60)

    endTime = StatsDateLib.getIsoWithRoundedHours(StatsDateLib.getIsoFromEpoch(currentTimeEpochFormat))
    startTime = StatsDateLib.getIsoWithRoundedHours(StatsDateLib.getIsoFromEpoch(currentTimeEpochFormat - (StatsDateLib.DAY * 7)))
    print startTime, endTime

    groupName = statsConfig.groupParameters.groups[0]
    clients = statsConfig.groupParameters.groupsMembers[groupName]
    machines = statsConfig.groupParameters.groupsMachines[groupName]
    fileType = statsConfig.groupParameters.groupFileTypes[groupName]

    seperators = [startTime]
    seperators.extend(StatsDateLib.getSeparatorsWithStartTime(startTime=startTime, width=StatsDateLib.DAY * 7, interval=StatsDateLib.HOUR)[:-1])

    listOfFiles = PickleMerging.createMergedPicklesList(startTime, endTime, clients, groupName, fileType, machines, seperators)
    listOfFileSizes = MemoryManagement.getListOfFileSizes(listOfFiles)

    currentFreeMemory = MemoryManagement.getCurrentFreeMemory(0.55555)

    if MemoryManagement.getTotalSizeListOfFiles(listOfFiles) > currentFreeMemory:
        seperators = MemoryManagement.getSeperatorsForHourlyTreatments(startTime, endTime, currentFreeMemory, listOfFileSizes)
        print seperators
    else:
        print "We have %s bytes free and the pickles require %s bytes" % (currentFreeMemory, MemoryManagement.getTotalSizeListOfFiles(listOfFiles))
        print "We have enough memory to merge all these pickles."
def runPickleTransfersToRRDDatabases(infos):
    """
        @summary : Runs the transfer from pickles to RRD databases, from the start
                   times found in the backup being used up until the specified end time.

        @param infos : Parameters gathered at program call.
    """

    os.system("clear")
    showPresentation()

    print ""
    print "Updating databases...This may take a while..."
    print ""

    parameters = StatsConfigParameters()
    parameters.getAllParameters()

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    for tag in parameters.machinesToBackupInDb:

        machines = machineParameters.getMachinesAssociatedWith(tag)
        machines = str(machines).replace("[", "").replace("]", "").replace(" ", "")

        status, output = commands.getstatusoutput("%stransferPickleToRRD.py -m '%s' -e '%s'"
                                                  % (StatsPaths.STATSBIN, machines, infos.databasesRecollectionEndTime))
        print "Databases were updated for the following cluster : %s" % (tag)

    if parameters.groupParameters.groups != []:

        for group in parameters.groupParameters.groups:

            groupMembers = str(parameters.groupParameters.groupsMembers[group]).replace("[", "").replace("]", "").replace(" ", "")
            groupMachines = str(parameters.groupParameters.groupsMachines[group]).replace("[", "").replace("]", "").replace(" ", "")
            groupProducts = str(parameters.groupParameters.groupsProducts[group]).replace("[", "").replace("]", "").replace(" ", "")
            groupFileTypes = str(parameters.groupParameters.groupFileTypes[group]).replace("[", "").replace("]", "").replace(" ", "")

            status, output = commands.getstatusoutput("%stransferPickleToRRD.py -c '%s' -m '%s' -e '%s' -g '%s' -f %s -p '%s' "
                                                      % (StatsPaths.STATSBIN, groupMembers, groupMachines, infos.databasesRecollectionEndTime, group, groupFileTypes, groupProducts))
            print "Databases were updated for the following group : %s " % (group)
def getCurrentlyActiveMachine():
    """
        @return : Returns the list of currently active source machines found
                  within the config file.
    """

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()

    currentlyActiveMachines = []

    for tag in configParameters.sourceMachinesTags:
        currentlyActiveMachines.extend(machineParameters.getMachinesAssociatedWith(tag))

    # Also keep one concatenated entry per tag (cluster-name form).
    currentlyActiveMachines.extend([concat(machineParameters.getMachinesAssociatedWith(tag)) for tag in configParameters.sourceMachinesTags])

    return currentlyActiveMachines
def generateWebPage(self):
    """
        @summary : Generates the web page based on the configured source machine
                   tags and the machine parameters.
    """

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    self.printWebPage(configParameters.sourceMachinesTags, machineParameters)
def __generateAllForDailyWebPage(self, copyToColumbosFolder=True, generateTotalsGraphics=True):
    """
        @summary : Gets all the required daily graphs.

        @param copyToColumbosFolder : Whether or not to copy the generated graphics
                                      into columbo's folder.

        @param generateTotalsGraphics : Whether or not to generate the graphics
                                        displaying the totals for each cluster.

        @todo : Add proper support for copyToColumbosFolder when
                generateAllGraphics finally supports it.
    """

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()

    machineConfig = MachineConfigParameters()
    machineConfig.getParametersFromMachineConfigurationFile()
    machinePairs = machineConfig.getPairedMachinesAssociatedWithListOfTags(configParameters.sourceMachinesTags)

    for machineTag in configParameters.sourceMachinesTags:

        logins = []
        machines = configParameters.detailedParameters.sourceMachinesForTag[machineTag]

        for machine in machines:
            logins.append(machineConfig.getUserNameForMachine(machine))

        logins = str(logins).replace("[", "").replace("]", "").replace(" ", "")
        machines = str(machines).replace("[", "").replace("]", "").replace(" ", "")

        if "," in machines:
            output = commands.getoutput("%sgenerateAllGnuGraphicsForMachines.py -m '%s' -c -l '%s' --date '%s' --outputLanguage %s "
                                        % (self.paths.STATSBIN, machines.replace("'", ""), logins.replace("'", ""), self.timeOfRequest, self.outputLanguage))
        else:
            output = commands.getoutput("%sgenerateAllGnuGraphicsForMachines.py -i -m '%s' -l '%s' --date '%s' --outputLanguage %s "
                                        % (self.paths.STATSBIN, machines.replace("'", ""), logins.replace("'", ""), self.timeOfRequest, self.outputLanguage))

    if generateTotalsGraphics == True:

        for machinePair in machinePairs:

            # Generate all the daily totals graphs.
            commands.getoutput('%sgenerateRRDGraphics.py --copy --totals -f "rx" --machines "%s" -d --fixedCurrent --date "%s" --language %s'
                               % (self.paths.STATSBIN, machinePair, self.timeOfRequest, self.outputLanguage))

            commands.getoutput('%sgenerateRRDGraphics.py --copy --totals -f "tx" --machines "%s" -d --fixedCurrent --date "%s" --language %s'
                               % (self.paths.STATSBIN, machinePair, self.timeOfRequest, self.outputLanguage))
def getDataFromDatabases(sourlients, dataTypes, infos):
    """
        @summary : Gathers up all the required data from all the concerned databases.

        @param sourlients : Dictionary of the sources and clients for which we need
                            to gather up data, mapped to their machines.

        @param dataTypes : Data types for which we need to collect data.

        @param infos : Parameters gathered at program call.

        @return : The data dictionary filled with all the collected data.
    """

    data = {}

    for sourlient in sourlients.keys():

        data[sourlient] = {}
        sourlientsMachines = sourlients[sourlient]

        for machine in infos.machinesToSearch:

            if infos.machinesAreClusters == True:
                machineConfig = MachineConfigParameters()
                machineConfig.getParametersFromMachineConfigurationFile()
                machines = machineConfig.getMachinesAssociatedWith(machine)
                oldMachine = machine

                machine = str(machines).replace('[', '').replace(']', '').replace(',', '').replace("'", '').replace('"', '').replace(" ", '')

                if machine == '':
                    machine = oldMachine

            if machine in sourlientsMachines:

                data[sourlient][machine] = {}

                for dataType in dataTypes:

                    if infos.outputLanguage != 'en':
                        translatedDataType = LanguageTools.translateDataType(dataType, "en", infos.outputLanguage)
                    else:
                        translatedDataType = dataType

                    databaseName = RrdUtilities.buildRRDFileName(dataType=translatedDataType, clients=[sourlient], machines=[machine], fileType=infos.fileType)

                    if not os.path.isfile(databaseName):
                        if infos.includegroups == True:
                            databaseName = RrdUtilities.buildRRDFileName(dataType=translatedDataType, groupName=sourlient, machines=[machine], fileType=infos.fileType, usage="group")

                    lastUpdate = RrdUtilities.getDatabaseTimeOfUpdate(databaseName, infos.fileType)

                    fetchedInterval = getInterval(int(StatsDateLib.getSecondsSinceEpoch(infos.start)), lastUpdate, dataType, goal="fetchData")
                    desiredInterval = getInterval(int(StatsDateLib.getSecondsSinceEpoch(infos.start)), lastUpdate, dataType, goal="plotGraphic")
                    interval = desiredInterval

                    minimum, maximum, mean, total = getGraphicsMinMaxMeanTotal(databaseName,
                                                                               int(StatsDateLib.getSecondsSinceEpoch(infos.start)),
                                                                               int(StatsDateLib.getSecondsSinceEpoch(infos.end)),
                                                                               infos.span, fetchedInterval, desiredInterval, type="average")

                    data[sourlient][machine][dataType] = {}
                    data[sourlient][machine][dataType]["min"] = minimum
                    data[sourlient][machine][dataType]["max"] = maximum
                    data[sourlient][machine][dataType]["mean"] = mean
                    data[sourlient][machine][dataType]["total"] = total

    return data
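# Shape of the dictionary returned by getDataFromDatabases, with hypothetical
# names (one entry per source/client, per machine, per data type):
#
#     data["clientA"]["machineX"]["bytecount"] = { "min"   : 0.0,
#                                                  "max"   : 1024.0,
#                                                  "mean"  : 512.0,
#                                                  "total" : 4096.0 }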
def getOptionsFromParser(parser):
    """
        @summary : Parses and validates the options found in the parser.

        @return : If the information was found to be valid, returns the options.
    """

    infos = None
    date = []

    (options, args) = parser.parse_args()
    machines = options.machines.replace(' ', '').split(',')
    date = options.date.replace('"', '').replace("'", '')
    fileType = options.fileType.replace("'", '')
    daily = options.daily
    weekly = options.weekly
    monthly = options.monthly
    yearly = options.yearly
    fixedCurrent = options.fixedCurrent
    fixedPrevious = options.fixedPrevious
    turnOffLogging = options.turnOffLogging
    includeGroups = options.includeGroups
    machinesAreClusters = options.machinesAreClusters
    outputLanguage = options.outputLanguage

    if fixedPrevious and fixedCurrent:
        print _("Error. Please use only one of the fixed options, either fixedPrevious or fixedCurrent.")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    counter = 0
    specialParameters = [daily, monthly, weekly, yearly]
    for specialParameter in specialParameters:
        if specialParameter:
            counter = counter + 1

    if counter > 1:
        print _("Error. Only one of the daily, weekly, monthly and yearly options can be used at a time.")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()
    elif counter == 0:
        print _("Error. Please use either the -d, -m, -w or -y option.")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    try:
        # Makes sure the date is of valid format.
        # Makes sure only one space is kept between date and hour.
        t = time.strptime(date, '%Y-%m-%d %H:%M:%S')
        split = date.split()
        date = "%s %s" % (split[0], split[1])
    except:
        print _("Error. The date format must be YYYY-MM-DD HH:MM:SS")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    #TODO : fixStartEnd method???
    if fixedPrevious:

        if daily:
            span = "daily"
            graphicType = "daily"
            start, end = StatsDateLib.getStartEndFromPreviousDay(date)
        elif weekly:
            span = "weekly"
            graphicType = "weekly"
            start, end = StatsDateLib.getStartEndFromPreviousWeek(date)
        elif monthly:
            span = "monthly"
            graphicType = "monthly"
            start, end = StatsDateLib.getStartEndFromPreviousMonth(date)
        elif yearly:
            span = "yearly"
            graphicType = "yearly"
            start, end = StatsDateLib.getStartEndFromPreviousYear(date)

        timeSpan = int(StatsDateLib.getSecondsSinceEpoch(end) - StatsDateLib.getSecondsSinceEpoch(start)) / 3600

    elif fixedCurrent:

        if daily:
            span = "daily"
            graphicType = "daily"
            start, end = StatsDateLib.getStartEndFromCurrentDay(date)
        elif weekly:
            span = "weekly"
            graphicType = "weekly"
            start, end = StatsDateLib.getStartEndFromCurrentWeek(date)
        elif monthly:
            span = "monthly"
            graphicType = "monthly"
            start, end = StatsDateLib.getStartEndFromCurrentMonth(date)
        elif yearly:
            span = "yearly"
            graphicType = "yearly"
            start, end = StatsDateLib.getStartEndFromCurrentYear(date)

        timeSpan = int(StatsDateLib.getSecondsSinceEpoch(end) - StatsDateLib.getSecondsSinceEpoch(start)) / 3600

    else:

        #TODO : fix span method???
        if daily:
            timeSpan = 24
            graphicType = "daily"
            span = "daily"
        elif weekly:
            timeSpan = 24 * 7
            graphicType = "weekly"
            span = "weekly"
        elif monthly:
            timeSpan = 24 * 30
            graphicType = "monthly"
            span = "monthly"
        elif yearly:
            timeSpan = 24 * 365
            graphicType = "yearly"
            span = "yearly"

        start = StatsDateLib.getIsoFromEpoch(StatsDateLib.getSecondsSinceEpoch(date) - timeSpan * 60 * 60)
        end = date

    if fileType != "tx" and fileType != "rx":
        print _("Error. File type must be either tx or rx.")
        print _("Multiple types are not accepted.")
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()

    machinesToSearch = machines[:]  # Forces a copy and not a reference.

    if includeGroups == True:

        configParameters = StatsConfigParameters()
        configParameters.getAllParameters()
        groups = configParameters.groupParameters.groups

        for machine in machines:

            if machinesAreClusters == True:

                machineConfig = MachineConfigParameters()
                machineConfig.getParametersFromMachineConfigurationFile()
                machinesAssociatedWith = machineConfig.getMachinesAssociatedWith(machine)
                machinesToTest = str(machinesAssociatedWith).replace('[', '').replace(']', '').replace(',', '').replace("'", '').replace('"', '').replace(" ", '')

                for group in groups:
                    groupsMachine = str(configParameters.groupParameters.groupsMachines[group]).replace('[', '').replace(']', '').replace(',', '').replace("'", '').replace('"', '').replace(" ", '')

                    if machinesToTest in groupsMachine:
                        if groupsMachine not in machinesToSearch:
                            machinesToSearch.append(groupsMachine)

    infos = _CsvInfos(start=start, end=end, span=span, timeSpan=timeSpan, fileType=fileType, machinesForLabels=machines,
                      machinesToSearch=machinesToSearch, machinesAreClusters=machinesAreClusters, dataSource="databases", outputLanguage=outputLanguage)

    return infos
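# The fallback branch above derives the start date by subtracting the span (in hours)
# from the requested end date. A minimal, self-contained sketch of that arithmetic
# using only the standard time module (StatsDateLib wraps the same idea in its own
# ISO helpers); the function name and sample date below are illustrative only:
def _exampleStartFromSpan(endDate, timeSpanInHours):
    """ Returns the ISO date that lies timeSpanInHours before endDate. """
    import time
    endInEpoch = time.mktime(time.strptime(endDate, '%Y-%m-%d %H:%M:%S'))
    startInEpoch = endInEpoch - timeSpanInHours * 60 * 60
    return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(startInEpoch))

# _exampleStartFromSpan("2008-01-08 12:00:00", 24) would give "2008-01-07 12:00:00".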
def main():
    """
        @summary : Gets all the parameters from the config file.
                   Updates the pickle files.
                   Generates all the required graphics.
                   Generates the required csv files.
                   Updates the different web pages.
                   Updates the desired databases.
                   Uploads graphics to the required machines.
                   Monitors the result of all the activities.
    """

    if GeneralStatsLibraryMethods.processIsAlreadyRunning("pxStatsStartup") == False:

        setGlobalLanguageParameters()

        GeneralStatsLibraryMethods.createLockFile("pxStatsStartup")

        currentTime = time.time()
        currentTimeInIsoFormat = StatsDateLib.getIsoFromEpoch(currentTime)

        generalParameters = StatsConfigParameters()
        generalParameters.getAllParameters()

        machineParameters = MachineConfigParameters()
        machineParameters.getParametersFromMachineConfigurationFile()

        validateParameters(generalParameters, machineParameters, None)

        tagsNeedingUpdates = getMachinesTagsNeedingUpdates(generalParameters, machineParameters)

        if tagsNeedingUpdates == None:  # No previous parameters were found.
            saveCurrentMachineParameters(machineParameters)
        elif tagsNeedingUpdates != []:
            updateFilesAssociatedWithMachineTags(tagsNeedingUpdates, machineParameters)
            saveCurrentMachineParameters(machineParameters)

        updatePickles(generalParameters, machineParameters, currentTimeInIsoFormat)

        updateDatabases(generalParameters, machineParameters, currentTimeInIsoFormat)

        backupRRDDatabases(generalParameters, currentTimeInIsoFormat, generalParameters.nbDbBackupsToKeep)

        #updateCsvFiles( )

        getGraphicsForWebPages(generalParameters, currentTimeInIsoFormat)

        updateWebPages(generalParameters)

        #uploadGraphicFiles( generalParameters, machineParameters )

        cleanUp(generalParameters, currentTimeInIsoFormat, generalParameters.daysOfPicklesToKeep)

        monitorActivities(generalParameters, currentTimeInIsoFormat)

        updateManager = AutomaticUpdatesManager(generalParameters.nbAutoUpdatesLogsToKeep, "pxStatsStartup")
        updateManager.addAutomaticUpdateToLogs(currentTimeInIsoFormat)

        GeneralStatsLibraryMethods.deleteLockFile("pxStatsStartup")

        print _("Finished.")

    else:

        print _("Error. Another instance of pxStatsStartup is already running.")
        print _("Only one instance of this software can be run at once.")
        print _("Please terminate the other instance or wait for it to end its execution")
        print _("before running this program again.")
        print _("Program terminated.")
        sys.exit()
def getAllClientOrSourcesNamesFromMachines(infos):
    """
        @summary : Goes through all the machines and finds out which clients or
                   sources currently run on each of them.

                   To make sure no confusion arises if two clients or sources have
                   the same name on different machines or clusters, the returned
                   names are associated with all the machines/clusters they belong
                   to, so the caller can handle the situation as it pleases.

        @param infos : Infos that were gathered at program call.

        @return : The dictionary containing the names and their associated machines.
    """

    sourlients = {}

    for machine in infos.machinesToSearch:

        if infos.machinesAreClusters == True:

            machineConfig = MachineConfigParameters()
            machineConfig.getParametersFromMachineConfigurationFile()
            machines = machineConfig.getMachinesAssociatedWith(machine)

            if machines != []:

                machine = str(machines).replace('[', '').replace(']', '').replace(',', '').replace("'", '').replace('"', '').replace(" ", '')

                if machine != '':
                    rxNames, txNames = GeneralStatsLibraryMethods.getRxTxNamesHavingRunDuringPeriod(infos.start, infos.end, machines, pattern=None, havingrunOnAllMachines=True)
                else:
                    rxNames, txNames = [], []

            else:  # Might be a group's machine; in this case do not force having run on all machines.

                if machine != '':
                    rxNames, txNames = GeneralStatsLibraryMethods.getRxTxNamesHavingRunDuringPeriod(infos.start, infos.end, [machine], pattern=None, havingrunOnAllMachines=False)
                else:
                    rxNames, txNames = [], []

        else:  # Not a cluster: use the machine name directly and force having run exclusively on the specified machine.

            if machine != '':
                rxNames, txNames = GeneralStatsLibraryMethods.getRxTxNamesHavingRunDuringPeriod(infos.start, infos.end, [machine], pattern=None, havingrunOnAllMachines=True)
            else:
                rxNames, txNames = [], []

        if infos.fileType == "rx":
            namesToAdd = rxNames
        elif infos.fileType == "tx":
            namesToAdd = txNames

        for nameToAdd in namesToAdd:
            if nameToAdd in sourlients.keys():
                if machine not in sourlients[nameToAdd]:
                    sourlients[nameToAdd].append(machine)
            else:
                sourlients[nameToAdd] = [machine]

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()
    groups = configParameters.groupParameters.groups

    for group in groups:
        if configParameters.groupParameters.groupFileTypes[group] == infos.fileType:
            sourlients[group] = [str(configParameters.groupParameters.groupsMachines[group]).replace('[', '').replace(']', '').replace(',', '').replace("'", '').replace('"', '').replace(" ", '')]

    return sourlients
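# Shape of the dictionary returned above, with hypothetical names. Note that for
# clusters the machine key is the flattened concatenation of the cluster's machine
# names (the replace chain strips brackets, quotes, commas and spaces), and group
# tag names are stored the same way:
#
#     { "clientA" : ["machineXmachineY"],
#       "groupA"  : ["machineXmachineY"] }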
def getRxTxNamesForWebPages(start, end):
    """
        @summary : Returns two dictionaries, rx and tx, whose keys are the rx or tx
                   names having run between start and end. If a key has an associated
                   value different from "", the entry is a group tag name and the
                   value is an html description of the group.

        @param start : Start of the span to look into.

        @param end : End of the span to look into.

        @return : See summary.
    """

    rxNames = {}
    txNames = {}

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()
    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    for sourceMachinesTag in configParameters.sourceMachinesTags:

        machines = machineParameters.getMachinesAssociatedWith(sourceMachinesTag)

        newRxNames, newTxNames = GeneralStatsLibraryMethods.getRxTxNamesHavingRunDuringPeriod(start, end, machines)

        for rxName in newRxNames:
            description = "<font color='#008800'>--Source Name : </font> <font color='#006699'>%s</font> <br> <font color='#008800'>--Machine(s) : </font><font color='#006699'>%s</font> <br> " % (rxName, str(machines).replace('[', '').replace(']', ''))
            rxNames[rxName] = description

        for txName in newTxNames:
            description = "<font color='#008800'>--Client Name : </font> <font color='#006699'>%s</font> <br> <font color='#008800'>--Machine(s) : </font><font color='#006699'>%s</font> <br> " % (txName, str(machines).replace('[', '').replace(']', ''))
            txNames[txName] = description

    for group in configParameters.groupParameters.groups:

        machines = configParameters.groupParameters.groupsMachines[group]
        machines = str(machines).replace("[", "").replace("]", "").replace(" ", "").replace(",", ", ")

        members = configParameters.groupParameters.groupsMembers[group]
        members = str(members).replace("[", "").replace("]", "").replace(" ", "").replace(",", ", ")

        fileTypes = configParameters.groupParameters.groupFileTypes[group]
        fileTypes = str(fileTypes).replace("[", "").replace("]", "").replace(" ", "").replace(",", ", ")

        products = configParameters.groupParameters.groupsProducts[group]
        products = str(products).replace("[", "").replace("]", "").replace(" ", "").replace(",", ", ")

        description = "<font color='#008800'>--Group Name : </font> <font color='#006699'>%s</font> <br> <font color='#008800'>--Machine(s) : </font><font color='#006699'>%s</font> <br> <font color='#008800'>--Member(s) : </font><font color='#006699'>%s</font> <br> <font color='#008800'>--FileType : </font><font color='#006699'>%s</font> <br> <font color='#008800'>--Product(s) pattern(s) : </font><font color='#006699'>%s</font> " % (group, machines, members, fileTypes, products)

        if configParameters.groupParameters.groupFileTypes[group] == "tx":
            txNames[group] = description
        elif configParameters.groupParameters.groupFileTypes[group] == "rx":
            rxNames[group] = description

    return rxNames, txNames
def getMaximumGaps(self):
    """
        @summary : Reads columbo's maxSettings.conf file.

        @return : The maximum gaps dictionary, containing the maximum gap allowed
                  between two transfers for a specific client, e.g. { clientX: 60 }.
    """

    allNames = []
    rxNames = []
    txNames = []
    maximumGaps = {}

    try:

        machineConfig = MachineConfigParameters()
        machineConfig.getParametersFromMachineConfigurationFile()

        generalParameters = StatsConfigParameters()
        generalParameters.getAllParameters()

        self.__updateMaxSettingsFile(generalParameters)

        for tag in generalParameters.sourceMachinesTags:

            try:
                machines = generalParameters.detailedParameters.sourceMachinesForTag[tag]
                machine = machines[0]
            except:
                raise Exception("Invalid tag found in main configuration file.")

            newRxNames, newTxNames = GeneralStatsLibraryMethods.getRxTxNames(LOCAL_MACHINE, machine)

            rxNames.extend(newRxNames)
            txNames.extend(newTxNames)

        allNames.extend(rxNames)
        allNames.extend(txNames)

        circuitsRegex, default_circuit, timersRegex, default_timer, pxGraphsRegex, default_pxGraph = readMaxFile.readQueueMax(self.maxSettingsFile, "PX")

        # Fill in all explicitly set maximum gaps.
        for key in timersRegex.keys():
            values = timersRegex[key]
            newKey = key.replace("^", "").replace("$", "").replace(".", "")
            maximumGaps[newKey] = values

        # Add all clients/sources for which no value was set.
        for name in allNames:

            if name not in maximumGaps.keys():  # No value was explicitly set.

                nameFoundWithWildcard = False

                for key in timersRegex.keys():  # In case a wildcard character was used.
                    cleanKey = key.replace("^", "").replace("$", "").replace(".", "")
                    if fnmatch.fnmatch(name, cleanKey):
                        maximumGaps[name] = timersRegex[key]
                        nameFoundWithWildcard = True

                if nameFoundWithWildcard == False:
                    maximumGaps[name] = default_timer

    except:  # In case of a corrupt or absent file.
        maximumGaps = {}

    return maximumGaps
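# The wildcard fallback above relies on fnmatch once the regex anchors have been
# stripped from the maxSettings keys. A small, self-contained sketch of that lookup
# logic; the client names and gap values below are invented for illustration only:
def _exampleGapLookup():
    """ Illustrates how a wildcard key provides a gap for unlisted clients. """
    import fnmatch
    timers = {"clientA": 60, "wxo-*": 120}  # hypothetical maxSettings entries
    defaultTimer = 30
    for name in ["clientA", "wxo-b1", "somethingElse"]:
        gap = defaultTimer
        if name in timers:
            gap = timers[name]  # explicitly set value
        else:
            for key in timers.keys():  # wildcard match, otherwise keep the default
                if fnmatch.fnmatch(name, key):
                    gap = timers[key]
        print "%s -> maximum gap : %s" % (name, gap)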
def getRxTxNamesForWebPages(start, end):
    """
        @summary : Returns two dictionaries, rx and tx, whose keys are the rx
                   or tx names having run between start and end. If a key has
                   an associated value different from "", the entry is a group
                   tag name and the value is an html description of the group.

        @param start : Start of the span to look into.

        @param end : End of the span to look into.

        @return : See summary.
    """

    rxNames = {}
    txNames = {}

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    for sourceMachinesTag in configParameters.sourceMachinesTags:

        machines = machineParameters.getMachinesAssociatedWith(sourceMachinesTag)

        newRxNames, newTxNames = GeneralStatsLibraryMethods.getRxTxNamesHavingRunDuringPeriod(start, end, machines)

        for rxName in newRxNames:
            description = "<font color='#008800'>--Source Name : </font> <font color='#006699'>%s</font> <br> <font color='#008800'>--Machine(s) : </font><font color='#006699'>%s</font> <br> " % (rxName, str(machines).replace('[', '').replace(']', ''))
            rxNames[rxName] = description

        for txName in newTxNames:
            description = "<font color='#008800'>--Client Name : </font> <font color='#006699'>%s</font> <br> <font color='#008800'>--Machine(s) : </font><font color='#006699'>%s</font> <br> " % (txName, str(machines).replace('[', '').replace(']', ''))
            txNames[txName] = description

    for group in configParameters.groupParameters.groups:
        #print group
        machines = configParameters.groupParameters.groupsMachines[group]
        machines = str(machines).replace("[", "").replace("]", "").replace(" ", "").replace(",", ", ")

        members = configParameters.groupParameters.groupsMembers[group]
        members = str(members).replace("[", "").replace("]", "").replace(" ", "").replace(",", ", ")

        fileTypes = configParameters.groupParameters.groupFileTypes[group]
        fileTypes = str(fileTypes).replace("[", "").replace("]", "").replace(" ", "").replace(",", ", ")

        products = configParameters.groupParameters.groupsProducts[group]
        products = str(products).replace("[", "").replace("]", "").replace(" ", "").replace(",", ", ")

        description = "<font color='#008800'>--Group Name : </font> <font color='#006699'>%s</font> <br> <font color='#008800'>--Machine(s) : </font><font color='#006699'>%s</font> <br> <font color='#008800'>--Member(s) : </font><font color='#006699'>%s</font> <br> <font color='#008800'>--FileType : </font><font color='#006699'>%s</font> <br> <font color='#008800'>--Product(s) pattern(s) : </font><font color='#006699'>%s</font> " % (group, machines, members, fileTypes, products)

        if configParameters.groupParameters.groupFileTypes[group] == "tx":
            txNames[group] = description
        elif configParameters.groupParameters.groupFileTypes[group] == "rx":
            rxNames[group] = description

    return rxNames, txNames
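# ----------------------------------------------------------------------------------------
# Illustrative sketch (not part of the original pxStats code) : each rx/tx entry above is
# described with a small html fragment. The helper below rebuilds that fragment for a
# single name; the function name is hypothetical and the colours and layout are the ones
# already used above.
def _buildNameDescription(label, name, machines):
    """
        Returns the html fragment used for a single rx/tx entry, e.g.
        _buildNameDescription('Source Name', 'sourceX', "'machine1', 'machine2'").
    """
    return "<font color='#008800'>--%s : </font> <font color='#006699'>%s</font> <br> <font color='#008800'>--Machine(s) : </font><font color='#006699'>%s</font> <br> " % (label, name, machines)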
def __generateAllForDailyWebPage(self, copyToColumbosFolder=True, generateTotalsGraphics=True):
    """
        @summary : Gets all the required daily graphs.

        @param copyToColumbosFolder : Whether or not to copy the generated
                                      graphics to columbo's folder.

        @param generateTotalsGraphics : Whether or not to generate the graphics
                                        displaying the totals for each cluster.

        @todo : Add proper support for copyToColumbosFolder when
                generateAllGraphics finally supports it.
    """

    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()

    machineConfig = MachineConfigParameters()
    machineConfig.getParametersFromMachineConfigurationFile()

    machinePairs = machineConfig.getPairedMachinesAssociatedWithListOfTags(configParameters.sourceMachinesTags)

    for machineTag in configParameters.sourceMachinesTags:

        logins = []
        machines = configParameters.detailedParameters.sourceMachinesForTag[machineTag]

        for machine in machines:
            logins.append(machineConfig.getUserNameForMachine(machine))

        logins = str(logins).replace("[", "").replace("]", "").replace(" ", "")
        machines = str(machines).replace("[", "").replace("]", "").replace(" ", "")

        if "," in machines:
            output = commands.getoutput("%sgenerateAllGnuGraphicsForMachines.py -m '%s' -c -l '%s' --date '%s' --outputLanguage %s " \
                                        % (self.paths.STATSBIN, machines.replace("'", ""), logins.replace("'", ""), self.timeOfRequest, self.outputLanguage))
            #print "%sgenerateAllGnuGraphicsForMachines.py -m '%s' -c -l '%s' --date '%s' --outputLanguage %s " % (self.paths.STATSBIN, machines.replace("'", ""), logins.replace("'", ""), self.timeOfRequest, self.outputLanguage)
            #print output
        else:
            output = commands.getoutput("%sgenerateAllGnuGraphicsForMachines.py -i -m '%s' -l '%s' --date '%s' --outputLanguage %s " \
                                        % (self.paths.STATSBIN, machines.replace("'", ""), logins.replace("'", ""), self.timeOfRequest, self.outputLanguage))
            #print "%sgenerateAllGnuGraphicsForMachines.py -i -m '%s' -l '%s' --date '%s' --outputLanguage %s " % (self.paths.STATSBIN, machines.replace("'", ""), logins.replace("'", ""), self.timeOfRequest, self.outputLanguage)
            #print output

    if generateTotalsGraphics == True:

        for machinePair in machinePairs:

            #Generate all the daily total graphs.
            commands.getoutput('%sgenerateRRDGraphics.py --copy --totals -f "rx" --machines "%s" -d --fixedCurrent --date "%s" --language %s' \
                               % (self.paths.STATSBIN, machinePair, self.timeOfRequest, self.outputLanguage))
            #print '%sgenerateRRDGraphics.py --copy --totals -f "rx" --machines "%s" -d --fixedCurrent --date "%s" --language %s' % (self.paths.STATSBIN, machinePair, self.timeOfRequest, self.outputLanguage)

            commands.getoutput('%sgenerateRRDGraphics.py --copy --totals -f "tx" --machines "%s" -d --fixedCurrent --date "%s" --language %s' \
                               % (self.paths.STATSBIN, machinePair, self.timeOfRequest, self.outputLanguage))
def runPickleTransfersToRRDDatabases(infos):
    """
        @summary : Runs the transfer from pickles to rrd databases, from the
                   start times found in the backup being used and until the
                   specified end time.

        @param infos : Infos that were gathered at program call.
    """

    os.system("clear")
    showPresentation()

    print ""
    print "Updating databases...This may take a while..."
    print ""

    parameters = StatsConfigParameters()
    parameters.getAllParameters()

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    for tag in parameters.machinesToBackupInDb:

        machines = machineParameters.getMachinesAssociatedWith(tag)
        machines = str(machines).replace("[", "").replace("]", "").replace(" ", "")

        status, output = commands.getstatusoutput("%stransferPickleToRRD.py -m '%s' -e '%s'" % (StatsPaths.STATSBIN, machines, infos.databasesRecollectionEndTime))
        #print "%stransferPickleToRRD.py -m '%s' -e '%s'" % (StatsPaths.STATSBIN, machines, infos.databasesRecollectionEndTime)
        #print "output:%s" % output

        print "Databases were updated for the following cluster : %s" % (tag)

    if parameters.groupParameters.groups != []:

        for group in parameters.groupParameters.groups:

            groupMembers = str(parameters.groupParameters.groupsMembers[group]).replace("[", "").replace("]", "").replace(" ", "")
            groupMachines = str(parameters.groupParameters.groupsMachines[group]).replace("[", "").replace("]", "").replace(" ", "")
            groupProducts = str(parameters.groupParameters.groupsProducts[group]).replace("[", "").replace("]", "").replace(" ", "")
            groupFileTypes = str(parameters.groupParameters.groupFileTypes[group]).replace("[", "").replace("]", "").replace(" ", "")

            status, output = commands.getstatusoutput("%stransferPickleToRRD.py -c '%s' -m '%s' -e '%s' -g '%s' -f %s -p '%s' " % (StatsPaths.STATSBIN, groupMembers, groupMachines, infos.databasesRecollectionEndTime, group, groupFileTypes, groupProducts))
            #print "%stransferPickleToRRD.py -c '%s' -m '%s' -e '%s' -g '%s' -f %s -p '%s' " % (StatsPaths.STATSBIN, groupMembers, groupMachines, infos.databasesRecollectionEndTime, group, groupFileTypes, groupProducts)
            #print output

            print "Databases were updated for the following group : %s " % (group)
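# ----------------------------------------------------------------------------------------
# Illustrative sketch (not part of the original pxStats code) : the cluster update above
# shells out to transferPickleToRRD.py. The helper below only rebuilds the command string
# used for a cluster, without executing it, so the call can be inspected or logged first;
# the function name is hypothetical and the flags are the ones already used above.
def _buildClusterTransferCommand(statsBin, machines, endTime):
    """
        Returns the transferPickleToRRD.py invocation used for a cluster, e.g.
        _buildClusterTransferCommand('/usr/local/bin/', 'machine1,machine2', '2008-01-01 00:00:00').
    """
    return "%stransferPickleToRRD.py -m '%s' -e '%s'" % (statsBin, machines, endTime)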
def getDataFromDatabases(sourlients, dataTypes, infos):
    """
        @summary : Gathers up all the required data from all the concerned
                   databases.

        @param sourlients : List of sources and clients for which we need to
                            gather up data.

        @param dataTypes : Data types for which we need to collect data.

        @param infos : Infos that were gathered at program call.

        @return : The data dictionary filled with all the collected data.
    """

    data = {}

    for sourlient in sourlients.keys():

        data[sourlient] = {}
        sourlientsMachines = sourlients[sourlient]

        for machine in infos.machinesToSearch:

            if infos.machinesAreClusters == True:
                machineConfig = MachineConfigParameters()
                machineConfig.getParametersFromMachineConfigurationFile()
                machines = machineConfig.getMachinesAssociatedWith(machine)
                oldMachine = machine

                machine = str(machines).replace('[', '').replace(']', '').replace(',', '').replace("'", '').replace('"', '').replace(" ", '')

                if machine == '':
                    #print "trouvaille !!!"
                    machine = oldMachine

            if machine in sourlientsMachines:
                data[sourlient][machine] = {}

                for dataType in dataTypes:

                    if infos.outputLanguage != 'en':
                        translatedDataType = LanguageTools.translateDataType(dataType, "en", infos.outputLanguage)
                    else:
                        translatedDataType = dataType

                    databaseName = RrdUtilities.buildRRDFileName(dataType=translatedDataType, clients=[sourlient], machines=[machine], fileType=infos.fileType)

                    if not os.path.isfile(databaseName):
                        if infos.includegroups == True:
                            databaseName = RrdUtilities.buildRRDFileName(dataType=translatedDataType, groupName=sourlient, machines=[machine], fileType=infos.fileType, usage="group")

                    lastUpdate = RrdUtilities.getDatabaseTimeOfUpdate(databaseName, infos.fileType)

                    fetchedInterval = getInterval(int(StatsDateLib.getSecondsSinceEpoch(infos.start)), lastUpdate, dataType, goal="fetchData")
                    desiredInterval = getInterval(int(StatsDateLib.getSecondsSinceEpoch(infos.start)), lastUpdate, dataType, goal="plotGraphic")
                    interval = desiredInterval

                    minimum, maximum, mean, total = getGraphicsMinMaxMeanTotal(databaseName, int(StatsDateLib.getSecondsSinceEpoch(infos.start)), \
                                                                               int(StatsDateLib.getSecondsSinceEpoch(infos.end)), infos.span, \
                                                                               fetchedInterval, desiredInterval, type="average")

                    data[sourlient][machine][dataType] = {}
                    data[sourlient][machine][dataType]["min"] = minimum
                    data[sourlient][machine][dataType]["max"] = maximum
                    data[sourlient][machine][dataType]["mean"] = mean
                    data[sourlient][machine][dataType]["total"] = total

    return data
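# ----------------------------------------------------------------------------------------
# Illustrative sketch (not part of the original pxStats code) : getDataFromDatabases()
# returns a dictionary nested as data[sourlient][machine][dataType] with the keys "min",
# "max", "mean" and "total". The helper below shows one safe way to read a single
# statistic out of that structure; the function name and the fallback value are hypothetical.
def _getStatistic(data, sourlient, machine, dataType, statistic, default=0.0):
    """
        Returns data[sourlient][machine][dataType][statistic], or 'default'
        when any level of the dictionary is missing.
    """
    try:
        return data[sourlient][machine][dataType][statistic]
    except KeyError:
        return default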
def getDetailedParametersFromMachineConfig(self):
    '''
        @summary : Sets all the detailed parameters of this object based on
                   what is read in the config file and the machine
                   configuration file.

        @note : All parameters of this object should be set prior to calling
                this method.
    '''

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    self.detailedParameters = DetailedStatsParameters()

    for machineTag, picklingMachine in map(None, self.sourceMachinesTags, self.picklingMachines):

        sourceMachines = machineParameters.getMachinesAssociatedWith(machineTag)
        picklingMachines = machineParameters.getMachinesAssociatedWith(picklingMachine)

        if sourceMachines != []:

            self.detailedParameters.sourceMachinesForTag[machineTag] = []
            self.detailedParameters.picklingMachines[machineTag] = []

            for machine in sourceMachines:
                if machine not in self.detailedParameters.individualSourceMachines:
                    self.detailedParameters.sourceMachinesForTag[machineTag].append(machine)
                    self.detailedParameters.individualSourceMachines.append(machine)
                    self.detailedParameters.sourceMachinesLogins[machine] = machineParameters.getUserNameForMachine(machine)

            for machine in picklingMachines:
                if machine not in self.detailedParameters.sourceMachinesForTag[machineTag]:
                    self.detailedParameters.picklingMachines[machineTag].append(machine)
                    self.detailedParameters.picklingMachinesLogins[machine] = machineParameters.getUserNameForMachine(machine)

    for uploadMachine in self.graphicsUpLoadMachines:

        uploadMachines = machineParameters.getMachinesAssociatedWith(uploadMachine)

        if uploadMachines != []:
            for machine in uploadMachines:
                if machine not in self.detailedParameters.uploadMachines:
                    self.detailedParameters.uploadMachines.append(machine)
                    self.detailedParameters.uploadMachinesLogins[machine] = machineParameters.getUserNameForMachine(machine)

    for dbMachine in self.machinesToBackupInDb:

        dbMachines = machineParameters.getMachinesAssociatedWith(dbMachine)

        if dbMachines != []:
            for machine in dbMachines:
                if machine not in self.detailedParameters.databaseMachines:
                    self.detailedParameters.databaseMachines.append(machine)
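# ----------------------------------------------------------------------------------------
# Illustrative sketch (not part of the original pxStats code) : the loop above pairs every
# source machine tag with its pickling machine through the Python 2 idiom
# map(None, listA, listB), which behaves like zip() but pads the shorter list with None.
# The helper below reproduces that pairing explicitly; the function name is hypothetical.
def _pairTagsWithPicklingMachines(sourceMachinesTags, picklingMachines):
    """
        Returns a list of (tag, picklingMachine) tuples, padding the shorter
        list with None, e.g. _pairTagsWithPicklingMachines(['tag1', 'tag2'], ['pkl1'])
        == [('tag1', 'pkl1'), ('tag2', None)].
    """
    pairs = []
    longest = max(len(sourceMachinesTags), len(picklingMachines))

    for i in range(longest):
        if i < len(sourceMachinesTags):
            tag = sourceMachinesTags[i]
        else:
            tag = None
        if i < len(picklingMachines):
            picklingMachine = picklingMachines[i]
        else:
            picklingMachine = None
        pairs.append((tag, picklingMachine))

    return pairs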