def main():
    """
    @summary : Gathers options, then makes call to transferPickleToRRD.
    """
    paths = StatsPaths()
    paths.setPaths()

    language = 'en'
    setGlobalLanguageParameters()
    createPaths(paths)

    # Build the rotating logger used by the transfer.
    logger = Logger(paths.STATSLOGGING + 'stats_' + 'rrd_transfer' + '.log.notb',
                    'INFO', 'TX' + 'rrd_transfer', bytes=10000000)
    logger = logger.getLogger()

    parser = createParser()
    infos = getOptionsFromParser(parser, logger=logger)

    transferPickleToRRD(infos, logger=logger)
def getSavedList(self, user, clients):
    """
    @summary : Returns the checksum of the files contained
               in the saved list.

    @param user    : Name of the user the saved list belongs to.
    @param clients : List of client names; their concatenation
                     selects the file to load.

    @return : The saved file list dictionary, or {} when no
              saved list exists.
    """
    self.savedFileList = {}

    statsPaths = StatsPaths()
    statsPaths.setPaths()
    directory = statsPaths.STATSDATA + "fileAccessVersions/"

    combinedName = ""
    for client in clients:
        combinedName = combinedName + client
    fileName = combinedName + "_" + user

    try:
        self.savedFileList = CpickleWrapper.load(directory + fileName)
        # Bugfix : was "self.savedFileLis" (typo) which raised an
        # AttributeError that the bare except silently swallowed,
        # so a None pickle was returned as-is instead of {}.
        if self.savedFileList == None:
            self.savedFileList = {}
    except:  # if file does not exist
        pass

    return self.savedFileList
def restoreDatabaseUpdateTimes(timeToRestore, currentTime, nbBackupsToKeep): """ @summary : Copy all databases into a folder sporting the data of the backup. @param timeToRestore : Time of the DB backups to set as current DB. @param currentTime : Time of the call to the script. @param nbBackupsToKeep : total number of backups to keep. """ statsPaths = StatsPaths() statsPaths.setPaths() source = statsPaths.STATSDBUPDATESBACKUPS + "/%s" % timeToRestore destination = statsPaths.STATSCURRENTDBUPDATES #Archive current Database backupRRDDatabases.backupDatabaseUpdateTimes(currentTime, nbBackupsToKeep, foldersToPreserve=[source]) #restore desired status, output = commands.getstatusoutput("rm -r %s" % (destination)) os.makedirs(destination) status, output = commands.getstatusoutput("cp -rf %s/* %s" % (source, destination)) print output
def getClientsCurrentFileList(self, clients):
    """
    @summary : Gets all the files associated with the list of clients.

    @note : Client list is used here since we need to find all the
            pickles that will be used in a merger. Thus unlike all
            other methods we dont refer here to the combined name
            but rather to a list of individual machine names.

    @return : Dictionary mapping every found file to its mtime.
    """
    pickleNames = []

    statsPaths = StatsPaths()
    statsPaths.setPaths()

    for client in clients:
        clientPattern = statsPaths.STATSPICKLES + client + "/*/*"  # _??
        for folder in glob.glob(clientPattern):
            if os.path.isdir(folder):
                pickleNames.extend(glob.glob(folder + "/" + "*_??"))

    for pickleName in pickleNames:
        self.currentClientFileList[pickleName] = os.path.getmtime(pickleName)

    return self.currentClientFileList
def updateFilesAssociatedWithMachineTags( tagsNeedingUpdates, machineParameters ): """ @summary : For all the tags for wich a machine was change we rename all the files associated with that tag. @param tagsNeedingUpdates: List of tags that have been modified since the last call. """ paths = StatsPaths() paths.setPaths() previousParameters = getMachineParametersFromPreviousCall() for tag in tagsNeedingUpdates: previousCombinedMachineNames = "" previousCombinedMachineNames = previousCombinedMachineNames.join( [ x for x in previousParameters.getMachinesAssociatedWith( tag ) ] ) currentCombinedMachineNames = "" currentCombinedMachineNames = currentCombinedMachineNames.join( [ x for x in machineParameters.getMachinesAssociatedWith( tag ) ]) output = commands.getoutput( "%sfileRenamer.py -o %s -n %s --overrideConfirmation" %( paths.STATSTOOLS, previousCombinedMachineNames, currentCombinedMachineNames ) ) print "%sfileRenamer.py -o %s -n %s --overrideConfirmation" %( paths.STATSTOOLS, previousCombinedMachineNames, currentCombinedMachineNames ) print output
def getPreviousMonitoringJob( self, currentTime ):
    """
    @summary : Gets the previous crontab from the pickle file.

    @param currentTime : Current time, in iso format.

    @return : Time of the previous monitoring job; when no pickle file
              exists yet, today's midnight (based on currentTime) is
              returned instead.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    pickleFile = "%spreviousMonitoringJob" % statsPaths.STATSMONITORING

    if os.path.isfile( pickleFile ):
        fileHandle = open( pickleFile, "r" )
        previousMonitoringJob = pickle.load( fileHandle )
        fileHandle.close()
    else:
        # No prior job on record : fall back to today's midnight.
        previousMonitoringJob = StatsDateLib.getIsoTodaysMidnight( currentTime )

    return previousMonitoringJob
def __getDocFilesToLinkTo(self, language):
    """
    @summary : Gathers and returns all the documentation files
               currently available for the given language.

    @param language : Language suffix the html files must sport.

    @return : The sorted list of file names to link to.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths(self.mainLanguage)

    htmlFolder = statsPaths.STATSDOC + "html/"
    pattern = "*_%s.html" % (language)

    filesToLinkTo = [os.path.basename(entry)
                     for entry in os.listdir(htmlFolder)
                     if fnmatch.fnmatch(os.path.basename(entry), pattern)]
    filesToLinkTo.sort()

    return filesToLinkTo
def saveList(self, user, clients): """ @summary : Saves list. @note : Will include modification made in updateFileInlist method @param clients : Client to wich the file is related(used to narrow down searchs) @param user : Name of the client, person, etc.. wich has a relation with the file. """ statsPaths = StatsPaths() statsPaths.setPaths() directory = statsPaths.STATSDATA + "fileAccessVersions/" combinedName = "" for client in clients: combinedName = combinedName + client fileName = combinedName + "_" + user if not os.path.isdir(directory): os.makedirs(directory, mode=0777) #create directory completeFilename = directory + fileName #print "saving %s" %completeFilename CpickleWrapper.save(object=self.savedFileList, filename=completeFilename)
def addAutomaticUpdateToLogs( self, timeOfUpdateInIsoFormat, currentUpdateFrequency = None ): """ @summary : Writes a new file in the log folder containing the current update frequency. @param timeOfUpdateInIsoFormat: Time that the entries name will sport. """ paths = StatsPaths() paths.setPaths() fileName = paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/" + str( timeOfUpdateInIsoFormat ).replace( " ", "_" ) #Safety to make sure if not os.path.isdir( os.path.dirname( fileName ) ): os.makedirs( os.path.dirname( fileName ), 0777 ) if currentUpdateFrequency == None : currentUpdateFrequency = self.getCurrentUpdateFrequency() CpickleWrapper.save( currentUpdateFrequency, fileName ) allEntries = os.listdir(paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/") allEntries.sort() entriesToRemove = allEntries[ :-self.numberOfLogsToKeep] for entrytoRemove in entriesToRemove: os.remove(paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/" + entrytoRemove )
def updateFilesAssociatedWithMachineTags(tagsNeedingUpdates, machineParameters): """ @summary : For all the tags for wich a machine was change we rename all the files associated with that tag. @param tagsNeedingUpdates: List of tags that have been modified since the last call. """ paths = StatsPaths() paths.setPaths() previousParameters = getMachineParametersFromPreviousCall() for tag in tagsNeedingUpdates: previousCombinedMachineNames = "" previousCombinedMachineNames = previousCombinedMachineNames.join( [x for x in previousParameters.getMachinesAssociatedWith(tag)]) currentCombinedMachineNames = "" currentCombinedMachineNames = currentCombinedMachineNames.join( [x for x in machineParameters.getMachinesAssociatedWith(tag)]) output = commands.getoutput( "%sfileRenamer.py -o %s -n %s --overrideConfirmation" % (paths.STATSTOOLS, previousCombinedMachineNames, currentCombinedMachineNames)) print "%sfileRenamer.py -o %s -n %s --overrideConfirmation" % ( paths.STATSTOOLS, previousCombinedMachineNames, currentCombinedMachineNames) print output
def main(): """ @summary : Small test case scenario allows for unit-like testing of the LanguageTools class. """ configParameters = StatsConfigParameters() configParameters.getAllParameters() language = configParameters.mainApplicationLanguage paths = StatsPaths() paths.setBasicPaths() print "Language set in config file : %s" %language print "Test1 : (Should show that the proper translation file will be used) " fileName = LanguageTools.getTranslationFileName( language, paths.STATSLIB + 'StatsPlotter' ) print "Translation file to be used : %s " %( fileName ) print "Test2 : (Should translate the word into the specified language) " translator = LanguageTools.getTranslator( fileName ) print "Translation for bytecount : %s" %( translator("bytecount") ) print "Test3 : (Should be the same result as test 2) " translator = LanguageTools.getTranslatorForModule( paths.STATSLIB + 'StatsPlotter', language ) print "Translation for bytecount : %s" %( translator("bytecount") ) print "Test4 : Unless translation changes, this should print 'filecount' " print "Result : ", LanguageTools.translateTerm("nbreDeFichiers", "fr", "en", paths.STATSLIB + "StatsPlotter.py" )
def getSavedList( self, user, clients ):
    """
    @summary : Returns the checksum of the files contained
               in the saved list.

    @param user    : Name of the user the saved list belongs to.
    @param clients : List of client names; their concatenation
                     selects the file to load.

    @return : The saved file list dictionary, or {} when no
              saved list exists.
    """
    self.savedFileList = {}

    statsPaths = StatsPaths()
    statsPaths.setPaths()
    directory = statsPaths.STATSDATA + "fileAccessVersions/"

    combinedName = ""
    for client in clients:
        combinedName = combinedName + client
    fileName = combinedName + "_" + user

    try:
        self.savedFileList = CpickleWrapper.load( directory + fileName )
        # Bugfix : was "self.savedFileLis" (typo) which raised an
        # AttributeError that the bare except silently swallowed,
        # so a None pickle was returned as-is instead of {}.
        if self.savedFileList == None:
            self.savedFileList = {}
    except:  # if file does not exist
        pass

    return self.savedFileList
def getClientsCurrentFileList( self, clients ):
    """
    @summary : Gets all the files associated with the list of clients.

    @note : Client list is used here since we need to find all the
            pickles that will be used in a merger. Thus unlike all
            other methods we dont refer here to the combined name
            but rather to a list of individual machine names.

    @return : Dictionary mapping every found file to its mtime.
    """
    pickleNames = []

    statsPaths = StatsPaths()
    statsPaths.setPaths()

    for client in clients:
        clientPattern = statsPaths.STATSPICKLES + client + "/*/*"  # _??
        for folder in glob.glob( clientPattern ):
            if os.path.isdir( folder ):
                pickleNames.extend( glob.glob( folder + "/" + "*_??" ) )

    for pickleName in pickleNames:
        self.currentClientFileList[pickleName] = os.path.getmtime( pickleName )

    return self.currentClientFileList
def backupRRDDatabases(configParameters, currentTime, nbBackupsToKeep): """ @summary: Based on current time and frequencies contained within the time parameters, we will backup the databases only if necessary. @param configParameters: StatsConfigParameters instance. @param currenTime: currentTime in seconds since epoch format. """ paths = StatsPaths() paths.setPaths() updateManager = AutomaticUpdatesManager( configParameters.nbAutoUpdatesLogsToKeep, "dbBackups") if updateManager.updateIsRequired(currentTime): commands.getstatusoutput(paths.STATSTOOLS + "backupRRDDatabases.py" + " " + str(int(nbBackupsToKeep))) print paths.STATSTOOLS + "backupRRDDatabases.py" + " " + str( nbBackupsToKeep) updateManager.addAutomaticUpdateToLogs(currentTime)
def cleanUp(configParameters, currentTime, daysOfPicklesToKeep): """ @summary: Based on current time and frequencies contained within the time parameters, we will run the cleaners that need to be run. @param configParameters: StatsConfigParameters instance. @param currenTime: currentTime in seconds since epoch format. """ paths = StatsPaths() paths.setPaths() updateManager = AutomaticUpdatesManager( configParameters.nbAutoUpdatesLogsToKeep, "picklecleaner") if updateManager.updateIsRequired(currentTime): output = commands.getoutput(paths.STATSTOOLS + "pickleCleaner.py %s" % int(daysOfPicklesToKeep)) print paths.STATSTOOLS + "pickleCleaner.py" + " " + str( daysOfPicklesToKeep) updateManager.addAutomaticUpdateToLogs(currentTime) updateManager = AutomaticUpdatesManager( configParameters.nbAutoUpdatesLogsToKeep, "generalCleaner") if updateManager.updateIsRequired(currentTime): commands.getstatusoutput(paths.STATSTOOLS + "clean_dir.plx" + " " + paths.STATSETC + "clean_dir.conf") print paths.STATSTOOLS + "clean_dir.plx" + " " + paths.STATSETC + "clean_dir.conf" updateManager.addAutomaticUpdateToLogs(currentTime)
def updatePickledTimes(dateToSet="2006-10-23 09:00:00"):
    """
    @summary : Get all the keys then set all of them to the desired date.

    @param dateToSet : Iso-format date every key will be set to.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()
    folder = statsPaths.STATSPICKLESTIMEOFUPDATES

    for entry in os.listdir(folder):
        # Bugfix : os.listdir returns bare names, so the original
        # os.path.isfile(fileName) test was relative to the current
        # working directory and (normally) always False — no pickle
        # was ever updated. Join with the folder first.
        fileName = os.path.join(folder, entry)
        if os.path.isfile(fileName):
            fileHandle = open(fileName, "r")
            pickledTimes = pickle.load(fileHandle)
            fileHandle.close()

            for key in pickledTimes.keys():
                pickledTimes[key] = dateToSet

            fileHandle = open(fileName, "w")
            pickle.dump(pickledTimes, fileHandle)
            fileHandle.close()
def main(): """ @summary : Small test case scenario allows for unit-like testing of the LanguageTools class. """ configParameters = StatsConfigParameters() configParameters.getAllParameters() language = configParameters.mainApplicationLanguage paths = StatsPaths() paths.setBasicPaths() print "Language set in config file : %s" % language print "Test1 : (Should show that the proper translation file will be used) " fileName = LanguageTools.getTranslationFileName( language, paths.STATSLIB + 'StatsPlotter') print "Translation file to be used : %s " % (fileName) print "Test2 : (Should translate the word into the specified language) " translator = LanguageTools.getTranslator(fileName) print "Translation for bytecount : %s" % (translator("bytecount")) print "Test3 : (Should be the same result as test 2) " translator = LanguageTools.getTranslatorForModule( paths.STATSLIB + 'StatsPlotter', language) print "Translation for bytecount : %s" % (translator("bytecount")) print "Test4 : Unless translation changes, this should print 'filecount' " print "Result : ", LanguageTools.translateTerm( "nbreDeFichiers", "fr", "en", paths.STATSLIB + "StatsPlotter.py")
def __updateCsvFiles( self, type, clusters, cost ):
    """
    @summary : Generate the rx and tx csv files for yesterday for all clusters.

    @param type     : daily | weekly | monthly | yearly
    @param clusters : List of currently running source clusters.
    @param cost     : Total operational cost for the period specified by the type.

    @return : None
    """
    paths = StatsPaths()
    paths.setPaths()

    typeParameters = { "daily" : "-d", "weekly" : "-w", "monthly" : "-m", "yearly" : "-y" }

    conversionTemplate = paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f %s --language %s'

    output = commands.getoutput( conversionTemplate % ( typeParameters[type], clusters,
                                                        self.timeOfRequest, "rx",
                                                        self.outputLanguage ) )

    # NOTE(review) : only the tx run's output is examined below — the rx
    # file name is never extracted. Presumably intentional; confirm.
    output = commands.getoutput( conversionTemplate % ( typeParameters[type], clusters,
                                                        self.timeOfRequest, "tx",
                                                        self.outputLanguage ) )

    fileName = self.__getFileNameFromExecutionOutput(output)
    if fileName != "":
        commands.getstatusoutput( paths.STATSWEBPAGESGENERATORS
                                  + 'csvDataFiltersForWebPages.py -c %s -f %s '
                                  % ( cost, fileName ) )
def restoreDatabaseUpdateTimes( timeToRestore, currentTime, nbBackupsToKeep ): """ @summary : Copy all databases into a folder sporting the data of the backup. @param timeToRestore : Time of the DB backups to set as current DB. @param currentTime : Time of the call to the script. @param nbBackupsToKeep : total number of backups to keep. """ statsPaths = StatsPaths() statsPaths.setPaths() source = statsPaths.STATSDBUPDATESBACKUPS + "/%s" %timeToRestore destination = statsPaths.STATSCURRENTDBUPDATES #Archive current Database backupRRDDatabases.backupDatabaseUpdateTimes( currentTime, nbBackupsToKeep, foldersToPreserve = [ source ] ) #restore desired status, output = commands.getstatusoutput( "rm -r %s" %( destination ) ) os.makedirs(destination) status, output = commands.getstatusoutput( "cp -rf %s/* %s" %( source, destination ) ) print output
def saveList( self, user, clients ): """ @summary : Saves list. @note : Will include modification made in updateFileInlist method @param clients : Client to wich the file is related(used to narrow down searchs) @param user : Name of the client, person, etc.. wich has a relation with the file. """ statsPaths = StatsPaths() statsPaths.setPaths() directory = statsPaths.STATSDATA + "fileAccessVersions/" combinedName = "" for client in clients: combinedName = combinedName + client fileName = combinedName + "_" + user if not os.path.isdir( directory ): os.makedirs( directory, mode=0777 ) #create directory completeFilename = directory + fileName #print "saving %s" %completeFilename CpickleWrapper.save( object = self.savedFileList, filename = completeFilename )
def transferLogFiles(): """ @summary : Log files will not be tansferred if local machine is not designed to be a pickling machine. If log files are to be transferred, they will be straight from the source." """ paths = StatsPaths() paths.setPaths() parameters = StatsConfigParameters() machineParameters = MachineConfigParameters() machineParameters.getParametersFromMachineConfigurationFile() parameters.getAllParameters() individualSourceMachines = machineParameters.getMachinesAssociatedWithListOfTags( parameters.sourceMachinesTags ) individualPicklingMachines = machineParameters.getMachinesAssociatedWithListOfTags( parameters.picklingMachines ) for sourceMachine,picklingMachine in map( None, individualSourceMachines, individualPicklingMachines ) : if picklingMachine == LOCAL_MACHINE :#pickling to be done here userName = machineParameters.getUserNameForMachine(sourceMachine) remoteLogPath = paths.getPXPathFromMachine( paths.PXLOG, sourceMachine, userName ) print "rsync -avzr --delete-before -e ssh %s@%s:%s %s%s/ " %( userName , sourceMachine,remoteLogPath , paths.STATSLOGS, sourceMachine ) output = commands.getoutput( "rsync -avzr --delete-before -e ssh %s@%s:%s %s%s/ " %( userName , sourceMachine, remoteLogPath, paths.STATSLOGS, sourceMachine ) ) print output
def __getDocFilesToLinkTo(self, language):
    """
    @summary : Gathers and returns all the documentation files
               currently available for the given language.

    @param language : Language suffix the html files must sport.

    @return : The sorted list of file names to link to.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths( self.mainLanguage )

    htmlFolder = statsPaths.STATSDOC + "html/"
    pattern = "*_%s.html" % (language)

    filesToLinkTo = [os.path.basename( entry )
                     for entry in os.listdir( htmlFolder )
                     if fnmatch.fnmatch( os.path.basename( entry ), pattern )]
    filesToLinkTo.sort()

    return filesToLinkTo
def __getAutomaticUpdatesDoneDuringTimeSpan( self, startTime, endtime ):
    """
    @summary : Returns the automatic-update log entries that fall
               within the given time span.

    @param startTime : Start time of the span in iso format.
    @param endtime   : End time of the span in iso format.

    @return : List of matching log entry names.
    """

    # Set to fit file standard : entry names use '_' instead of ' '.
    startTime = startTime.replace( " ", "_" )
    endtime = endtime.replace( " ", "_" )

    def afterEndTime(x):
        return x <= endtime

    def beforeStartTime(x):
        return x >= startTime

    paths = StatsPaths()
    paths.setPaths()

    # Bugfix : the original called
    #   os.listdir( updatesDirectory = ... )
    # passing the directory as an unsupported keyword argument, which
    # raises a TypeError. Build the path first, then list it.
    updatesDirectory = paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/"
    updates = os.listdir( updatesDirectory )

    updates = filter( afterEndTime, updates )
    updates = filter( beforeStartTime, updates )

    return updates
def updateWordsFromDB(wordType, word, language):
    """
    @summary : Updates words within the db depending on the
               specified type of databases.

    @param wordType : Type of word : "products" or "groupName".
    @param word     : Word to add to the database.
    @param language : Language that is currently used by the caller.

    @return : None
    """
    _ = LanguageTools.getTranslatorForModule(CURRENT_MODULE_ABS_PATH, language)

    statsPaths = StatsPaths()
    statsPaths.setPaths(language)

    if wordType == "products":
        wordFile = statsPaths.STATSWEBWORDDATABASES + _('products')
        updateWordsFromFile(wordFile, word)
    elif wordType == "groupName":
        wordFile = statsPaths.STATSWEBWORDDATABASES + _('groupNames')
        updateWordsFromFile(wordFile, word)
def getTimeOfLastUpdateInLogs(self):
    """
    @summary : Returns the time of the last update in iso format.

    @return : Time of the last update; when no update was ever logged,
              today's midnight is returned in iso format, as to make
              sure an update is made since no prior updates exist.
    """
    # Default : today's midnight, used when no log entry exists.
    timeOfLastUpdate = StatsDateLib.getIsoTodaysMidnight(StatsDateLib.getCurrentTimeInIsoformat())

    paths = StatsPaths()
    paths.setPaths()
    updatesDirectory = paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/"

    if not os.path.isdir(updatesDirectory):
        os.makedirs(updatesDirectory)

    allEntries = os.listdir(updatesDirectory)
    if allEntries != []:
        # Entry names sort chronologically; take the newest one.
        allEntries.sort()
        allEntries.reverse()
        timeOfLastUpdate = os.path.basename(allEntries[0]).replace("_", " ")

    return timeOfLastUpdate
def cleanUp( configParameters, currentTime, daysOfPicklesToKeep ): """ @summary: Based on current time and frequencies contained within the time parameters, we will run the cleaners that need to be run. @param configParameters: StatsConfigParameters instance. @param currenTime: currentTime in seconds since epoch format. """ paths = StatsPaths() paths.setPaths() updateManager = AutomaticUpdatesManager(configParameters.nbAutoUpdatesLogsToKeep, "picklecleaner") if updateManager.updateIsRequired(currentTime) : output = commands.getoutput( paths.STATSTOOLS + "pickleCleaner.py %s" %int(daysOfPicklesToKeep) ) print paths.STATSTOOLS + "pickleCleaner.py" + " " + str( daysOfPicklesToKeep ) updateManager.addAutomaticUpdateToLogs( currentTime ) updateManager = AutomaticUpdatesManager(configParameters.nbAutoUpdatesLogsToKeep, "generalCleaner") if updateManager.updateIsRequired(currentTime) : commands.getstatusoutput( paths.STATSTOOLS + "clean_dir.plx" + " " + paths.STATSETC + "clean_dir.conf" ) print paths.STATSTOOLS + "clean_dir.plx" + " " + paths.STATSETC + "clean_dir.conf" updateManager.addAutomaticUpdateToLogs( currentTime )
def updatePickledTimes( dateToSet = "2006-10-23 09:00:00" ):
    """
    @summary : Get all the keys then set all of them to the desired date.

    @param dateToSet : Iso-format date every key will be set to.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()
    folder = statsPaths.STATSPICKLESTIMEOFUPDATES

    for entry in os.listdir( folder ):
        # Bugfix : os.listdir returns bare names, so the original
        # os.path.isfile(fileName) test was relative to the current
        # working directory and (normally) always False — no pickle
        # was ever updated. Join with the folder first.
        fileName = os.path.join( folder, entry )
        if os.path.isfile( fileName ):
            fileHandle = open( fileName, "r" )
            pickledTimes = pickle.load( fileHandle )
            fileHandle.close()

            for key in pickledTimes.keys():
                pickledTimes[key] = dateToSet

            fileHandle = open( fileName, "w" )
            pickle.dump( pickledTimes, fileHandle )
            fileHandle.close()
def getGroupSettingsFromConfigurationFile(self):
    """
    Reads all the group settings from the configuration file.
    """
    groupParameters = GroupConfigParameters([], {}, {}, {}, {})

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    paths = StatsPaths()
    paths.setBasicPaths()
    config = paths.STATSETC + "config"

    fileHandle = open(config, "r")

    # Skip ahead until the groups section, or EOF.
    line = fileHandle.readline()
    while line != "" and "[specialGroups]" not in line:
        line = fileHandle.readline()

    if line != "":
        # Read entries until the next section, or EOF.
        line = fileHandle.readline()
        while line != "" and "[" not in line:
            if line != '\n' and line[0] != '#':
                splitLine = line.split()
                # A valid group entry has exactly 6 fields :
                # name, ?, machines, fileTypes, members, products.
                if len(splitLine) == 6:
                    groupName = splitLine[0]
                    if groupName not in (groupParameters.groups):
                        groupParameters.groups.append(groupName)
                        groupParameters.groupsMachines[groupName] = []
                        groupParameters.groupFileTypes[groupName] = []
                        groupParameters.groupsMembers[groupName] = []
                        groupParameters.groupsProducts[groupName] = []

                    for machine in splitLine[2].split(","):
                        groupParameters.groupsMachines[groupName].extend(
                            machineParameters.getMachinesAssociatedWith(machine))

                    groupParameters.groupFileTypes[groupName] = splitLine[3]
                    groupParameters.groupsMembers[groupName].extend(splitLine[4].split(","))
                    groupParameters.groupsProducts[groupName].extend(splitLine[5].split(","))

            line = fileHandle.readline()

    fileHandle.close()
    self.groupParameters = groupParameters
def __init__( self, displayedLanguage = 'en', filesLanguage = 'en', days = None,
              weeks = None, months = None, years = None,
              pathsTowardsGraphics = None, pathsTowardsOutputFiles = None ):
    """
    @summary : Constructor.

    @param displayedLanguage : Language in which to display the different
                               captions found within the generated web page.
    @param filesLanguage     : Language in which the files that will be
                               referenced within this page have been generated.
    @param days              : List of days that the web page covers.
    @param weeks             : List of weeks that the web page covers.
    @param months            : List of months that the web page covers.
    @param years             : List of years that the web page covers.

    @note : Will set two global translators to be used throughout this module :
            _  which translates every caption that is to be printed.
            _F which translates every filename that is to be linked.
    """
    configParameters = StatsConfigParameters()
    configParameters.getGeneralParametersFromStatsConfigurationFile()

    global _
    _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, displayedLanguage )

    # Fall back on computed defaults for any period list not supplied.
    if days is None:
        self.setDays()
    else:
        self.days = days

    if weeks is None:
        self.setWeeks()
    else:
        self.weeks = weeks

    if months is None:
        self.setMonths()
    else:
        self.months = months

    if years is None:
        self.setYears()
    else:
        self.years = years

    self.displayedLanguage = displayedLanguage
    self.filesLanguage = filesLanguage

    # NOTE(review) : the pathsTowardsGraphics / pathsTowardsOutputFiles
    # parameters are ignored and rebuilt from the languages — confirm.
    self.pathsTowardsGraphics = StatsPaths()
    self.pathsTowardsGraphics.setPaths( filesLanguage )
    self.pathsTowardsOutputFiles = StatsPaths()
    self.pathsTowardsOutputFiles.setPaths( self.displayedLanguage )

    StatsDateLib.setLanguage( filesLanguage )
def getGeneralParametersFromStatsConfigurationFile(self):
    """
    @summary : Gathers GENERAL parameters from the
               StatsPath.STATSETC/config file.

    @note : Does not set groupParameters, time parameters
            and detailed parameters.

    @return : None
    """
    paths = StatsPaths()
    paths.setBasicPaths()
    configFile = paths.STATSETC + "config"

    config = ConfigParser()
    file = open(configFile)
    config.readfp(file)

    self.sourceMachinesTags = []
    self.picklingMachines = []
    self.machinesToBackupInDb = []
    self.graphicsUpLoadMachines = []
    self.artifactsLanguages = []
    self.webPagesLanguages = []

    self.mainApplicationLanguage = config.get('generalConfig', 'mainApplicationLanguage')
    self.artifactsLanguages.extend(config.get('generalConfig', 'artifactsLanguages').split(','))

    # Web page languages come as "displayed:files" pairs.
    for languagePair in config.get('generalConfig', 'webPagesLanguages').split(','):
        self.webPagesLanguages.append(
            (languagePair.split(":")[0], languagePair.split(":")[1]))

    self.statsRoot = config.get('generalConfig', 'statsRoot')
    self.sourceMachinesTags.extend(config.get('generalConfig', 'sourceMachinesTags').split(','))
    self.picklingMachines.extend(config.get('generalConfig', 'picklingMachines').split(','))
    self.machinesToBackupInDb.extend(config.get('generalConfig', 'machinesToBackupInDb').split(','))
    self.graphicsUpLoadMachines.extend(config.get('generalConfig', 'graphicsUpLoadMachines').split(','))

    self.daysOfPicklesToKeep = float(config.get('generalConfig', 'daysOfPicklesToKeep'))
    self.nbDbBackupsToKeep = float(config.get('generalConfig', 'nbDbBackupsToKeep'))
    self.nbAutoUpdatesLogsToKeep = int(config.get('generalConfig', 'nbAutoUpdatesLogsToKeep'))

    try:
        file.close()
    except:
        pass
def getTranslationFileName(language='en', moduleAbsPath='module'): """ @summary : Returns the filename containing the translation text required by the specified module for the spcified language. @Note : Will return "" if language is not supported. @param language: Language for which we need the translation file. @param moduleAbsPath: AbsolutePath name of the module for which we need the translation file. @return : Returns the filename containing the translation text required by the specified module for the spcified language. Will return "" if language is not supported. """ translationfileName = "" moduleAbsPath = os.path.realpath(moduleAbsPath) #decodes symlinks. try: paths = StatsPaths() paths.setBasicPaths() if language == 'en': correspondingPaths = { paths.STATSBIN : paths.STATSLANGENBIN, paths.STATSDEBUGTOOLS : paths.STATSLANGENBINDEBUGTOOLS \ , paths.STATSTOOLS : paths.STATSLANGENBINTOOLS, paths.STATSWEBPAGESGENERATORS : paths.STATSLANGENBINWEBPAGES \ , paths.STATSLIB : paths.STATSLANGENLIB } elif language == 'fr': correspondingPaths = { paths.STATSBIN : paths.STATSLANGFRBIN, paths.STATSDEBUGTOOLS : paths.STATSLANGFRBINDEBUGTOOLS \ , paths.STATSTOOLS : paths.STATSLANGFRBINTOOLS, paths.STATSWEBPAGESGENERATORS : paths.STATSLANGFRBINWEBPAGES \ , paths.STATSLIB : paths.STATSLANGFRLIB } for key in correspondingPaths.keys(): correspondingPaths[key.split("pxStats")[-1:] [0]] = correspondingPaths[key] modulePath = str(os.path.dirname(moduleAbsPath) + '/').split("pxStats")[-1:][0] moduleBaseName = str(os.path.basename(moduleAbsPath)).replace( ".py", "") #print "modulePath",modulePath #print "correspondingPaths", correspondingPaths translationfileName = correspondingPaths[ modulePath] + moduleBaseName #print translationfileName except Exception, instance: print instance
def updateCsvFiles():
    """
    @summary : Runs the csv file update utility.
    """
    paths = StatsPaths()
    paths.setPaths()

    commandOutput = commands.getoutput(
        "%sgetCsvFilesforWebPages.py" % paths.STATSWEBPAGESGENERATORS)
def __init__( self, directory, fileType, clientNames = None , groupName = "", timespan = 12,\ currentTime = None, productTypes = None, logger = None, logging = True, machines = None,\ workingLanguage = None, outputLanguage = None ): """ @summary : ClientGraphicProducer constructor. CurrentTime format is ISO meaning "2006-06-8 00:00:00". Will use current system time by default. CurrentTime is to be used if a different time than sytem time is to be used. Very usefull for testing or to implement graphic request where user can choose start time. """ global _ _ = self.getTranslatorForModule(CURRENT_MODULE_ABS_PATH, workingLanguage) if currentTime != None: currentTime = currentTime else: currentTime = time.time() self.directory = directory # Directory where log files are located. self.fileType = fileType # Type of log files to be used. self.machines = machines or [] # Machines for wich to collect data. self.clientNames = clientNames or [ ] # Client name we need to get the data from. self.groupName = groupName # Name for a group of clients to be combined. self.timespan = timespan # Number of hours we want to gather the data from. self.currentTime = currentTime # Time when stats were queried. self.productTypes = productTypes or [ "All" ] # Specific data types on wich we'll collect the data. self.loggerName = 'graphs' # Name of the logger self.logger = logger # Logger to use is logging == true. self.logging = logging # Whether or not to enable logging. self.outputLanguage = outputLanguage # Language in which the graphic will be produced in. paths = StatsPaths() paths.setPaths() if logging == True: if self.logger is None: # Enable logging if not os.path.isdir(paths.STATSLOGGING): os.makedirs(paths.STATSLOGGING, mode=0777) self.logger = Logger( paths.STATSLOGGING + 'stats_' + self.loggerName + '.log.notb', 'INFO',\ 'TX' + self.loggerName, bytes = 10000000 ) self.logger = self.logger.getLogger() else: self.logger = None
def getGroupSettingsFromConfigurationFile( self ):
    """
    Reads all the group settings from the configuration file.
    """
    groupParameters = GroupConfigParameters( [], {}, {}, {}, {} )

    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()

    paths = StatsPaths()
    paths.setBasicPaths()
    config = paths.STATSETC + "config"

    fileHandle = open( config, "r" )

    # Skip ahead until the groups section, or EOF.
    line = fileHandle.readline()
    while line != "" and "[specialGroups]" not in line:
        line = fileHandle.readline()

    if line != "":
        # Read entries until the next section, or EOF.
        line = fileHandle.readline()
        while line != "" and "[" not in line:
            if line != '\n' and line[0] != '#':
                splitLine = line.split()
                # A valid group entry has exactly 6 fields :
                # name, ?, machines, fileTypes, members, products.
                if len( splitLine ) == 6:
                    groupName = splitLine[0]
                    if groupName not in (groupParameters.groups):
                        groupParameters.groups.append( groupName )
                        groupParameters.groupsMachines[groupName] = []
                        groupParameters.groupFileTypes[groupName] = []
                        groupParameters.groupsMembers[groupName] = []
                        groupParameters.groupsProducts[groupName] = []

                    for machine in splitLine[2].split(","):
                        groupParameters.groupsMachines[groupName].extend(
                            machineParameters.getMachinesAssociatedWith( machine ) )

                    groupParameters.groupFileTypes[groupName] = splitLine[3]
                    groupParameters.groupsMembers[groupName].extend( splitLine[4].split(",") )
                    groupParameters.groupsProducts[groupName].extend( splitLine[5].split(",") )

            line = fileHandle.readline()

    fileHandle.close()
    self.groupParameters = groupParameters
def updateCsvFiles():
    """
        @summary : Runs the csv file update utility.
    """
    paths = StatsPaths()
    paths.setPaths()

    # Launch the generator script and capture whatever it prints.
    commandLine = "%sgetCsvFilesforWebPages.py" % paths.STATSWEBPAGESGENERATORS
    output = commands.getoutput(commandLine)
def getTranslationFileName( language = 'en', moduleAbsPath = 'module' ): """ @summary : Returns the filename containing the translation text required by the specified module for the spcified language. @Note : Will return "" if language is not supported. @param language: Language for which we need the translation file. @param moduleAbsPath: AbsolutePath name of the module for which we need the translation file. @return : Returns the filename containing the translation text required by the specified module for the spcified language. Will return "" if language is not supported. """ translationfileName = "" moduleAbsPath = os.path.realpath(moduleAbsPath) #decodes symlinks. try : paths = StatsPaths() paths.setBasicPaths() if language == 'en' : correspondingPaths = { paths.STATSBIN : paths.STATSLANGENBIN, paths.STATSDEBUGTOOLS : paths.STATSLANGENBINDEBUGTOOLS \ , paths.STATSTOOLS : paths.STATSLANGENBINTOOLS, paths.STATSWEBPAGESGENERATORS : paths.STATSLANGENBINWEBPAGES \ , paths.STATSLIB : paths.STATSLANGENLIB } elif language == 'fr': correspondingPaths = { paths.STATSBIN : paths.STATSLANGFRBIN, paths.STATSDEBUGTOOLS : paths.STATSLANGFRBINDEBUGTOOLS \ , paths.STATSTOOLS : paths.STATSLANGFRBINTOOLS, paths.STATSWEBPAGESGENERATORS : paths.STATSLANGFRBINWEBPAGES \ , paths.STATSLIB : paths.STATSLANGFRLIB } for key in correspondingPaths.keys(): correspondingPaths[ key.split("pxStats")[-1:][0]] = correspondingPaths[ key] modulePath = str(os.path.dirname( moduleAbsPath ) + '/').split("pxStats")[-1:][0] moduleBaseName = str(os.path.basename( moduleAbsPath )).replace( ".py", "" ) #print "modulePath",modulePath #print "correspondingPaths", correspondingPaths translationfileName = correspondingPaths[ modulePath ] + moduleBaseName #print translationfileName except Exception, instance: print instance
def giveOutPermissionsToFolders(currentlyUsedLanguages): """ @summary : opens up permissions to folders that might be required by the web user. @param currentlyUsedLanguages: Languages currently set to be displayed in the web interface """ for language in currentlyUsedLanguages: _ = LanguageTools.getTranslatorForModule(CURRENT_MODULE_ABS_PATH, language) paths = StatsPaths() paths.setPaths(language) pathsToOpenUp = [] pathsToOpenUp.append(paths.STATSLOGGING) pathsToOpenUp.append(paths.STATSPICKLES) pathsToOpenUp.append(paths.STATSDB) pathsToOpenUp.append(paths.STATSCURRENTDB) pathsToOpenUp.append(paths.STATSCURRENTDB + _("bytecount")) pathsToOpenUp.append(paths.STATSCURRENTDB + _("errors")) pathsToOpenUp.append(paths.STATSCURRENTDB + _("filecount")) pathsToOpenUp.append(paths.STATSCURRENTDB + _("filesOverMaxLatency")) pathsToOpenUp.append(paths.STATSCURRENTDB + _("latency")) pathsToOpenUp.append(paths.STATSCURRENTDBUPDATES) pathsToOpenUp.append(paths.STATSCURRENTDBUPDATES + _("rx")) pathsToOpenUp.append(paths.STATSCURRENTDBUPDATES + _("tx")) pathsToOpenUp.append(paths.STATSCURRENTDBUPDATES + _("totals")) pathsToOpenUp.append(paths.STATSDBBACKUPS) pathsToOpenUp.append(paths.STATSDBBACKUPS + "*/" + _("rx")) pathsToOpenUp.append(paths.STATSDBBACKUPS + "*/" + _("tx")) pathsToOpenUp.append(paths.STATSDBBACKUPS + "*/" + _("totals")) pathsToOpenUp.append(paths.STATSGRAPHS) pathsToOpenUp.append(paths.STATSGRAPHS + _("others/")) pathsToOpenUp.append(paths.STATSGRAPHS + _("others/") + "gnuplot/") pathsToOpenUp.append(paths.STATSGRAPHS + _("others/") + "rrd/") pathsToOpenUp.append(paths.STATSWEBPAGESHTML + "/popUps/") for path in pathsToOpenUp: if not os.path.isdir(path): os.makedirs(path, 0777) commands.getstatusoutput("chmod 0777 %s" % path) commands.getstatusoutput("chmod 0777 %s/*" % path)
def giveOutPermissionsToFolders( currentlyUsedLanguages ):
    """
        @summary : opens up permissions to folders that might be
                   required by the web user.

        @param currentlyUsedLanguages: Languages currently set to be
                                       displayed in the web interface
    """
    for language in currentlyUsedLanguages:

        # Folder names under the stats tree are localised, so a
        # translator is needed for each language in turn.
        _ = LanguageTools.getTranslatorForModule(CURRENT_MODULE_ABS_PATH, language)

        paths = StatsPaths()
        paths.setPaths(language)

        pathsToOpenUp = []
        pathsToOpenUp.append( paths.STATSLOGGING)
        pathsToOpenUp.append( paths.STATSPICKLES )
        pathsToOpenUp.append( paths.STATSDB)
        pathsToOpenUp.append( paths.STATSCURRENTDB )
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("bytecount") )
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("errors") )
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("filecount") )
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("filesOverMaxLatency"))
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("latency"))
        pathsToOpenUp.append( paths.STATSCURRENTDBUPDATES)
        pathsToOpenUp.append( paths.STATSCURRENTDBUPDATES + _("rx") )
        pathsToOpenUp.append( paths.STATSCURRENTDBUPDATES + _("tx") )
        pathsToOpenUp.append( paths.STATSCURRENTDBUPDATES + _("totals") )
        pathsToOpenUp.append( paths.STATSDBBACKUPS )
        # The "*/" patterns reach into every dated backup sub-folder.
        pathsToOpenUp.append( paths.STATSDBBACKUPS + "*/" + _("rx") )
        pathsToOpenUp.append( paths.STATSDBBACKUPS + "*/" + _("tx") )
        pathsToOpenUp.append( paths.STATSDBBACKUPS + "*/" + _("totals") )
        pathsToOpenUp.append( paths.STATSGRAPHS )
        pathsToOpenUp.append( paths.STATSGRAPHS +_("others/"))
        pathsToOpenUp.append( paths.STATSGRAPHS +_("others/") + "gnuplot/")
        pathsToOpenUp.append( paths.STATSGRAPHS +_("others/") + "rrd/")
        pathsToOpenUp.append( paths.STATSWEBPAGESHTML + "/popUps/")

        for path in pathsToOpenUp:
            if not os.path.isdir(path):
                # NOTE(review): the mode here is masked by the umask; the
                # chmod calls below are what actually guarantee 0777.
                os.makedirs(path, 0777)
            commands.getstatusoutput( "chmod 0777 %s" %path )
            commands.getstatusoutput( "chmod 0777 %s/*" %path )
def buildCsvFileName(infos):
    """
        @summary: Builds and returns the file name to use for the csv file.

        @param infos: _CvsInfos instance containing the required
                      information to build up the file name.

        @return: Return the built up file name.
    """
    global _

    StatsDateLib.setLanguage(infos.outputLanguage)
    paths = StatsPaths()
    paths.setPaths(infos.outputLanguage)

    # Flatten the machine list repr into a bare concatenated string.
    machinesStr = str(infos.machinesForLabels)
    for uselessChar in ['[', ']', ',', "'", '"', ' ']:
        machinesStr = machinesStr.replace(uselessChar, '')

    startInSeconds = StatsDateLib.getSecondsSinceEpoch(infos.start)
    currentYear, currentMonth, currentDay = StatsDateLib.getYearMonthDayInStrfTime(startInSeconds)
    currentWeek = time.strftime("%W", time.gmtime(startInSeconds))

    fileName = paths.STATSCSVFILES

    # One sub-tree per span; the date components used depend on the span.
    if infos.span == "daily":
        fileName += "/" + _("daily/") + infos.fileType + "/%s/%s/%s/%s.csv" % (
            machinesStr, currentYear, currentMonth, currentDay)
    elif infos.span == "weekly":
        fileName += "/" + _("weekly/") + infos.fileType + "/%s/%s/%s.csv" % (
            machinesStr, currentYear, currentWeek)
    elif infos.span == "monthly":
        fileName += "/" + _("monthly/") + infos.fileType + "/%s/%s/%s.csv" % (
            machinesStr, currentYear, currentMonth)
    elif infos.span == "yearly":
        fileName += "/" + _("yearly/") + infos.fileType + "/%s/%s.csv" % (
            machinesStr, currentYear)

    # Restore the application-wide language before returning.
    StatsDateLib.setLanguage(LanguageTools.getMainApplicationLanguage())

    return fileName
def main(): """ Parses parameters then calls the tranferMethod. """ setGlobalLanguageParameters() paths = StatsPaths() paths.setPaths() login = "" machine = "" if len( sys.argv ) == 3 : login = sys.argv[1] machine = sys.argv[2] transfer( login, machine ) else: print _( "#######################################################################################################" ) print _( "#" ) print _( "# Help for retreiveDataFromMachine.py" ) print _( "#" ) print _( "# This program is to be used to transfer all of the important stats files" ) print _( "# from a remote machine to the local machine." ) print _( "#" ) print _( "# If large transfers are to be done, program may take many hours to complete." ) print _( "# Output from every operation will be printed as to let the user see exactly what is going on" ) print _( "# and allow errors to be detected." ) print _( "#" ) print _( "# Examples : invalid login or ssh other ssh errors" ) print _( "#" ) print _( "# This will also serve as to take out the guesswork as to why the program is taking so long to complete." ) print _( "#" ) print _( "# Log files will not be tansferred if local machine is not designed to be a pickling machine. " ) print _( "# If log files are to be transferred, they will be transferred straight from the source." ) print _( "#" ) print _( "# *** Make sure %sconfig is filled properly prior to running this script !!! ***") %paths.STATSETC print _( "# *** Consult pxStats documentation if you are unsure how to fill %sconfig. ***") %paths.STATSETC print _( "#" ) print _( "#" ) print _( "#" ) print _( "# Usage : Program must receive exactly two arguments." ) print _( "#" ) print _( "# Example : python retreiveDataFromMachine.py login machineName " ) print _( "#" ) print _( "#######################################################################################################") print "" print "" print "" sys.exit()
def __init__( self, directory, fileType, clientNames = None , groupName = "", timespan = 12,\ currentTime = None, productTypes = None, logger = None, logging = True, machines = None,\ workingLanguage = None, outputLanguage = None ): """ @summary : ClientGraphicProducer constructor. CurrentTime format is ISO meaning "2006-06-8 00:00:00". Will use current system time by default. CurrentTime is to be used if a different time than sytem time is to be used. Very usefull for testing or to implement graphic request where user can choose start time. """ global _ _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, workingLanguage ) if currentTime != None : currentTime = currentTime else: currentTime = time.time() self.directory = directory # Directory where log files are located. self.fileType = fileType # Type of log files to be used. self.machines = machines or [] # Machines for wich to collect data. self.clientNames = clientNames or [] # Client name we need to get the data from. self.groupName = groupName # Name for a group of clients to be combined. self.timespan = timespan # Number of hours we want to gather the data from. self.currentTime = currentTime # Time when stats were queried. self.productTypes = productTypes or ["All"] # Specific data types on wich we'll collect the data. self.loggerName = 'graphs' # Name of the logger self.logger = logger # Logger to use is logging == true. self.logging = logging # Whether or not to enable logging. self.outputLanguage = outputLanguage # Language in which the graphic will be produced in. paths = StatsPaths() paths.setPaths() if logging == True: if self.logger is None: # Enable logging if not os.path.isdir( paths.STATSLOGGING ): os.makedirs( paths.STATSLOGGING , mode=0777 ) self.logger = Logger( paths.STATSLOGGING + 'stats_' + self.loggerName + '.log.notb', 'INFO',\ 'TX' + self.loggerName, bytes = 10000000 ) self.logger = self.logger.getLogger() else: self.logger = None
def getLocalPathsToSynchronise():
    """
        @summary : Returns the list of local paths that need to be
                   synchronised with the remote machine.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    return [statsPaths.STATSMONITORING,
            statsPaths.STATSGRAPHS,
            statsPaths.STATSPICKLES,
            statsPaths.STATSCURRENTDB,
            statsPaths.STATSDBBACKUPS,
            statsPaths.STATSCURRENTDBUPDATES,
            statsPaths.STATSDBUPDATESBACKUPS]
def getCrontabLine(self, attribute, attributeValue ):
    """
        @param attribute: attribute for wich you want to build a crontab line.

        @param attributeValue: One-entry dict mapping the frequency to
                               its time unit, e.g. { 2 : 'hours' }.

        @return: a crontab based on the program associated with the
                 attribute and the frequency that was specified.
                 Returns "" when the time unit is not supported.
    """
    paths = StatsPaths()
    paths.setBasicPaths()

    # Fields: minute, hour, day of month, month, day of week, command.
    crontabArray = ['*','*','*','*','*','']

    frequency = attributeValue.keys()[0]
    timeUnit = attributeValue[ frequency ]

    if timeUnit in TimeConfigParameters.validTimeUnits:

        if timeUnit != 'minutes':
            # Randomise the start minute — presumably to stagger the
            # different cron jobs; TODO confirm intent.
            if attribute == "pxStatsFrequency":
                crontabArray[0] = random.randint(1,10)
            else:
                crontabArray[0] = random.randint(45,59)

        # Turn the field matching the time unit into "*/<frequency>".
        indexToModify = TimeConfigParameters.validTimeUnits.index( timeUnit )
        crontabArray[indexToModify] = crontabArray[indexToModify] + '/' + str(frequency)

        # Pick the program to run for this attribute.
        if attribute == "pxStatsFrequency" :
            crontabArray[5] = paths.STATSLIBRARY + 'pxStats.py'
        elif attribute == "monitoringFrequency" :
            crontabArray[5] = paths.STATSLIBRARY + 'statsMonitor.py'
        elif attribute == "dbBackupsFrequency" :
            crontabArray[5] = paths.STATSLIBRARY + 'backupRRDDatabases.py'
        elif attribute == "pickleCleanerFrequency" :
            crontabArray[5] = paths.STATSLIBRARY + 'pickleCleaner.py'
        elif attribute == "generalCleanerFrequency" :
            crontabArray[5] = paths.STATSLIBRARY + 'clean_dir.plx'

        # Join all fields, space-separated (trailing space included).
        crontabLine= ""
        for item in crontabArray:
            crontabLine = crontabLine + str(item) + " "
    else:
        crontabLine = ""

    return crontabLine
def __init__(self, timeOfRequest, outputLanguage):
    """
        @summary : Constructor. Stores the request parameters and
                   initialises the stats paths.

        @param timeOfRequest : Time at which the graphics are requested.

        @param outputLanguage : Language in which to output the graphics.
    """
    self.timeOfRequest = timeOfRequest    # Time at which the graphics are requested.
    self.outputLanguage = outputLanguage  # Language in which to output the graphics.

    # Application paths, resolved once for the lifetime of the instance.
    self.paths = StatsPaths()
    self.paths.setPaths()
def uploadGraphicFiles( parameters, machineParameters ): """ @summary : Takes all the created daily graphics dedicated to clumbo and uploads them to the machines specified in the parameters. """ paths = StatsPaths() paths.setPaths() for uploadMachine in parameters.graphicsUpLoadMachines : output = commands.getoutput( "scp %s* %s@%s:%s " %( paths.STATSCOLGRAPHS, machineParameters.getUserNameForMachine(uploadMachine), uploadMachine, paths.PDSCOLGRAPHS ) ) print "scp %s* %s@%s:%s " %( paths.STATSCOLGRAPHS, machineParameters.getUserNameForMachine(uploadMachine),uploadMachine, paths.PDSCOLGRAPHS ) print output
def getParametersFromMachineConfigurationFile(self):
    '''
        @summary: Gathers all the information found within the
                  configForMachines configuration file.

        @note: Fills in this instance's machine tag / machine /
               user-name collections; the method itself returns None.

        @return: Returns an _MachineConfigParameters instance containing
                 all the found parameters.
    '''

    paths = StatsPaths()
    paths.setBasicPaths()
    CONFIG = paths.STATSETC + "configForMachines"

    if os.path.isfile( CONFIG ):

        fileHandle = open( CONFIG )
        lines = fileHandle.readlines()

        for line in lines:
            # Skip empty lines, comment lines and blank lines.
            if line != '' and line[0] != '#' and line[0] != '\n' :
                # Expected format : tag machine1,machine2 user1,user2
                splitLine = line.split()
                machineTag = splitLine[0]
                machines = splitLine[1].split(",")
                userNames = splitLine[2].split(",")

                # Machines and user names must pair up one-to-one;
                # mismatched lines are silently ignored.
                if ( len(machines) == len(userNames) ):
                    for i in range( len( machines ) ):
                        self.addMachineTagToTagList( machineTag)
                        self.addMachineToMachineList( machines[i] )
                        self.addMachineToMachineTag(machines[i], machineTag)
                        self.setUserNameForMachine(machines[i], userNames[i])

        # NOTE(review): rewinding just before close looks unnecessary.
        fileHandle.seek(0)
        fileHandle.close()
def updateDatabases( parameters, machineParameters, currentTimeInIsoFormat ):
    """
        @summary : Updates all the required databases by transferring
                   the data found in the pickle files into rrd
                   databases files.

                   First transfers all the pickles into databases
                   for all the clusters.

                   Then combines all the data required by the
                   different groups found within the config file.

        @param parameters: StatsConfigParameters instance containing the
                           parameters found in the config file.

        @param machineParameters: MachineConfigParameters instance containing
                                  the parameters found in the config file.

        @param currentTimeInIsoFormat : Time at which this program was
                                        originally called.

        @return : None
    """
    paths = StatsPaths()
    paths.setPaths()

    #Small safety measure in case another instance of the program is allready running.
    if transferToDatabaseAlreadyRunning() == False :

        # One transfer run per machine cluster tag.
        for tag in parameters.machinesToBackupInDb :
            machines = machineParameters.getMachinesAssociatedWith(tag)
            # "['a', 'b']" -> "'a','b'" : strip brackets and spaces, keep quotes.
            machines = str( machines ).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )

            output = commands.getoutput( "%stransferPickleToRRD.py -m '%s' -e '%s' " %( paths.STATSBIN, machines, currentTimeInIsoFormat ) )
            print "%stransferPickleToRRD.py -m '%s' " %( paths.STATSBIN, machines )
            print "output:%s" %output

        # One combined transfer run per configured group.
        if parameters.groupParameters.groups != []:
            for group in parameters.groupParameters.groups :
                # Same bracket/space stripping for every group field.
                groupMembers = str( parameters.groupParameters.groupsMembers[group]).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )
                groupMachines = str( parameters.groupParameters.groupsMachines[group] ).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )
                groupProducts = str( parameters.groupParameters.groupsProducts[group] ).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )
                groupFileTypes = str(parameters.groupParameters.groupFileTypes[group]).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )

                output = commands.getoutput( "%stransferPickleToRRD.py -c '%s' -m '%s' -g '%s' -f %s -p '%s' -e '%s' " %( paths.STATSBIN, groupMembers, groupMachines, group, groupFileTypes, groupProducts, currentTimeInIsoFormat ) )
                print "%stransferPickleToRRD.py -c '%s' -m '%s' -g '%s' -f %s -p '%s' " %( paths.STATSBIN, groupMembers, groupMachines, group, groupFileTypes, groupProducts )
                print output
def getCrontabLine(self, attribute, attributeValue):
    """
        @param attribute: attribute for wich you want to build a crontab line.

        @return: a crontab based on the program associated with the
                 attribute and the frequency that was specified.
                 Returns "" when the time unit is not supported.
    """
    paths = StatsPaths()
    paths.setBasicPaths()

    frequency = attributeValue.keys()[0]
    timeUnit = attributeValue[frequency]

    # Unsupported time units yield an empty crontab line.
    if timeUnit not in TimeConfigParameters.validTimeUnits:
        return ""

    # Fields: minute, hour, day of month, month, day of week, command.
    fields = ['*', '*', '*', '*', '*', '']

    if timeUnit != 'minutes':
        # Randomise the start minute, with a distinct window for pxStats.
        if attribute == "pxStatsFrequency":
            fields[0] = random.randint(1, 10)
        else:
            fields[0] = random.randint(45, 59)

    # Turn the field matching the time unit into "*/<frequency>".
    position = TimeConfigParameters.validTimeUnits.index(timeUnit)
    fields[position] = fields[position] + '/' + str(frequency)

    # Program associated with each known attribute.
    programsByAttribute = {
        "pxStatsFrequency":        'pxStats.py',
        "monitoringFrequency":     'statsMonitor.py',
        "dbBackupsFrequency":      'backupRRDDatabases.py',
        "pickleCleanerFrequency":  'pickleCleaner.py',
        "generalCleanerFrequency": 'clean_dir.plx',
    }
    if attribute in programsByAttribute:
        fields[5] = paths.STATSLIBRARY + programsByAttribute[attribute]

    # Space-separated fields, trailing space included (as before).
    return "".join(str(field) + " " for field in fields)
def getListOfPickleUpdateFiles():
    """
        @summary: Returns the list of currently available pickle
                  update files.

        @return: The list of currently available pickle update files.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    # Everything under the "time of updates" folder counts.
    pattern = statsPaths.STATSPICKLESTIMEOFUPDATES + "*"
    return glob.glob(pattern)
def saveCurrentMachineParameters(machineParameters):
    """
        @summary : Saves the given machineParameters into the
                   /data/previousMachineParameters file.

        @param machineParameters: Machine parameters to save.
    """
    paths = StatsPaths()
    paths.setPaths()

    # Make sure the destination folder exists before pickling.
    destinationFolder = os.path.dirname(paths.STATSPREVIOUSMACHINEPARAMS)
    if not os.path.isdir(destinationFolder):
        os.makedirs(destinationFolder)

    CpickleWrapper.save(machineParameters, paths.STATSPREVIOUSMACHINEPARAMS)
def saveCurrentMachineParameters( machineParameters ):
    """
        @summary : Saves the current machineParameters into the
                   /data/previousMachineParameters file.

        @param machineParameters: Machine parameters to save.
    """
    paths = StatsPaths()
    paths.setPaths()

    # Make sure the destination folder exists before pickling.
    if not os.path.isdir( os.path.dirname( paths.STATSPREVIOUSMACHINEPARAMS ) ):
        os.makedirs( os.path.dirname(paths.STATSPREVIOUSMACHINEPARAMS) )

    CpickleWrapper.save( machineParameters, paths.STATSPREVIOUSMACHINEPARAMS)
def getListOfPickleUpdateFiles():
    """
        @summary: Returns the list of currently available pickle update files

        @return: Returns the list of currently available pickle update files
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    # Everything under the "time of updates" folder counts.
    files = glob.glob( statsPaths.STATSPICKLESTIMEOFUPDATES + '*' )

    return files
def getGeneralParametersFromStatsConfigurationFile(self):
    """
        @summary : Gathers GENERAL parameters from the
                   StatsPath.STATSETC/config file.

        @note : Does not set groupParameters, time parameters and
                detailed parameters.

        @return : None
    """
    paths = StatsPaths()
    paths.setBasicPaths()
    configFile = paths.STATSETC + "config"

    config = ConfigParser()
    # Fix: renamed from "file" so the builtin is no longer shadowed.
    configFileHandle = open( configFile )
    config.readfp( configFileHandle )

    self.sourceMachinesTags     = []
    self.picklingMachines       = []
    self.machinesToBackupInDb   = []
    self.graphicsUpLoadMachines = []
    self.artifactsLanguages     = []
    self.webPagesLanguages      = []

    self.mainApplicationLanguage = config.get( 'generalConfig', 'mainApplicationLanguage' )
    self.artifactsLanguages.extend( config.get( 'generalConfig', 'artifactsLanguages' ).split(',') )

    # Each pair is "first:second" — split into a 2-tuple.
    languagePairs = config.get( 'generalConfig', 'webPagesLanguages' ).split(',')
    for languagePair in languagePairs:
        self.webPagesLanguages.append( (languagePair.split(":")[0], languagePair.split(":")[1]) )

    self.statsRoot = config.get( 'generalConfig', 'statsRoot' )
    self.sourceMachinesTags.extend( config.get( 'generalConfig', 'sourceMachinesTags' ).split(',') )
    self.picklingMachines.extend( config.get( 'generalConfig', 'picklingMachines' ).split(',') )
    self.machinesToBackupInDb.extend( config.get( 'generalConfig', 'machinesToBackupInDb' ).split(',') )
    self.graphicsUpLoadMachines.extend( config.get( 'generalConfig', 'graphicsUpLoadMachines' ).split(',') )
    self.daysOfPicklesToKeep = float( config.get( 'generalConfig', 'daysOfPicklesToKeep' ) )
    self.nbDbBackupsToKeep = float( config.get( 'generalConfig', 'nbDbBackupsToKeep' ) )
    self.nbAutoUpdatesLogsToKeep = int( config.get( 'generalConfig', 'nbAutoUpdatesLogsToKeep' ) )

    # Fix: narrowed the original bare "except:" — still best-effort,
    # but no longer swallows things like KeyboardInterrupt.
    try:
        configFileHandle.close()
    except Exception:
        pass
def __updateCsvFiles(self, type, clusters, cost):
    """
        @summary : Generate the rx and tx csv files for yesterday
                   for all clusters.

        @param type : daily | weekly | monthly | yearly

        @param clusters : List of currently running source clusters.

        @param cost : total operational cost for the period specified
                      by the type

        @return : None
    """
    paths = StatsPaths()
    paths.setPaths()

    # Map the span name to the csvDataConversion.py command-line flag.
    typeParameters = { "daily": "-d", "weekly": "-w", "monthly": "-m", "yearly": "-y" }

    output = commands.getoutput(
        paths.STATSBIN +
        'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f rx --language %s'
        % (typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage))

    output = commands.getoutput(
        paths.STATSBIN +
        'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f tx --language %s'
        % (typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage))

    # NOTE(review): only the tx run's output is inspected below — the rx
    # run's file name is overwritten by the second getoutput; confirm
    # this is intended.
    fileName = self.__getFileNameFromExecutionOutput(output)
    if fileName != "":
        commands.getstatusoutput(
            paths.STATSWEBPAGESGENERATORS +
            'csvDataFiltersForWebPages.py -c %s -f %s ' % (cost, fileName))
def getConfigFiles():
    """
        @summary : Returns the list of config files to synchronise.

        @return : Returns the list of config files to synchronise.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    # All three live directly under the monitoring folder.
    monitoringFileNames = ['maxSettings.conf', 'previousCrontab', 'previousFileChecksums']
    return [statsPaths.STATSMONITORING + fileName for fileName in monitoringFileNames]
def getMachineParametersFromPreviousCall():
    """
        @summary: Gets the machine parameters that are saved in
                  data/previousMachineParameters.

        @return: The saved machine parameters, or None when no
                 previous parameters file exists.
    """
    paths = StatsPaths()
    paths.setPaths()

    savedParametersFile = paths.STATSPREVIOUSMACHINEPARAMS
    if not os.path.isfile(savedParametersFile):
        return None

    return CpickleWrapper.load(savedParametersFile)
def getListOfFileAccessFiles():
    """
        @summary: Returns the list of file version pickle files
                  currently found on the local machine.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    accessFolder = statsPaths.STATSLOGACCESS
    if not os.path.isdir(accessFolder):
        return []

    # Keep only the entries accepted by the dot-entry filter, then
    # turn each entry into a full path.
    entries = [entry for entry in os.listdir(accessFolder)
               if filterentriesStartingWithDots(entry)]
    return [accessFolder + entry for entry in entries]