Example 1
def cleanUp( configParameters, currentTime, daysOfPicklesToKeep ):
    """
    
        @summary: Based on current time and frequencies contained
                  within the time parameters, we will run 
                  the cleaners that need to be run.       
                            
        @param configParameters: StatsConfigParameters instance.
                               
        @param currentTime: Current time, in seconds since the epoch.
        
        @param daysOfPicklesToKeep: Number of days worth of pickle files to keep.
                                  
    """     
    
    paths = StatsPaths()
    paths.setPaths()
    
    updateManager = AutomaticUpdatesManager(configParameters.nbAutoUpdatesLogsToKeep, "picklecleaner")
    
    if updateManager.updateIsRequired(currentTime) :
        
        output = commands.getoutput( paths.STATSTOOLS + "pickleCleaner.py %s" %int(daysOfPicklesToKeep) )
        print paths.STATSTOOLS + "pickleCleaner.py" + " " + str( daysOfPicklesToKeep )
        updateManager.addAutomaticUpdateToLogs( currentTime )
        
    updateManager = AutomaticUpdatesManager(configParameters.nbAutoUpdatesLogsToKeep, "generalCleaner")    
    
    if updateManager.updateIsRequired(currentTime) :
        commands.getstatusoutput( paths.STATSTOOLS + "clean_dir.plx" + " " + paths.STATSETC + "clean_dir.conf"   )
        print paths.STATSTOOLS + "clean_dir.plx" + " " + paths.STATSETC + "clean_dir.conf" 
        updateManager.addAutomaticUpdateToLogs( currentTime )
Example 2
 def getPreviousMonitoringJob( self, currentTime ):
     """
          @summary : Gets the time of the previous monitoring job from the pickle file.
          
          @return : Time of the previous monitoring job.
          
          @warning : Returns today's midnight (based on currentTime) if the file does not exist.
         
     """     
     
     statsPaths = StatsPaths()
     statsPaths.setPaths()         
     
     file  = "%spreviousMonitoringJob" %statsPaths.STATSMONITORING
     previousMonitoringJob = ""
     
     if os.path.isfile( file ):
         fileHandle      = open( file, "r" )
         previousMonitoringJob = pickle.load( fileHandle )
         fileHandle.close()
         #print previousMonitoringJob
         
     else:
         previousMonitoringJob = StatsDateLib.getIsoTodaysMidnight( currentTime )
         
     #print previousMonitoringJob   
     
     return previousMonitoringJob        
Example 3
    def __updateCsvFiles( self, type, clusters, cost ):
        """
        
            @summary    : Generate the rx and tx csv files
                          for yesterday for all clusters.
            
            @param type : daily | weekly | monthly | yearly 
             
            @param clusters :  List of currently running source clusters.

            @param cost : Total operational cost for the period specified 
                          by the type.
            
            @return : None
    
        """   
        
        paths = StatsPaths()
        paths.setPaths()
        
        typeParameters = {  "daily" : "-d", "weekly" : "-w", "monthly" : "-m", "yearly" : "-y" }
        
        output = commands.getoutput( paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f rx --language %s'  %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage ) )
        #print paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f rx --language %s'  %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage )
           
        output = commands.getoutput( paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f tx --language %s'  %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage ) )
        #print paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f tx --language %s'  %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage )      
        
        fileName = self.__getFileNameFromExecutionOutput(output)
        
        if fileName != "":
            commands.getstatusoutput(paths.STATSWEBPAGESGENERATORS + 'csvDataFiltersForWebPages.py -c %s -f %s ' %(cost, fileName) )
Example 4
 def addAutomaticUpdateToLogs( self, timeOfUpdateInIsoFormat, currentUpdateFrequency  = None ):
    """
        @summary : Writes a new file in the log folder containing 
                   the current update frequency. 
    
        @param timeOfUpdateInIsoFormat: Time that the log entry's file name will bear.
    
        @param currentUpdateFrequency: Frequency to record; defaults to the
                                       current update frequency when None.
    
    """
    
    paths = StatsPaths()
    paths.setPaths()
    fileName = paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/" + str( timeOfUpdateInIsoFormat ).replace( " ", "_" )
    
    # Safety check to make sure the log directory exists.
    if not os.path.isdir( os.path.dirname( fileName ) ):
        os.makedirs( os.path.dirname( fileName ), 0777 )
    
    if currentUpdateFrequency  == None :
        currentUpdateFrequency = self.getCurrentUpdateFrequency()   
    
    CpickleWrapper.save( currentUpdateFrequency, fileName )
        
    allEntries = os.listdir(paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/") 
    
    allEntries.sort()
    
    entriesToRemove = allEntries[ :-self.numberOfLogsToKeep]
    
    for entrytoRemove in entriesToRemove:
        os.remove(paths.STATSTEMPAUTUPDTLOGS  + self.updateType + "/" + entrytoRemove ) 
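
# Illustrative, self-contained sketch of the retention step used above: entry
# names sort chronologically (ISO dates), so keeping only the newest N entries
# is a sort followed by a slice. pruneOldEntries is a hypothetical helper, not
# part of pxStats.
import os

def pruneOldEntries(logDirectory, numberOfLogsToKeep):
    # Assumes numberOfLogsToKeep > 0, as in addAutomaticUpdateToLogs above.
    allEntries = sorted(os.listdir(logDirectory))
    for entry in allEntries[:-numberOfLogsToKeep]:
        os.remove(os.path.join(logDirectory, entry))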
Example 5
 def saveList( self, user, clients ):   
     """
         @summary : Saves list. 
         
          @note : Will include modifications made in the updateFileInlist method.
          
          @param clients : Clients to which the file is related (used to narrow down searches).
          
          @param user   : Name of the client, person, etc. which has a relation with the 
                          file. 
         
         
     """
     statsPaths = StatsPaths()
     statsPaths.setPaths()
     directory = statsPaths.STATSDATA + "fileAccessVersions/"
      
     
     combinedName = ""
     for client in clients:
         combinedName = combinedName + client
     
     fileName  = combinedName + "_" + user 
     
     if not os.path.isdir( directory ):
         os.makedirs( directory, mode=0777 )
         #create directory
     completeFilename = directory + fileName 
     #print "saving %s" %completeFilename
             
     CpickleWrapper.save( object = self.savedFileList, filename = completeFilename )
Example 6
 def __getDocFilesToLinkTo(self, language):
     """    
         @summary : Gathers and returns all the documentation files
                    currently available 
         
          @return : The list of fileNames to link to.
         
     """
     
     filesToLinkTo = []       
     
     statsPaths = StatsPaths()
     statsPaths.setPaths( self.mainLanguage )
     folder = statsPaths.STATSDOC + "html/"
     
     listOfFilesInFolder = os.listdir(folder)
     
     for file in listOfFilesInFolder:
         baseName = os.path.basename(file)
         if( fnmatch.fnmatch( baseName, "*_%s.html"%(language) ) ):
             filesToLinkTo.append( baseName )
     
     filesToLinkTo.sort()
     
     return filesToLinkTo
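
# Illustrative, self-contained sketch of the fnmatch filter used above: only
# names ending in "_<language>.html" are kept. The file names below are made up.
import fnmatch

candidateFiles = ["index_en.html", "index_fr.html", "help_en.html", "logo.png"]
englishDocs = [name for name in candidateFiles
               if fnmatch.fnmatch(name, "*_%s.html" % "en")]
print(englishDocs)  # ['index_en.html', 'help_en.html']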
Example 7
def backupRRDDatabases(configParameters, currentTime, nbBackupsToKeep):
    """
    
        @summary: Based on current time and frequencies contained
                  within the time parameters, we will backup the databases
                  only if necessary.       
                            
        @param configParameters: StatsConfigParameters instance.
                               
        @param currentTime: Current time, in seconds since the epoch.
        
        @param nbBackupsToKeep: Total number of backups to keep.
                                  
    """

    paths = StatsPaths()
    paths.setPaths()

    updateManager = AutomaticUpdatesManager(
        configParameters.nbAutoUpdatesLogsToKeep, "dbBackups")

    if updateManager.updateIsRequired(currentTime):
        commands.getstatusoutput(paths.STATSTOOLS + "backupRRDDatabases.py" +
                                 " " + str(int(nbBackupsToKeep)))
        print paths.STATSTOOLS + "backupRRDDatabases.py" + " " + str(
            nbBackupsToKeep)
        updateManager.addAutomaticUpdateToLogs(currentTime)
Example 8
def cleanUp(configParameters, currentTime, daysOfPicklesToKeep):
    """
    
        @summary: Based on current time and frequencies contained
                  within the time parameters, we will run 
                  the cleaners that need to be run.       
                            
        @param configParameters: StatsConfigParameters instance.
                               
        @param currentTime: Current time, in seconds since the epoch.
        
        @param daysOfPicklesToKeep: Number of days worth of pickle files to keep.
                                  
    """

    paths = StatsPaths()
    paths.setPaths()

    updateManager = AutomaticUpdatesManager(
        configParameters.nbAutoUpdatesLogsToKeep, "picklecleaner")

    if updateManager.updateIsRequired(currentTime):

        output = commands.getoutput(paths.STATSTOOLS + "pickleCleaner.py %s" %
                                    int(daysOfPicklesToKeep))
        print paths.STATSTOOLS + "pickleCleaner.py" + " " + str(
            daysOfPicklesToKeep)
        updateManager.addAutomaticUpdateToLogs(currentTime)

    updateManager = AutomaticUpdatesManager(
        configParameters.nbAutoUpdatesLogsToKeep, "generalCleaner")

    if updateManager.updateIsRequired(currentTime):
        commands.getstatusoutput(paths.STATSTOOLS + "clean_dir.plx" + " " +
                                 paths.STATSETC + "clean_dir.conf")
        print paths.STATSTOOLS + "clean_dir.plx" + " " + paths.STATSETC + "clean_dir.conf"
        updateManager.addAutomaticUpdateToLogs(currentTime)
Example 9
  def getClientsCurrentFileList( self, clients ):
      """
          @summary : Gets all the files associated with the list of clients.
                  
          
          @note : A client list is used here since we need to find all the pickles that will be used in a merger.
                  Thus, unlike all other methods, we don't refer here to the combined name but rather to a list of
                  individual machine names. 
          
          @return : Returns all the files in a dictionary mapping each file to its mtime.
          
      """  
      
      
      fileNames = []
      statsPaths = StatsPaths()
      statsPaths.setPaths()
      
      for client in clients : 
          filePattern = statsPaths.STATSPICKLES + client + "/*/*"  #_??
          folderNames = glob.glob( filePattern )
                      
          
          for folder in folderNames:
              if os.path.isdir( folder ):                    
                  filePattern = folder + "/" + "*_??"
                  fileNames.extend( glob.glob( filePattern ) )       
                  
  
          for fileName in fileNames :
              self.currentClientFileList[fileName] = os.path.getmtime( fileName )            
 
              
      return  self.currentClientFileList       
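
# Illustrative, self-contained sketch of the pattern used above: glob for the
# pickle files, then map each file name to its modification time. The glob
# pattern below is hypothetical; STATSPICKLES would normally provide the root.
import glob, os

currentClientFileList = {}
for fileName in glob.glob("/tmp/pickles/*/*_??"):
    currentClientFileList[fileName] = os.path.getmtime(fileName)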
Example 10
def updateFilesAssociatedWithMachineTags(tagsNeedingUpdates,
                                         machineParameters):
    """
        @summary : For all the tags for which 
                   a machine was changed, we rename all the 
                   files associated with that tag.
        
        @param tagsNeedingUpdates: List of tags that have been modified 
                                   since the last call.
                                             
    
    """

    paths = StatsPaths()
    paths.setPaths()

    previousParameters = getMachineParametersFromPreviousCall()

    for tag in tagsNeedingUpdates:
        previousCombinedMachineNames = ""
        previousCombinedMachineNames = previousCombinedMachineNames.join(
            [x for x in previousParameters.getMachinesAssociatedWith(tag)])

        currentCombinedMachineNames = ""
        currentCombinedMachineNames = currentCombinedMachineNames.join(
            [x for x in machineParameters.getMachinesAssociatedWith(tag)])

        output = commands.getoutput(
            "%sfileRenamer.py -o %s  -n %s --overrideConfirmation" %
            (paths.STATSTOOLS, previousCombinedMachineNames,
             currentCombinedMachineNames))
        print "%sfileRenamer.py -o %s  -n %s --overrideConfirmation" % (
            paths.STATSTOOLS, previousCombinedMachineNames,
            currentCombinedMachineNames)
        print output
Example 11
def restoreDatabaseUpdateTimes(timeToRestore, currentTime, nbBackupsToKeep):
    """
       @summary : Copy all databases into a folder bearing the date of the backup.
        
       @param timeToRestore : Time of the DB backups to set as current DB.
       
       @param currentTime : Time of the call to the script.
       
       @param nbBackupsToKeep : total number of backups to keep.
       
    """

    statsPaths = StatsPaths()
    statsPaths.setPaths()

    source = statsPaths.STATSDBUPDATESBACKUPS + "/%s" % timeToRestore
    destination = statsPaths.STATSCURRENTDBUPDATES

    #Archive current Database
    backupRRDDatabases.backupDatabaseUpdateTimes(currentTime,
                                                 nbBackupsToKeep,
                                                 foldersToPreserve=[source])

    #restore desired
    status, output = commands.getstatusoutput("rm -r %s" % (destination))
    os.makedirs(destination)
    status, output = commands.getstatusoutput("cp -rf %s/* %s" %
                                              (source, destination))
    print output
Example 12
 def getTimeOfLastUpdateInLogs(self):
     """
         
         @summary : Returns the time of the last update in iso format.
    
         @return : Time of the last update in ISO format. If no prior update
                   is found, today's midnight is returned so that an update will be made.
         
     """
     
     timeOfLastUpdate = StatsDateLib.getIsoTodaysMidnight( StatsDateLib.getCurrentTimeInIsoformat() ) 
     
     paths = StatsPaths()
     paths.setPaths()
     
     updatesDirectory = paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/"
     
     if not os.path.isdir( updatesDirectory ):
         os.makedirs(updatesDirectory)       
     allEntries = os.listdir(updatesDirectory) 
     
     if allEntries !=[] :
         allEntries.sort()
         allEntries.reverse() 
         timeOfLastUpdate = os.path.basename( allEntries[0] ).replace( "_"," " )
         
         
     return timeOfLastUpdate
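
# Illustrative, self-contained sketch of how the last update time is derived
# above: the newest entry name wins, and the underscore is turned back into a
# space to recover the ISO timestamp. Entry names below are made up.
allEntries = ["2007-06-01_00:00:00", "2007-06-03_12:30:00", "2007-06-02_08:00:00"]
allEntries.sort()
allEntries.reverse()
timeOfLastUpdate = allEntries[0].replace("_", " ")
print(timeOfLastUpdate)  # 2007-06-03 12:30:00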
Example 13
def updateWordsFromDB(wordType, word, language):
    """    
        @summary: Updates words within the db depending 
                  on the specified type of database.
        
        @param wordType     : Type of word : "products" or "groupName"
        
        @param language      : Language that is currently used by the caller.
        
        @param word         : Word to add to the database
        
        @return             : None     
        
    """

    _ = LanguageTools.getTranslatorForModule(CURRENT_MODULE_ABS_PATH, language)

    statsPaths = StatsPaths()
    statsPaths.setPaths(language)

    if wordType == "products":
        updateWordsFromFile(statsPaths.STATSWEBWORDDATABASES + _('products'),
                            word)
    elif wordType == "groupName":
        updateWordsFromFile(statsPaths.STATSWEBWORDDATABASES + _('groupNames'),
                            word)
Example 14
    def __getAutomaticUpdatesDoneDuringTimeSpan( self, startTime, endtime ):
        """
        
            @summary : Returns the automatic update log entries whose names fall 
                       between startTime and endtime.
            
            @param startTime: Start time of the span in iso format 
            
            @param endtime: end time of the span in iso format

        """
        #set to fit file standard
        startTime = startTime.replace( " ", "_" )
        endtime = endtime.replace( " ", "_" )
        
        def notAfterEndTime(x):
            return x <= endtime
        
        def notBeforeStartTime(x):
            return x >= startTime
        
 
        
        paths = StatsPaths()
        paths.setPaths()
        
        updatesDirectory = paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/"
        updates = os.listdir( updatesDirectory ) 

        updates = filter( notAfterEndTime, updates )

        updates = filter( notBeforeStartTime, updates )
  
        
        return updates
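
# Illustrative, self-contained sketch of the span filter above: because the
# entry names are ISO timestamps with "_" instead of spaces, plain string
# comparison is enough to keep the entries inside the span. Names are made up.
entries   = ["2007-01-01_00:00:00", "2007-01-02_12:00:00", "2007-01-05_00:00:00"]
startTime = "2007-01-01_06:00:00"
endtime   = "2007-01-04_00:00:00"
inSpan = [entry for entry in entries if startTime <= entry <= endtime]
print(inSpan)  # ['2007-01-02_12:00:00']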
Example 15
    def __getDocFilesToLinkTo(self, language):
        """    
            @summary : Gathers and returns all the documentation files
                       currently available 
            
            @return : The list of fileNames to link to.
            
        """

        filesToLinkTo = []

        statsPaths = StatsPaths()
        statsPaths.setPaths(self.mainLanguage)
        folder = statsPaths.STATSDOC + "html/"

        listOfFilesInFolder = os.listdir(folder)

        for file in listOfFilesInFolder:
            baseName = os.path.basename(file)
            if (fnmatch.fnmatch(baseName, "*_%s.html" % (language))):
                filesToLinkTo.append(baseName)

        filesToLinkTo.sort()

        return filesToLinkTo
Example 16
def restoreDatabaseUpdateTimes( timeToRestore, currentTime, nbBackupsToKeep ):
    """
       @summary : Copy all databases into a folder bearing the date of the backup.
        
       @param timeToRestore : Time of the DB backups to set as current DB.
       
       @param currentTime : Time of the call to the script.
       
       @param nbBackupsToKeep : total number of backups to keep.
       
    """

    statsPaths = StatsPaths()
    statsPaths.setPaths()
        
    source = statsPaths.STATSDBUPDATESBACKUPS + "/%s" %timeToRestore
    destination = statsPaths.STATSCURRENTDBUPDATES
    
    #Archive current Database
    backupRRDDatabases.backupDatabaseUpdateTimes( currentTime, nbBackupsToKeep, foldersToPreserve = [ source ] )
    
    #restore desired 
    status, output = commands.getstatusoutput( "rm -r %s" %( destination ) )
    os.makedirs(destination)
    status, output = commands.getstatusoutput( "cp -rf %s/* %s" %( source, destination ) )
    print output
Example 17
def updatePickledTimes( dateToSet = "2006-10-23 09:00:00"  ):
    """
          @summary : Get all the keys then set all of them to the desired date.
    """
    
    statsPaths = StatsPaths()
    statsPaths.setPaths()
    
    folder = statsPaths.STATSPICKLESTIMEOFUPDATES
    
    files = os.listdir(folder)
    for fileName in files :
        fileName = os.path.join( folder, fileName ) # listdir returns base names only
        if os.path.isfile( fileName ):
    
            fileHandle   = open( fileName, "r" )
            pickledTimes = pickle.load( fileHandle )
            fileHandle.close()
            
            
            keys = pickledTimes.keys()
            for key in keys:
                pickledTimes[key] = dateToSet
                
            fileHandle  = open( fileName, "w" )
    
            pickle.dump( pickledTimes, fileHandle )
    
            fileHandle.close()
Example 18
def updateFilesAssociatedWithMachineTags( tagsNeedingUpdates, machineParameters ):   
    """
        @summary : For all the tags for which 
                   a machine was changed, we rename all the 
                   files associated with that tag.
        
        @param tagsNeedingUpdates: List of tags that have been modified 
                                   since the last call.
                                             
    
    """
    
    paths = StatsPaths()
    paths.setPaths()
    
    previousParameters = getMachineParametersFromPreviousCall()
    
    for tag in tagsNeedingUpdates:
        previousCombinedMachineNames = ""
        previousCombinedMachineNames = previousCombinedMachineNames.join( [ x for x in previousParameters.getMachinesAssociatedWith( tag ) ] )
        
        currentCombinedMachineNames = ""
        currentCombinedMachineNames = currentCombinedMachineNames.join( [ x for x in machineParameters.getMachinesAssociatedWith( tag ) ]) 
        
        output = commands.getoutput( "%sfileRenamer.py -o %s  -n %s --overrideConfirmation" %( paths.STATSTOOLS, previousCombinedMachineNames, currentCombinedMachineNames  ) )
        print "%sfileRenamer.py -o %s  -n %s --overrideConfirmation" %( paths.STATSTOOLS, previousCombinedMachineNames, currentCombinedMachineNames  )
        print output 
Example 19
    def getSavedList(self, user, clients):
        """
            @summary : Returns the checksum of the files contained in the saved list.
        
        """

        self.savedFileList = {}

        statsPaths = StatsPaths()
        statsPaths.setPaths()
        directory = statsPaths.STATSDATA + "fileAccessVersions/"

        combinedName = ""
        for client in clients:
            combinedName = combinedName + client

        fileName = combinedName + "_" + user

        try:

            self.savedFileList = CpickleWrapper.load(directory + fileName)

            if self.savedFileList == None:
                self.savedFileList = {}

        except:  # if file does not exist
            pass

        return self.savedFileList
Example 20
def updatePickledTimes(dateToSet="2006-10-23 09:00:00"):
    """
          @summary : Get all the keys then set all of them to the desired date.
    """

    statsPaths = StatsPaths()
    statsPaths.setPaths()

    folder = statsPaths.STATSPICKLESTIMEOFUPDATES

    files = os.listdir(folder)
    for fileName in files:
        fileName = os.path.join(folder, fileName)  # listdir returns base names only
        if os.path.isfile(fileName):

            fileHandle = open(fileName, "r")
            pickledTimes = pickle.load(fileHandle)
            fileHandle.close()

            keys = pickledTimes.keys()
            for key in keys:
                pickledTimes[key] = dateToSet

            fileHandle = open(fileName, "w")

            pickle.dump(pickledTimes, fileHandle)

            fileHandle.close()
Example 21
    def getSavedList( self, user, clients ):
        """
            @summary : Returns the checksum of the files contained in the saved list.
        
        """

        self.savedFileList         = {}
        
        statsPaths = StatsPaths()
        statsPaths.setPaths()
        directory = statsPaths.STATSDATA + "fileAccessVersions/"              
                
        combinedName = ""
        for client in clients:
            combinedName = combinedName + client
        
        fileName  = combinedName + "_" + user            
            
        try :
            
            self.savedFileList = CpickleWrapper.load( directory + fileName )
            
            if self.savedFileList == None :
                self.savedFileList = {}
                
        except: # if file does not exist
            pass
        
        
        return self.savedFileList
Example 22
def main():
    """
        @summary : Gathers options, then makes call to transferPickleToRRD   
    
    """

    paths = StatsPaths()
    paths.setPaths()

    language = 'en'

    setGlobalLanguageParameters()

    createPaths(paths)

    logger = Logger(paths.STATSLOGGING + 'stats_' + 'rrd_transfer' +
                    '.log.notb',
                    'INFO',
                    'TX' + 'rrd_transfer',
                    bytes=10000000)

    logger = logger.getLogger()

    parser = createParser()

    infos = getOptionsFromParser(parser, logger=logger)

    transferPickleToRRD(infos, logger=logger)
Example 23
    def saveList(self, user, clients):
        """
            @summary : Saves list. 
            
            @note : Will include modifications made in the updateFileInlist method.
            
            @param clients : Clients to which the file is related (used to narrow down searches).
            
            @param user   : Name of the client, person, etc. which has a relation with the 
                            file. 
            
            
        """
        statsPaths = StatsPaths()
        statsPaths.setPaths()
        directory = statsPaths.STATSDATA + "fileAccessVersions/"

        combinedName = ""
        for client in clients:
            combinedName = combinedName + client

        fileName = combinedName + "_" + user

        if not os.path.isdir(directory):
            os.makedirs(directory, mode=0777)
            #create directory
        completeFilename = directory + fileName
        #print "saving %s" %completeFilename

        CpickleWrapper.save(object=self.savedFileList,
                            filename=completeFilename)
Example 24
    def getClientsCurrentFileList(self, clients):
        """
            @summary : Gets all the files associated with the list of clients.
                    
            
            @note : A client list is used here since we need to find all the pickles that will be used in a merger.
                    Thus, unlike all other methods, we don't refer here to the combined name but rather to a list of
                    individual machine names. 
            
            @return : Returns all the files in a dictionary mapping each file to its mtime.
            
        """

        fileNames = []
        statsPaths = StatsPaths()
        statsPaths.setPaths()

        for client in clients:
            filePattern = statsPaths.STATSPICKLES + client + "/*/*"  #_??
            folderNames = glob.glob(filePattern)

            for folder in folderNames:
                if os.path.isdir(folder):
                    filePattern = folder + "/" + "*_??"
                    fileNames.extend(glob.glob(filePattern))

            for fileName in fileNames:
                self.currentClientFileList[fileName] = os.path.getmtime(
                    fileName)

        return self.currentClientFileList
Example 25
def transferLogFiles():
    """
        @summary : Log files will not be transferred if the local machine
                   is not designed to be a pickling machine. 
                   
                   If log files are to be transferred, they will be transferred
                   straight from the source.
    """
    
    paths = StatsPaths()
    paths.setPaths()
    
    parameters = StatsConfigParameters()    
    machineParameters = MachineConfigParameters()
    machineParameters.getParametersFromMachineConfigurationFile()
    parameters.getAllParameters()
    individualSourceMachines   = machineParameters.getMachinesAssociatedWithListOfTags( parameters.sourceMachinesTags )
    individualPicklingMachines = machineParameters.getMachinesAssociatedWithListOfTags( parameters.picklingMachines )
        
    for sourceMachine,picklingMachine in map( None, individualSourceMachines, individualPicklingMachines ) :      
               
        if picklingMachine == LOCAL_MACHINE :#pickling to be done here  
            
            userName = machineParameters.getUserNameForMachine(sourceMachine)
            remoteLogPath = paths.getPXPathFromMachine( paths.PXLOG, sourceMachine, userName )
            
            
            print  "rsync -avzr --delete-before -e ssh %s@%s:%s %s%s/ " %( userName , sourceMachine,remoteLogPath , paths.STATSLOGS, sourceMachine  )
            output = commands.getoutput( "rsync -avzr --delete-before -e ssh %s@%s:%s %s%s/ " %( userName , sourceMachine, remoteLogPath, paths.STATSLOGS, sourceMachine  ) )
            print output
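
# Illustrative sketch: the same rsync call can be run without building a shell
# string, using the standard subprocess module with an argument list. All the
# values below are placeholders for the ones computed in transferLogFiles().
import subprocess

userName      = "px"                                        # placeholder
sourceMachine = "source.example.com"                        # placeholder
remoteLogPath = "/apps/px/log/"                             # placeholder
localLogPath  = "/apps/pxStats/logs/source.example.com/"    # placeholder

returnCode = subprocess.call(["rsync", "-avzr", "--delete-before", "-e", "ssh",
                              "%s@%s:%s" % (userName, sourceMachine, remoteLogPath),
                              localLogPath])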
Example 26
def updateCsvFiles():
    """    
        
        @summary : Runs the csv file update utility.
        
    """
    
    paths = StatsPaths()
    paths.setPaths()    
    output = commands.getoutput( "%sgetCsvFilesforWebPages.py" %paths.STATSWEBPAGESGENERATORS )
Example 27
    def __init__( self, directory, fileType, clientNames = None , groupName = "",  timespan = 12,\
                  currentTime = None, productTypes = None, logger = None, logging = True, machines = None,\
                  workingLanguage = None, outputLanguage = None  ):
        """
        
            @summary  : ClientGraphicProducer constructor. 
            
                        CurrentTime format is ISO meaning 
                        "2006-06-8 00:00:00". Will use 
                        current system time by default.   
                        
                        CurrentTime is to be used if a different 
                        time than the system time is to be used. 
                        
                        Very useful for testing or to implement graphic 
                        requests where the user can choose the start time.      
        
        """

        global _
        _ = self.getTranslatorForModule(CURRENT_MODULE_ABS_PATH,
                                        workingLanguage)

        if currentTime is None:
            currentTime = time.time()

        self.directory = directory  # Directory where log files are located.
        self.fileType = fileType  # Type of log files to be used.
        self.machines = machines or []  # Machines for which to collect data.
        self.clientNames = clientNames or [
        ]  # Client name we need to get the data from.
        self.groupName = groupName  # Name for a group of clients to be combined.
        self.timespan = timespan  # Number of hours we want to gather the data from.
        self.currentTime = currentTime  # Time when stats were queried.
        self.productTypes = productTypes or [
            "All"
        ]  # Specific data types on which we'll collect the data.
        self.loggerName = 'graphs'  # Name of the logger
        self.logger = logger  # Logger to use if logging == True.
        self.logging = logging  # Whether or not to enable logging.
        self.outputLanguage = outputLanguage  # Language in which the graphic will be produced in.

        paths = StatsPaths()
        paths.setPaths()
        if logging == True:
            if self.logger is None:  # Enable logging
                if not os.path.isdir(paths.STATSLOGGING):
                    os.makedirs(paths.STATSLOGGING, mode=0777)
                self.logger = Logger( paths.STATSLOGGING  + 'stats_' + self.loggerName + '.log.notb', 'INFO',\
                                      'TX' + self.loggerName, bytes = 10000000  )
                self.logger = self.logger.getLogger()
        else:
            self.logger = None
Example 28
def updateCsvFiles():
    """    
        
        @summary : Runs the csv file update utility.
        
    """

    paths = StatsPaths()
    paths.setPaths()
    output = commands.getoutput("%sgetCsvFilesforWebPages.py" %
                                paths.STATSWEBPAGESGENERATORS)
Example 29
def giveOutPermissionsToFolders(currentlyUsedLanguages):
    """    
        @summary : opens up permissions to folders that 
                   might be required by the web user.
                   
        @param currentlyUsedLanguages: Languages currently set to be 
                                       displayed in the web interface
    
    """

    for language in currentlyUsedLanguages:

        _ = LanguageTools.getTranslatorForModule(CURRENT_MODULE_ABS_PATH,
                                                 language)

        paths = StatsPaths()
        paths.setPaths(language)

        pathsToOpenUp = []

        pathsToOpenUp.append(paths.STATSLOGGING)
        pathsToOpenUp.append(paths.STATSPICKLES)

        pathsToOpenUp.append(paths.STATSDB)

        pathsToOpenUp.append(paths.STATSCURRENTDB)
        pathsToOpenUp.append(paths.STATSCURRENTDB + _("bytecount"))
        pathsToOpenUp.append(paths.STATSCURRENTDB + _("errors"))
        pathsToOpenUp.append(paths.STATSCURRENTDB + _("filecount"))
        pathsToOpenUp.append(paths.STATSCURRENTDB + _("filesOverMaxLatency"))
        pathsToOpenUp.append(paths.STATSCURRENTDB + _("latency"))

        pathsToOpenUp.append(paths.STATSCURRENTDBUPDATES)
        pathsToOpenUp.append(paths.STATSCURRENTDBUPDATES + _("rx"))
        pathsToOpenUp.append(paths.STATSCURRENTDBUPDATES + _("tx"))
        pathsToOpenUp.append(paths.STATSCURRENTDBUPDATES + _("totals"))

        pathsToOpenUp.append(paths.STATSDBBACKUPS)
        pathsToOpenUp.append(paths.STATSDBBACKUPS + "*/" + _("rx"))
        pathsToOpenUp.append(paths.STATSDBBACKUPS + "*/" + _("tx"))
        pathsToOpenUp.append(paths.STATSDBBACKUPS + "*/" + _("totals"))

        pathsToOpenUp.append(paths.STATSGRAPHS)
        pathsToOpenUp.append(paths.STATSGRAPHS + _("others/"))
        pathsToOpenUp.append(paths.STATSGRAPHS + _("others/") + "gnuplot/")
        pathsToOpenUp.append(paths.STATSGRAPHS + _("others/") + "rrd/")

        pathsToOpenUp.append(paths.STATSWEBPAGESHTML + "/popUps/")

        for path in pathsToOpenUp:
            if not os.path.isdir(path):
                os.makedirs(path, 0777)
            commands.getstatusoutput("chmod 0777 %s" % path)
            commands.getstatusoutput("chmod 0777 %s/*" % path)
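
# Illustrative sketch: the chmod calls above can also be done with os.chmod from
# the standard library. The path below is hypothetical; stat constants spell out
# mode 0777 without relying on octal literal syntax.
import os, stat

path = "/tmp/pxStats-demo/popUps"                      # hypothetical path
mode = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO      # equivalent of 0777

if not os.path.isdir(path):
    os.makedirs(path)
os.chmod(path, mode)
for entry in os.listdir(path):
    os.chmod(os.path.join(path, entry), mode)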
Example 30
def transfer(login, machine):
    """
        @summary : Transfers all the required files
                   from specified machine into local 
                   machine.
    
                   Every task is done 10 times.
        
                   This is done in hope that everything 
                   downloaded will be as coherent as possible.
        
                   If a large number of files is to 
                   be transferred, some files that were downloaded
                   at the beginning of the transfer, which can take 
                   up to a few hours, might not be as up to date as 
                   the files downloaded towards the end of the transfer.
   
           
    """

    statsPaths = StatsPaths()
    statsPaths.setPaths()

    localPaths = getLocalPathsToSynchronise()
    buildLocalPaths(localPaths)
    configFiles = getConfigFiles()

    for i in range(10):

        transferLogFiles()

        for localPath in localPaths:
            remotePath = statsPaths.getStatsPathFromMachine(
                localPath, machine, login)
            print "rsync -avzr  --delete-before -e ssh %s@%s:%s %s" % (
                login, machine, remotePath, localPath)
            output = commands.getoutput(
                "rsync -avzr  --delete-before -e ssh %s@%s:%s %s" %
                (login, machine, remotePath, localPath))
            print output

    for configfile in configFiles:
        #get real path
        localPath = os.path.dirname(configfile)
        fileName = os.path.basename(configfile)
        remotePath = statsPaths.getStatsPathFromMachine(
            localPath, machine, login)

        print "rsync -avzr  --delete-before -e ssh %s@%s:%s %s" % (
            login, machine, remotePath + fileName, localPath + fileName)
        output = commands.getoutput(
            "rsync -avzr  --delete-before -e ssh %s@%s:%s %s" %
            (login, machine, remotePath + fileName, localPath + fileName))
        print output
Example 31
def giveOutPermissionsToFolders( currentlyUsedLanguages ):
    """    
        @summary : opens up permissions to folders that 
                   might be required by the web user.
                   
        @param currentlyUsedLanguages: Languages currently set to be 
                                       displayed in the web interface
    
    """
    
    for language in currentlyUsedLanguages:
        
        _ = LanguageTools.getTranslatorForModule(CURRENT_MODULE_ABS_PATH, language)
        
        paths = StatsPaths()        
        paths.setPaths(language)        
        
        pathsToOpenUp = []
        
        pathsToOpenUp.append( paths.STATSLOGGING)
        pathsToOpenUp.append( paths.STATSPICKLES )
        
        pathsToOpenUp.append( paths.STATSDB)
        
        pathsToOpenUp.append( paths.STATSCURRENTDB )        
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("bytecount") )
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("errors")  )
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("filecount") )
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("filesOverMaxLatency"))
        pathsToOpenUp.append( paths.STATSCURRENTDB + _("latency"))      
        
        pathsToOpenUp.append( paths.STATSCURRENTDBUPDATES)
        pathsToOpenUp.append( paths.STATSCURRENTDBUPDATES + _("rx") )
        pathsToOpenUp.append( paths.STATSCURRENTDBUPDATES + _("tx") )
        pathsToOpenUp.append( paths.STATSCURRENTDBUPDATES + _("totals") )        
        
        pathsToOpenUp.append( paths.STATSDBBACKUPS )
        pathsToOpenUp.append( paths.STATSDBBACKUPS + "*/" + _("rx") )
        pathsToOpenUp.append( paths.STATSDBBACKUPS + "*/" + _("tx") )
        pathsToOpenUp.append( paths.STATSDBBACKUPS + "*/" + _("totals") )    
                
        pathsToOpenUp.append( paths.STATSGRAPHS )
        pathsToOpenUp.append( paths.STATSGRAPHS +_("others/"))
        pathsToOpenUp.append( paths.STATSGRAPHS +_("others/") + "gnuplot/")
        pathsToOpenUp.append( paths.STATSGRAPHS +_("others/") + "rrd/")
    
        pathsToOpenUp.append( paths.STATSWEBPAGESHTML + "/popUps/")
        
        for path in pathsToOpenUp:
            if not os.path.isdir(path):
                os.makedirs(path, 0777)
            commands.getstatusoutput( "chmod 0777 %s" %path )
            commands.getstatusoutput( "chmod 0777 %s/*" %path )
Example 32
def buildCsvFileName(infos):
    """ 
    
        @summary: Builds and returns the file name to use for the csv file.
        
        @param infos: _CvsInfos instance containing the required 
                      information to build up the file name.
        
        @return: Return the built up file name.              
                      
    """

    global _

    StatsDateLib.setLanguage(infos.outputLanguage)
    paths = StatsPaths()
    paths.setPaths(infos.outputLanguage)

    machinesStr = str(infos.machinesForLabels).replace('[', '').replace(
        ']', '').replace(',', '').replace("'",
                                          "").replace('"',
                                                      '').replace(' ', '')

    currentYear, currentMonth, currentDay = StatsDateLib.getYearMonthDayInStrfTime(
        StatsDateLib.getSecondsSinceEpoch(infos.start))
    currentWeek = time.strftime(
        "%W", time.gmtime(StatsDateLib.getSecondsSinceEpoch(infos.start)))

    fileName = paths.STATSCSVFILES

    if infos.span == "daily":
        fileName = fileName + "/" + _(
            "daily/") + infos.fileType + "/%s/%s/%s/%s.csv" % (
                machinesStr, currentYear, currentMonth, currentDay)

    elif infos.span == "weekly":
        fileName = fileName + "/" + _(
            "weekly/") + infos.fileType + "/%s/%s/%s.csv" % (
                machinesStr, currentYear, currentWeek)

    elif infos.span == "monthly":
        fileName = fileName + "/" + _(
            "monthly/") + infos.fileType + "/%s/%s/%s.csv" % (
                machinesStr, currentYear, currentMonth)

    elif infos.span == "yearly":
        fileName = fileName + "/" + _(
            "yearly/") + infos.fileType + "/%s/%s.csv" % (machinesStr,
                                                          currentYear)

    StatsDateLib.setLanguage(LanguageTools.getMainApplicationLanguage())

    return fileName
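
# Illustrative, self-contained sketch of the weekly naming scheme built above:
# span / file type / machines / year / week-number. The root folder and names
# below are hypothetical; buildCsvFileName() gets the real root from StatsPaths.
import time

start       = time.mktime((2007, 3, 15, 0, 0, 0, 0, 0, -1))  # seconds since epoch
fileType    = "tx"                                            # hypothetical
machinesStr = "machine1machine2"                              # hypothetical

currentYear = time.strftime("%Y", time.gmtime(start))
currentWeek = time.strftime("%W", time.gmtime(start))

weeklyCsv = "/apps/pxStats/data/csvFiles/weekly/%s/%s/%s/%s.csv" % (
            fileType, machinesStr, currentYear, currentWeek)
print(weeklyCsv)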
Example 33
def main():
    """
        Parses parameters, then calls 
        the transfer method.
    """
    
    setGlobalLanguageParameters()
    paths = StatsPaths()
    paths.setPaths()
    
    
    login   = ""
    machine = ""
    
    if len( sys.argv ) == 3   :
        
        login   = sys.argv[1]
        machine = sys.argv[2]     
        transfer( login, machine )
        
    else:
        print _( "#######################################################################################################" )
        print _( "#" )
        print _( "#    Help for retreiveDataFromMachine.py" )
        print _( "#" )
        print _( "#    This program is to be used to transfer all of the important stats files" )
        print _( "#    from a remote machine to the local machine." )
        print _( "#" )
        print _( "#    If large transfers are to be done, program may take many hours to complete." )
        print _( "#    Output from every operation will be printed as to let the user see exactly what is going on" )
        print _( "#    and allow errors to be detected." )
        print _( "#" )
        print _( "#    Examples : invalid login or other ssh errors" )
        print _( "#" )
        print _( "#    This will also serve as to take out the guesswork as to why the program is taking so long to complete." )
        print _( "#" )
        print _( "#    Log files will not be transferred if local machine is not designed to be a pickling machine. " )
        print _( "#    If log files are to be transferred, they will be transferred straight from the source." )
        print _( "#" )
        print _( "#    *** Make sure %sconfig is filled properly prior to running this script !!! ***") %paths.STATSETC 
        print _( "#    *** Consult pxStats documentation if you are unsure how to fill %sconfig.  ***") %paths.STATSETC 
        print _( "#" )
        print _( "#" )    
        print _( "#" )
        print _( "#    Usage  : Program must receive exactly two arguments." )
        print _( "#" )
        print _( "#    Example : python retreiveDataFromMachine.py login machineName "   )
        print _( "#" )
        print _( "#######################################################################################################")
        print ""
        print ""
        print ""
        sys.exit()
Example 34
 def __init__( self, directory, fileType, clientNames = None , groupName = "",  timespan = 12,\
               currentTime = None, productTypes = None, logger = None, logging = True, machines = None,\
               workingLanguage = None, outputLanguage = None  ):
     """
     
         @summary  : ClientGraphicProducer constructor. 
         
                     CurrentTime format is ISO meaning 
                     "2006-06-8 00:00:00". Will use 
                     current system time by default.   
                     
                      CurrentTime is to be used if a different 
                      time than the system time is to be used. 
                      
                      Very useful for testing or to implement graphic 
                      requests where the user can choose the start time.      
     
     """
     
     global _
     _ = self.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, workingLanguage )
     
     
     if currentTime is None :
         currentTime = time.time()
         
     self.directory    = directory          # Directory where log files are located. 
     self.fileType     = fileType           # Type of log files to be used.    
      self.machines     = machines or []     # Machines for which to collect data. 
     self.clientNames  = clientNames or []  # Client name we need to get the data from.
     self.groupName    = groupName          # Name for a group of clients to be combined.
     self.timespan     = timespan           # Number of hours we want to gather the data from. 
     self.currentTime  = currentTime        # Time when stats were queried.
      self.productTypes  = productTypes or ["All"] # Specific data types on which we'll collect the data.
     self.loggerName   = 'graphs'           # Name of the logger
      self.logger       = logger             # Logger to use if logging == True.
     self.logging      = logging            # Whether or not to enable logging. 
     self.outputLanguage = outputLanguage   # Language in which the graphic will be produced in.
     
     paths = StatsPaths()
     paths.setPaths()
     if logging == True:
         if self.logger is None: # Enable logging
             if not os.path.isdir( paths.STATSLOGGING ):
                 os.makedirs( paths.STATSLOGGING , mode=0777 )
             self.logger = Logger( paths.STATSLOGGING  + 'stats_' + self.loggerName + '.log.notb', 'INFO',\
                                   'TX' + self.loggerName, bytes = 10000000  ) 
             self.logger = self.logger.getLogger()
     else:
         self.logger = None        
Example 35
def getLocalPathsToSynchronise():
    """
        @summary : Returns the list of local paths that need 
                   to be synchronised with the remote machine.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()
    
    paths = [ statsPaths.STATSMONITORING , statsPaths.STATSGRAPHS , statsPaths.STATSPICKLES, statsPaths.STATSCURRENTDB,\
              statsPaths.STATSDBBACKUPS , statsPaths.STATSCURRENTDBUPDATES, statsPaths.STATSDBUPDATESBACKUPS ]
    
    return paths 
Example 36
def getLocalPathsToSynchronise():
    """
        @summary : Returns the list of local paths that need 
                   to be synchronised with the remote machine.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    paths = [ statsPaths.STATSMONITORING , statsPaths.STATSGRAPHS , statsPaths.STATSPICKLES, statsPaths.STATSCURRENTDB,\
              statsPaths.STATSDBBACKUPS , statsPaths.STATSCURRENTDBUPDATES, statsPaths.STATSDBUPDATESBACKUPS ]

    return paths
Example 37
def uploadGraphicFiles( parameters, machineParameters ):
    """
        @summary : Takes all the created daily graphics dedicated to clumbo and 
                   uploads them to the machines specified in the parameters. 
    """
    
    paths = StatsPaths()
    paths.setPaths()
    
    for uploadMachine in parameters.graphicsUpLoadMachines :
        output = commands.getoutput( "scp %s* %s@%s:%s " %( paths.STATSCOLGRAPHS, machineParameters.getUserNameForMachine(uploadMachine), uploadMachine, paths.PDSCOLGRAPHS   ) )
        
        print "scp %s* %s@%s:%s " %( paths.STATSCOLGRAPHS, machineParameters.getUserNameForMachine(uploadMachine),uploadMachine, paths.PDSCOLGRAPHS )
        print output
Example 38
def updateDatabases( parameters, machineParameters, currentTimeInIsoFormat ):
    """
        @summary :  Updates all the required databases by transferring the
                    data found in the pickle files into rrd databases files.
                    
                    First transfers all the pickles into databases for all the clusters.
                    
                    Then combines all the data required by the different groups found 
                    within the config file.
       
        @param parameters: StatsConfigParameters instance containing 
                           the parameters found in the config file.
        
        @param machineParameters: MachineConfigParameters instance containing 
                                  the parameters found in the config file.
        
        @param currentTimeInIsoFormat : Time at which this program was originally 
                                        called.    
                                        
        @return : None
                                                
    """
    
    paths = StatsPaths()
    paths.setPaths()
    
    # Small safety measure in case another instance of the program is already running.
    if transferToDatabaseAlreadyRunning() == False :
        
        for tag in parameters.machinesToBackupInDb :
             machines = machineParameters.getMachinesAssociatedWith(tag)             
             machines = str( machines ).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )
             output = commands.getoutput( "%stransferPickleToRRD.py -m '%s' -e '%s' " %( paths.STATSBIN, machines, currentTimeInIsoFormat )  )
             print  "%stransferPickleToRRD.py -m '%s' " %( paths.STATSBIN, machines )
             print "output:%s" %output
        
        if parameters.groupParameters.groups != []:
            
            for group in  parameters.groupParameters.groups :
                                
                groupMembers = str( parameters.groupParameters.groupsMembers[group]).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )
                groupMachines = str( parameters.groupParameters.groupsMachines[group] ).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )                 
                groupProducts = str( parameters.groupParameters.groupsProducts[group] ).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )
                groupFileTypes = str(parameters.groupParameters.groupFileTypes[group]).replace( "[", "" ).replace( "]", "" ).replace( " ", "" )
               
                output = commands.getoutput( "%stransferPickleToRRD.py -c '%s' -m '%s' -g '%s' -f %s -p '%s' -e '%s' " %( paths.STATSBIN, groupMembers, groupMachines, group, groupFileTypes, groupProducts, currentTimeInIsoFormat  ) )
                print   "%stransferPickleToRRD.py -c '%s' -m '%s' -g '%s' -f %s -p '%s' " %( paths.STATSBIN, groupMembers, groupMachines, group, groupFileTypes, groupProducts  )
                print output
Example 39
def getListOfPickleUpdateFiles():
    """
        @summary: Returns the list of currently 
                  available pickle update files
        
        @return: Returns the list of currently 
                 available pickle update files
    
    """
    
    statsPaths = StatsPaths()
    statsPaths.setPaths()
    
    files = glob.glob( statsPaths.STATSPICKLESTIMEOFUPDATES + '*' )
    
    return files
Example 40
def saveCurrentMachineParameters(machineParameters):
    """
        @summary : Saves the current machineParameters into 
                   the /data/previousMachineParameters file. 
        
        @param machineParameters: Machine parameters to save.
        
    """

    paths = StatsPaths()
    paths.setPaths()

    if not os.path.isdir(os.path.dirname(paths.STATSPREVIOUSMACHINEPARAMS)):
        os.makedirs(os.path.dirname(paths.STATSPREVIOUSMACHINEPARAMS))

    CpickleWrapper.save(machineParameters, paths.STATSPREVIOUSMACHINEPARAMS)
Example 41
def getListOfPickleUpdateFiles():
    """
        @summary: Returns the list of currently 
                  available pickle update files
        
        @return: Returns the list of currently 
                 available pickle update files
    
    """

    statsPaths = StatsPaths()
    statsPaths.setPaths()

    files = glob.glob(statsPaths.STATSPICKLESTIMEOFUPDATES + "*")

    return files
Example 42
def saveCurrentMachineParameters( machineParameters  ):
    """
        @summary : Saves the current machineParameters into 
                   the /data/previousMachineParameters file. 
        
        @param machineParameters: Machine parameters to save.
        
    """
    
    paths = StatsPaths()
    paths.setPaths()
    
    if not os.path.isdir( os.path.dirname( paths.STATSPREVIOUSMACHINEPARAMS ) ):
        os.makedirs( os.path.dirname(paths.STATSPREVIOUSMACHINEPARAMS) )
    
    CpickleWrapper.save( machineParameters, paths.STATSPREVIOUSMACHINEPARAMS)
Example 43
    def __updateCsvFiles(self, type, clusters, cost):
        """
        
            @summary    : Generate the rx and tx csv files
                          for yesterday for all clusters.
            
            @param type : daily | weekly | monthly | yearly 
             
            @param clusters :  List of currently running source clusters.

            @param cost : Total operational cost for the period specified 
                          by the type.
            
            @return : None
    
        """

        paths = StatsPaths()
        paths.setPaths()

        typeParameters = {
            "daily": "-d",
            "weekly": "-w",
            "monthly": "-m",
            "yearly": "-y"
        }

        output = commands.getoutput(
            paths.STATSBIN +
            'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f rx --language %s'
            % (typeParameters[type], clusters, self.timeOfRequest,
               self.outputLanguage))
        #print paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f rx --language %s'  %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage )

        output = commands.getoutput(
            paths.STATSBIN +
            'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f tx --language %s'
            % (typeParameters[type], clusters, self.timeOfRequest,
               self.outputLanguage))
        #print paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f tx --language %s'  %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage )

        fileName = self.__getFileNameFromExecutionOutput(output)

        if fileName != "":
            commands.getstatusoutput(
                paths.STATSWEBPAGESGENERATORS +
                'csvDataFiltersForWebPages.py -c %s -f %s ' % (cost, fileName))
Example 44
def transfer( login, machine ):
    """
        @summary : Transfers all the required files
                   from specified machine into local 
                   machine.
    
                   Every task is done 10 times.
        
                   This is done in hope that everything 
                   downloaded will be as coherent as possible.
        
                   If a large number of files is to 
                   be transferred, some files that were downloaded
                   at the beginning of the transfer, which can take 
                   up to a few hours, might not be as up to date as 
                   the files downloaded towards the end of the transfer.
   
           
    """    
    
    statsPaths = StatsPaths()
    statsPaths.setPaths()
    
    localPaths = getLocalPathsToSynchronise()
    buildLocalPaths( localPaths )
    configFiles = getConfigFiles()
            
    for i in range( 10 ):
                
        transferLogFiles()
        
        for localPath in localPaths :
            remotePath = statsPaths.getStatsPathFromMachine( localPath, machine, login )
            print "rsync -avzr  --delete-before -e ssh %s@%s:%s %s"  %( login, machine, remotePath, localPath )
            output = commands.getoutput( "rsync -avzr  --delete-before -e ssh %s@%s:%s %s"  %( login, machine, remotePath, localPath ) )
            print output        
             
            
    for configfile in configFiles:
        #get real path 
        localPath = os.path.dirname( configfile )
        fileName  = os.path.basename( configfile )
        remotePath = statsPaths.getStatsPathFromMachine( localPath, machine, login )
        
        print "rsync -avzr  --delete-before -e ssh %s@%s:%s %s"  %( login, machine, remotePath + fileName, localPath + fileName )
        output = commands.getoutput( "rsync -avzr  --delete-before -e ssh %s@%s:%s %s"  %( login, machine, remotePath + fileName, localPath + fileName ) )
        print output       
Example 45
def getConfigFiles() :
    """            
        @summary : Returns the list of config files to synchronise.
        
        @return  : Returns the list of config files to synchronise.
         
    """
    
    statsPaths = StatsPaths()
    statsPaths.setPaths()
    
    configFiles = []
    configFiles.append( statsPaths.STATSMONITORING + 'maxSettings.conf' )
    configFiles.append( statsPaths.STATSMONITORING + 'previousCrontab' )      
    configFiles.append( statsPaths.STATSMONITORING + 'previousFileChecksums')
    
    return configFiles
Example 46
def getMachineParametersFromPreviousCall() :
    """
        @summary: Gets the machine parameters that are 
                  saved in data/previousMachineParameters.   
        
        @return: Returns the saved machine parameters. 
    
    """
    
    paths = StatsPaths()
    paths.setPaths()
    
    previousMachineParams = None
    if os.path.isfile( paths.STATSPREVIOUSMACHINEPARAMS ):
        previousMachineParams = CpickleWrapper.load( paths.STATSPREVIOUSMACHINEPARAMS )
    
    return  previousMachineParams  
Example 47
def getConfigFiles():
    """            
        @summary : Returns the list of config files to synchronise.
        
        @return  : Returns the list of config files to synchronise.
         
    """

    statsPaths = StatsPaths()
    statsPaths.setPaths()

    configFiles = []
    configFiles.append(statsPaths.STATSMONITORING + 'maxSettings.conf')
    configFiles.append(statsPaths.STATSMONITORING + 'previousCrontab')
    configFiles.append(statsPaths.STATSMONITORING + 'previousFileChecksums')

    return configFiles
Example 48
def getMachineParametersFromPreviousCall():
    """
        @summary: Gets the machine parameters that are 
                  saved in data/previousMachineParameters.   
        
        @return: Returns the saved machine parameters. 
    
    """

    paths = StatsPaths()
    paths.setPaths()

    previousMachineParams = None
    if os.path.isfile(paths.STATSPREVIOUSMACHINEPARAMS):
        previousMachineParams = CpickleWrapper.load(
            paths.STATSPREVIOUSMACHINEPARAMS)

    return previousMachineParams
Esempio n. 49
0
 def previousUpdateFrequency(self): 
     """   
         
         @summary : Finds and returns the frequency 
                    of the previous update.
         
          @return : The frequency of the previous update.
                                 
     """
     
     paths = StatsPaths()
     paths.setPaths()
     
     lastUpdate = self.getTimeOfLastUpdateInLogs()
     fileName = paths.STATSTEMPAUTUPDTLOGS + str(lastUpdate).replace( " ", "_" )
     lastUpdateFrequency = CpickleWrapper.load(fileName)
     
     return  lastUpdateFrequency
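The lookup above assumes that each automatic update was logged as a pickle file named after its ISO timestamp, with spaces replaced by underscores, under paths.STATSTEMPAUTUPDTLOGS. Below is a hypothetical sketch of the writing side of that convention; the real writer is presumably AutomaticUpdatesManager.addAutomaticUpdateToLogs, which is not shown in these examples.

import cPickle
import os

def logUpdateFrequency( logsFolder, isoUpdateTime, frequency ):
    # The file name mirrors the read done in previousUpdateFrequency():
    # the ISO date of the update with spaces turned into underscores.
    if not os.path.isdir( logsFolder ):
        os.makedirs( logsFolder )
    fileName = logsFolder + str( isoUpdateTime ).replace( " ", "_" )
    fileHandle = open( fileName, "wb" )
    cPickle.dump( frequency, fileHandle )
    fileHandle.close()

# Hypothetical call; the folder and the frequency value are placeholders:
# logUpdateFrequency( "/tmp/statsTempAutUpdtLogs/", "2008-02-01 06:00:00", "daily" )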
Esempio n. 50
0
def copySourceFiles( currentlyUsedLanguages ):
    """
        @summary : Makes sure the source files are available in the 
                   source folder of each language.
                            
    """
    
    statsPaths = StatsPaths()
    statsPaths.setPaths( 'en' )
    englishSourceFolder = statsPaths.STATSWEBPAGES
    
    for language in currentlyUsedLanguages:
        if language != 'en' :
            statsPaths.setPaths( language )
            destinationFolder = statsPaths.STATSWEBPAGES
            if not os.path.isdir( destinationFolder ) :
                os.makedirs( destinationFolder )
            commands.getstatusoutput( "cp -r %s/* %s/" %( englishSourceFolder, destinationFolder )   )
Esempio n. 51
0
def getListOfFileAccessFiles():  
    """
        @summary: Returns the list of file access pickle files 
                  currently found on the local machine. 
    
    """   
    
    statsPaths = StatsPaths()
    statsPaths.setPaths()
        
    listOfFileAccessFiles = []
    
    if os.path.isdir(statsPaths.STATSLOGACCESS) :
        
        listOfFileAccessFiles = os.listdir( statsPaths.STATSLOGACCESS )
        listOfFileAccessFiles = filter( filterentriesStartingWithDots, listOfFileAccessFiles )
        listOfFileAccessFiles = [ statsPaths.STATSLOGACCESS + file for file in  listOfFileAccessFiles ]
    
    return listOfFileAccessFiles                
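filterentriesStartingWithDots is referenced above but not defined in this example; it presumably keeps only the directory entries whose names do not start with a dot. Below is a minimal sketch under that assumption.

def filterentriesStartingWithDots( entry ):
    # Assumed behaviour: reject hidden entries such as ".svn" so that only
    # real file access pickle files are kept by the filter() call above.
    return not entry.startswith( "." )

# filter( filterentriesStartingWithDots, [ ".svn", "machine1", "machine2" ] )
# would then return [ 'machine1', 'machine2' ].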
Esempio n. 52
0
 def __init__( self, accessDictionary = None, accessFile = "" ):
     """
         @summary:  LogFileAccessManager constructor. 
         
         @param accessDictionary: Dictionary containing the file access data.
         @param accessFile: Name of the file that holds the saved file access data.
     
     """
     paths = StatsPaths()
     paths.setPaths()
     
     if accessFile =="":
         accessFile = paths.STATSLOGACCESS + "default"
             
     self.accessDictionary = accessDictionary or {} # Empty dictionary to start with.
     self.accessFile = accessFile # File that contains the current file access data.
     
     if self.accessDictionary == {} and os.path.isfile( self.accessFile ): 
         self.loadAccessFile()
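A hypothetical usage sketch of the constructor above; the file name is a placeholder, real access files live under StatsPaths.STATSLOGACCESS.

# Build a manager bound to a specific access file; if the file already
# exists and no dictionary is supplied, the constructor loads it.
manager = LogFileAccessManager( accessFile = "/tmp/statsLogAccess/someMachine" )
print manager.accessFile
print manager.accessDictionary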
Esempio n. 53
0
def copySourceFiles(currentlyUsedLanguages):
    """
        @summary : Makes sure the source files are available in the 
                   source folder of each language.
                            
    """

    statsPaths = StatsPaths()
    statsPaths.setPaths('en')
    englishSourceFolder = statsPaths.STATSWEBPAGES

    for language in currentlyUsedLanguages:
        if language != 'en':
            statsPaths.setPaths(language)
            destinationFolder = statsPaths.STATSWEBPAGES
            if not os.path.isdir(destinationFolder):
                os.makedirs(destinationFolder)
            commands.getstatusoutput("cp -r %s/* %s/" %
                                     (englishSourceFolder, destinationFolder))
Esempio n. 54
0
 def getParametersFromMonitoringConfigurationFile( self ):
     """
         @summary : Gathers all the parameters from the StatsPaths.STATSETC/monitoringConf file.
         
         @return :  All collected values in this order emails, machines,
                    files, folders, maxUsages, errorsLogFile, maxSettingsFile.
     
     """   
     
     statsPaths = StatsPaths()
     statsPaths.setPaths()
 
     CONFIG = statsPaths.STATSETC +"monitoringConf" 
     config = ConfigParser()
     
     if os.path.isfile( CONFIG ):
         file = open( CONFIG )
         config.readfp( file ) 
         
         self.emails        = config.get( 'statsMonitoring', 'emails' ).split( ";" )
         self.sender        = config.get( 'statsMonitoring', 'sender' )
         self.smtpServer    = config.get( 'statsMonitoring', 'smtpServer' )
         self.machines      = config.get( 'statsMonitoring', 'machines' ).split( ";" )
         self.files         = config.get( 'statsMonitoring', 'files' ).split( ";" )
         self.folders       = config.get( 'statsMonitoring', 'folders' ).split( ";" )
         self.maxUsages     = config.get( 'statsMonitoring', 'maxUsages' ).split( ";" )
         self.errorsLogFile = config.get( 'statsMonitoring', 'errorsLogFile' )
         self.maxSettingsFile=config.get( 'statsMonitoring', 'maxSettingsFile' )
                
         self.endTime = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( time.time() ) )            
         self.startTime = self.getPreviousMonitoringJob(self.endTime)
         self.maximumGaps = self.getMaximumGaps( )
         self.updateMachineNamesBasedOnExistingMachineTags()
         
         try:
             file.close()
         except:
             pass
         
     else:
         #print "%s configuration file not present. Please restore file prior to running" %CONFIG
         raise Exception( "%s configuration file not present. Please restore file prior to running" %CONFIG ) 
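The method above expects an INI-style monitoringConf file under StatsPaths.STATSETC, with a single [statsMonitoring] section in which multi-valued options are separated by semicolons. Below is a hypothetical excerpt illustrating the expected shape; the addresses, machine names, paths and thresholds are placeholders, only the option names come from the code above.

[statsMonitoring]
emails = admin@example.com;ops@example.com
sender = stats@example.com
smtpServer = smtp.example.com
machines = machine1;machine2
files = /apps/px/etc/px.conf;/apps/px/etc/stats.conf
folders = /apps/px/log/;/apps/px/txq/
maxUsages = 80;90
errorsLogFile = /apps/px/stats/data/errorsLog
maxSettingsFile = /apps/px/stats/statsMonitoring/maxSettings.conf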
Esempio n. 55
0
 def __updateMaxSettingsFile( self, parameters ):
         """
             @summary : Downloads the latest version of the 
                        maxSettings.conf file from the first
                        configured upload machine.
            
             @param parameters : StatsConfig parameters instance.
 
         """    
         
         paths = StatsPaths()
         paths.setPaths()   
         
         if not os.path.isdir(paths.STATSMONITORING) :
             os.makedirs(paths.STATSMONITORING)
         if len( parameters.detailedParameters.uploadMachines ) != 0:
             machine = parameters.detailedParameters.uploadMachines[0]
             login = parameters.detailedParameters.uploadMachinesLogins[machine]
         
             commands.getstatusoutput( "scp %s@%s:%smaxSettings.conf %smaxSettings.conf >>/dev/null 2>&1" %(login, machine, paths.PDSCOLETC, paths.STATSMONITORING ) )         
Esempio n. 56
0
    def __init__(self, accessDictionary=None, accessFile=""):
        """
            @summary:  LogFileAccessManager constructor. 
            
            @param accessDictionary: Dictionary containing the file access data.
            @param accessFile: Name of the file that holds the saved file access data.
        
        """
        paths = StatsPaths()
        paths.setPaths()

        if accessFile == "":
            accessFile = paths.STATSLOGACCESS + "default"

        self.accessDictionary = accessDictionary or {}  # Empty dictionary to start with.
        self.accessFile = accessFile  # File that contains the current file access data.

        if self.accessDictionary == {} and os.path.isfile(self.accessFile):
            self.loadAccessFile()
Esempio n. 57
0
def uploadGraphicFiles(parameters, machineParameters):
    """
        @summary : Takes all the created daily graphics dedicated to clumbo and 
                   uploads them to the machines specified in the parameters. 
    """

    paths = StatsPaths()
    paths.setPaths()

    for uploadMachine in parameters.graphicsUpLoadMachines:
        output = commands.getoutput(
            "scp %s* %s@%s:%s " %
            (paths.STATSCOLGRAPHS,
             machineParameters.getUserNameForMachine(uploadMachine),
             uploadMachine, paths.PDSCOLGRAPHS))

        print "scp %s* %s@%s:%s " % (
            paths.STATSCOLGRAPHS,
            machineParameters.getUserNameForMachine(uploadMachine),
            uploadMachine, paths.PDSCOLGRAPHS)
        print output
Esempio n. 58
0
def updateWebPages(generalParameters):
    """
        @summary : Generates all the required web pages
                   based on the language parameters found within
                   the configuration files.
            
    """

    paths = StatsPaths()
    paths.setPaths()

    generalParameters = StatsConfigParameters()
    generalParameters.getAllParameters()

    otherLanguages = []

    generatorsTypes = [
        DailyGraphicsWebPageGenerator, WeeklyGraphicsWebPageGenerator,
        MonthlyGraphicsWebPageGenerator, YearlyGraphicsWebPageGenerator,
        TotalsGraphicsWebPageGenerator
    ]

    for languagePair in generalParameters.webPagesLanguages:
        for generatorsType in generatorsTypes:
            generator = generatorsType(languagePair[0], languagePair[1])
            generator.generateWebPage()

        topWebPageGenerator = TopWebPageGenerator(languagePair[0])
        topWebPageGenerator.generateTopWebPage()
        otherLanguages.append(languagePair[0])

    # Remove every occurrence of the main application language; the remaining
    # entries are passed to the bottom web page generator as alternate languages.
    otherLanguages = [
        language for language in otherLanguages
        if language != generalParameters.mainApplicationLanguage
    ]

    bottomWebPageGenerator = BottomWebPageGenerator(
        generalParameters.mainApplicationLanguage, otherLanguages)
    bottomWebPageGenerator.printWebPage()
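A hypothetical driver for the function above; note that updateWebPages() re-reads the configuration itself, so the argument mainly documents the caller's intent.

# Load the configured parameters and regenerate every web page
# for every configured language pair.
generalParameters = StatsConfigParameters()
generalParameters.getAllParameters()
updateWebPages( generalParameters )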