Example no. 1
    def getSavedList( self, user, clients ):
        """
            @summary : Returns the checksums of the files contained in the saved list.
        
        """

        self.savedFileList         = {}
        
        statsPaths = StatsPaths()
        statsPaths.setPaths()
        directory = statsPaths.STATSDATA + "fileAccessVersions/"              
                
        combinedName = ""
        for client in clients:
            combinedName = combinedName + client
        
        fileName  = combinedName + "_" + user            
            
        try :
            
            self.savedFileList = CpickleWrapper.load( directory + fileName )
            
            if self.savedFileList == None :
                self.savedFileList = {}
                
        except: # if file does not exist
            pass
        
        
        return self.savedFileList
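
Every example here goes through CpickleWrapper.load (and, later, CpickleWrapper.save), whose source is not included. Below is a minimal sketch of what such a wrapper plausibly looks like, assuming it is a thin guard around Python 2's cPickle module; only the method names load and save come from the examples, and the behaviour of returning None for a missing file is inferred from the `tempCollection != None` checks further down. This is a hypothetical reconstruction, not the project's actual source.

# Hedged sketch of a CpickleWrapper: a thin guard around cPickle that
# returns None instead of raising when the file is missing.
import cPickle
import os


class CpickleWrapper:

    @staticmethod
    def load( fileName ):
        """ Return the unpickled object, or None if the file is unusable. """
        if not os.path.isfile( fileName ):
            return None
        fileHandle = open( fileName, 'rb' )
        try:
            return cPickle.load( fileHandle )
        finally:
            fileHandle.close()

    @staticmethod
    def save( objectToSave, fileName ):
        """ Pickle objectToSave to fileName, creating parent dirs if needed. """
        directory = os.path.dirname( fileName )
        if directory and not os.path.isdir( directory ):
            os.makedirs( directory )
        fileHandle = open( fileName, 'wb' )
        try:
            cPickle.dump( objectToSave, fileHandle )
        finally:
            fileHandle.close()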
Example no. 2
 def loadAccessFile(self):        
     """
         @summary: Loads the accessFile into the accessDictionary.
         
     """
     
     self.accessDictionary = CpickleWrapper.load( self.accessFile )
Example no. 3
def printPickle(pickle, outputFile=""):
    """
        Print the content of a pickle file containing a FileStatsCollector
        instance on the desired output.

        Default output is the console screen.

        An output file can be specified to make reading easier.
    
    """

    if outputFile != "":

        fileHandle = open(outputFile, 'w')
        old_stdout = sys.stdout
        sys.stdout = fileHandle

    statsCollection = CpickleWrapper.load(pickle)

    print _("Pickle used : %s") % pickle
    print _("\n\nFiles used : %s") % statsCollection.files
    print _("Starting date: %s") % statsCollection.startTime

    print _("Interval: %s") % statsCollection.interval
    print _("End time : %s") % statsCollection.endTime
    print _("nbEntries : %s") % statsCollection.nbEntries

    for j in range(statsCollection.nbEntries):

        print _("\nEntry's interval : %s - %s ") % (
            statsCollection.fileEntries[j].startTime,
            statsCollection.fileEntries[j].endTime)
        print _("Files : ")
        print statsCollection.fileEntries[j].files
        print _("Products : ")
        print statsCollection.fileEntries[j].values.productTypes
        print _("Values :")
        print statsCollection.fileEntries[j].values.dictionary
        print _("Means :")
        print statsCollection.fileEntries[j].means
        print _("Medians")
        print statsCollection.fileEntries[j].medians
        print _("Minimums")
        print statsCollection.fileEntries[j].minimums
        print _("Maximums")
        print statsCollection.fileEntries[j].maximums
        print _("Time where max occured :")
        print statsCollection.fileEntries[j].timesWhereMaxOccured
        print _("Total")
        print statsCollection.fileEntries[j].totals
        print _("Files over maximum latency")
        print statsCollection.fileEntries[j].filesOverMaxLatency

    if outputFile != "":
        fileHandle.close()
        sys.stdout = old_stdout  #resets standard output
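
printPickle swaps sys.stdout for a file handle and restores it at the end, so an exception while printing would leave stdout redirected for the rest of the process. Below is a hedged sketch of the same pattern hardened with try/finally; printRedirected is a hypothetical helper, not part of the codebase.

# Hedged sketch: the same stdout-redirection idea, but sys.stdout is
# always restored even if the wrapped print function raises.
import sys

def printRedirected( printFunction, argument, outputFile ):
    """ Run printFunction(argument) with stdout redirected to outputFile. """
    fileHandle = open( outputFile, 'w' )
    old_stdout = sys.stdout
    sys.stdout = fileHandle
    try:
        printFunction( argument )   # e.g. printPickle( pickle )
    finally:
        sys.stdout = old_stdout     # restore stdout even on error
        fileHandle.close()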
Example no. 4
 def mergePicklesFromDifferentHours( logger = None , startTime = "2006-07-31 13:00:00",\
                                     endTime = "2006-07-31 19:00:00", client = "satnet",\
                                     machine = "pdsPM", fileType = "tx" ):
     """
         @summary : This method merges entire hourly pickle files together.

         @note    : This method does not support merging only part of a pickle's data.
     
     """
     
     if logger != None :
         logger.debug( _("Call to mergeHourlyPickles received.") )
         logging = True
     else:
         logging = False
             
     pickles = []
     entries = {}
     width = StatsDateLib.getSecondsSinceEpoch( endTime ) - StatsDateLib.getSecondsSinceEpoch( startTime )
     startTime = StatsDateLib.getIsoWithRoundedHours( startTime )
     
     separators = [startTime]
     separators.extend( StatsDateLib.getSeparatorsWithStartTime( startTime = startTime, width = width, interval = 60 * StatsDateLib.MINUTE )[:-1] )

     for separator in separators:
         pickles.append( StatsPickler.buildThisHoursFileName( client = client, offset = 0, currentTime = separator, machine = machine, fileType = fileType ) )
     
     
     startingNumberOfEntries = 0
     #print "prior to loading and merging pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) ) 
     for pickle in pickles : 
         
         if os.path.isfile( pickle ) :
             
                 
             tempCollection = CpickleWrapper.load( pickle )
             if tempCollection != None :
                 for i in xrange( len( tempCollection.fileEntries )  ):
                     entries[startingNumberOfEntries + i] = tempCollection.fileEntries[i]
                 startingNumberOfEntries = startingNumberOfEntries + len( tempCollection.fileEntries ) 
             else:
                 # a pickle that exists but cannot be loaded is fatal
                 sys.exit()
         else:
                        
             emptyEntries =  PickleMerging.fillWithEmptyEntries( nbEmptyEntries = 60, entries = {} )
             for i in xrange( 60 ):
                 entries[i + startingNumberOfEntries ] = emptyEntries [i]
             startingNumberOfEntries = startingNumberOfEntries + 60
     
     #print "after the  loading and merging og pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) )        
     
     statsCollection = FileStatsCollector(  startTime = startTime , endTime = endTime, interval = StatsDateLib.MINUTE, totalWidth = width, fileEntries = entries,fileType= fileType, logger = logger, logging = logging )
        
             
     return statsCollection        
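
mergePicklesFromDifferentHours builds one hourly pickle name per hour boundary between startTime and endTime via StatsDateLib, which is not shown here. Below is a self-contained sketch of the same hourly-separator computation using only the standard library; hourlySeparators is a hypothetical stand-in that assumes the "YYYY-MM-DD HH:MM:SS" format seen in the defaults and UTC timestamps.

# Hedged sketch of the hourly separators the merge loop iterates over:
# one ISO timestamp per hour boundary in [startTime, endTime).
import time, calendar

def hourlySeparators( startTime, endTime ):
    fmt = "%Y-%m-%d %H:%M:%S"
    start = calendar.timegm( time.strptime( startTime, fmt ) )
    end = calendar.timegm( time.strptime( endTime, fmt ) )
    start = start - ( start % 3600 )   # round down to the hour
    separators = []
    current = start
    while current < end:
        separators.append( time.strftime( fmt, time.gmtime( current ) ) )
        current = current + 3600
    return separators

# hourlySeparators( "2006-07-31 13:00:00", "2006-07-31 19:00:00" )
# -> ['2006-07-31 13:00:00', ..., '2006-07-31 18:00:00']  (6 hourly pickles)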
Example no. 5
 def changeInUpdateFrenquencyFoundDuringTimespan( self, startTime, endTime ):
     """
         @summary : Searches whether or not there was a change during the specified timespan.

         @param startTime : Start time, in ISO format, of the time span to survey.

         @param endTime : End time, in ISO format, of the time span to survey.

         @return : Whether or not a change was found, plus the original
                   frequency and the new frequency.
     """
     
     changeWasMade = False
     # initialise the return values so they are defined even when no
     # automatic updates were done during the timespan
     originalUpdateFrequency = None
     newUpdateFrequency = None
     paths = StatsPaths()
     paths.setPaths()
     
     
     updatesDoneDuringTimespan = self.__getAutomaticUpdatesDoneDuringTimeSpan( startTime, endTime )
     updatesDoneDuringTimespan.sort()
     
     if updatesDoneDuringTimespan != []:
         
         fileName = paths.STATSTEMPAUTUPDTLOGS + str(updatesDoneDuringTimespan[0]).replace( " ", "_" )
         originalUpdateFrequency = CpickleWrapper.load(fileName)
         newUpdateFrequency = originalUpdateFrequency
         for update in updatesDoneDuringTimespan:
             fileName = paths.STATSTEMPAUTUPDTLOGS + str(update).replace( " ", "_" )
             newUpdateFrequency = CpickleWrapper.load(fileName)
             if newUpdateFrequency != originalUpdateFrequency:
                 changeWasMade = True
                 break
     
    
    
     return changeWasMade, originalUpdateFrequency, newUpdateFrequency 
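
The loop above reduces to a first-difference scan over the frequencies recorded for each update. Below is a hedged, self-contained sketch of that core logic detached from StatsPaths and the pickled log files; frequencyChanged is a hypothetical helper mirroring the method's three return values.

# Hedged sketch of the change-detection idea: walk the frequencies in
# order and stop at the first one that differs from the original.
def frequencyChanged( frequencies ):
    """ Return ( changeWasMade, originalFrequency, newFrequency ). """
    if not frequencies:
        return False, None, None
    originalFrequency = frequencies[0]
    for newFrequency in frequencies:
        if newFrequency != originalFrequency:
            # first frequency differing from the original: a change was made
            return True, originalFrequency, newFrequency
    return False, originalFrequency, originalFrequency

# frequencyChanged( [60, 60, 30] ) -> (True, 60, 30)
# frequencyChanged( [60, 60] )     -> (False, 60, 60)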
Example no. 6
def getMachineParametersFromPreviousCall() :
    """
        @summary: Gets the machine parameters that are 
                  saved in data/previousMachineParameters.   
        
        @return: Returns the saved machine parameters. 
    
    """
    
    paths = StatsPaths()
    paths.setPaths()
    
    previousMachineParams = None
    if os.path.isfile( paths.STATSPREVIOUSMACHINEPARAMS ):
        previousMachineParams = CpickleWrapper.load( paths.STATSPREVIOUSMACHINEPARAMS )
    
    return  previousMachineParams  
Example no. 7
 def previousUpdateFrequency(self): 
     """   
         
         @summary : Finds and returns the frequency 
                    of the previous update.
         
         @return : The frequency of the previous update.
                                 
     """
     
     paths = StatsPaths()
     paths.setPaths()
     
     lastUpdate = self.getTimeOfLastUpdateInLogs()
     fileName = paths.STATSTEMPAUTUPDTLOGS + str(lastUpdate).replace( " ", "_" )
     lastUpdateFrequency = CpickleWrapper.load(fileName)
     
     return  lastUpdateFrequency
Example no. 8
 def mergePicklesFromSameHour( logger = None , pickleNames = None, mergedPickleName = "",\
                               clientName = "" , combinedMachineName = "", currentTime = "",\
                               fileType = "tx" ):
     """
         @summary: This method receives a list of filenames referring to pickled FileStatsEntries.

                   After the merge, the resulting pickle is saved, since it might be reused elsewhere.

         @precondition:  Pickles must cover the same timespan and bucket width.
                         If not, no merging will occur.
         
     """
     
     
     if logger != None : 
         logger.debug( _("Call to mergePickles received.") )
         logging = True
     else:
         logging = False
             
     entryList = []
     
     
     for pickle in pickleNames: # for every pickle we need to merge
         
         if os.path.isfile( pickle ):
             
             entryList.append( CpickleWrapper.load( pickle ) )
                         
         else:#Use empty entry if there is no existing pickle of that name
             
             endTime = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( currentTime ) + StatsDateLib.HOUR ) 
             entryList.append( FileStatsCollector( startTime = currentTime, endTime = endTime,logger =logger, logging =logging   ) )         
             
             if logger != None :
                 logger.warning( _("Pickle named %s did not exist. Empty entry was used instead.") %pickle )    
     
     
     # start off with a carbon copy of the first pickle in the list.
     newFSC = FileStatsCollector( files = entryList[0].files , statsTypes =  entryList[0].statsTypes, startTime = entryList[0].startTime,\
                                  endTime = entryList[0].endTime, interval=entryList[0].interval, totalWidth = entryList[0].totalWidth,\
                                  firstFilledEntry = entryList[0].firstFilledEntry, lastFilledEntry = entryList[0].lastFilledEntry,\
                                  maxLatency = entryList[0].maxLatency, fileEntries = entryList[0].fileEntries,logger = logger,\
                                  logging = logging )
              
     if PickleMerging.entryListIsValid( entryList ) == True :
         
         for i in range ( 1 , len( entryList ) ): #add other entries 
             
             for file in entryList[i].files :
                 if file not in newFSC.files :
                     newFSC.files.append( file ) 
             
             for j in range( len( newFSC.fileEntries ) ) : # add all entries                        
                 
                 newFSC.fileEntries[j].values.productTypes.extend( entryList[i].fileEntries[j].values.productTypes )
                 newFSC.fileEntries[j].files.extend( entryList[i].fileEntries[j].files )
                 newFSC.fileEntries[j].times.extend( entryList[i].fileEntries[j].times )  
                 newFSC.fileEntries[j].nbFiles = newFSC.fileEntries[j].nbFiles + entryList[i].fileEntries[j].nbFiles # add the other entry's count, not our own
                 
                 for type in newFSC.statsTypes :
                     newFSC.fileEntries[j].values.dictionary[type].extend( entryList[i].fileEntries[j].values.dictionary[type] ) 
                                            
                 newFSC.fileEntries[j].values.rows = newFSC.fileEntries[j].values.rows + entryList[i].fileEntries[j].values.rows
             
         newFSC = newFSC.setMinMaxMeanMedians( startingBucket = 0 , finishingBucket = newFSC.nbEntries -1 )
              
            
     else: # pickle list was not valid, did not merge the named pickles
         
         if logger != None :
             logger.warning( _("Did not merge pickles named : %s. Pickle list was not valid.") %pickleNames )
             logger.warning( _("Filled with empty entries instead.") %pickleNames )
             
         newFSC.fileEntries = PickleMerging.fillWithEmptyEntries( nbEmptyEntries = 60 , entries = {} )    
     
     
     # prevents us from having to re-merge the file later on.
     temp = newFSC.logger
     del newFSC.logger
     CpickleWrapper.save( newFSC, mergedPickleName )
     try:
         os.chmod( mergedPickleName, 0777 )
     except:
         pass    
     
     #print "saved :%s" %mergedPickleName
     newFSC.logger = temp
     
     return newFSC
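
The merge above is gated on PickleMerging.entryListIsValid, whose source is not shown. Based on the @precondition in the docstring, here is a hedged sketch of what such a check plausibly verifies; the attribute names startTime, endTime and interval come from the FileStatsCollector calls above, and everything else is an assumption.

# Hedged sketch of an entry-list validity check matching the stated
# precondition: every collector loaded, same timespan, same bucket width.
def entryListIsValid( entryList ):
    if not entryList or entryList[0] is None:
        return False
    first = entryList[0]
    for entry in entryList[1:]:
        if entry is None:
            # a pickle failed to load
            return False
        if entry.startTime != first.startTime or \
           entry.endTime != first.endTime or \
           entry.interval != first.interval:
            # timespans or bucket widths differ: refuse to merge
            return False
    return True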