Example no. 1
    def containsUsefullInfo(self, fileName):
        """
            This method returns whether or not a certain file contain any data wich is within 
            the range we want.
            
            Pre-condition : self.startTime must be <= self.endTime            
                       
        """

        i = 0
        departure = ""
        usefull = False

        fileHandle = open(fileName, 'r')
        line = fileHandle.readline()

        if line != "":

            departure = FileStatsCollector.findValues(["departure"],
                                                      line)["departure"]

            if departure <= self.endTime:

                line == ""
                fileSize = os.stat(fileName)[6]

                line, offset = BackwardReader.readLineBackwards(
                    fileHandle, offset=-1, fileSize=fileSize)

                isInteresting, lineType = FileStatsCollector.isInterestingLine(
                    line, usage="departure")

                while isInteresting == False and line != "":  #in case of traceback found in file
                    line, offset = BackwardReader.readLineBackwards(
                        fileHandle, offset=offset, fileSize=fileSize)
                    isInteresting, lineType = FileStatsCollector.isInterestingLine(
                        line, usage="departure")

                lastDeparture = FileStatsCollector.findValues(
                    ["departure"], line)["departure"]

                if lastDeparture >= self.startTime:
                    usefull = True

        fileHandle.close()

        return usefull
Example no. 2
    def containsUsefullInfo(self, fileName):
        """
            This method returns whether or not a certain file contain any data wich is within 
            the range we want.
            
            Pre-condition : self.startTime must be <= self.endTime            
                       
        """

        i = 0
        departure = ""
        usefull = False

        fileHandle = open(fileName, "r")
        line = fileHandle.readline()

        if line != "":

            departure = FileStatsCollector.findValues(["departure"], line)["departure"]

            if departure <= self.endTime:

                line == ""
                fileSize = os.stat(fileName)[6]

                line, offset = BackwardReader.readLineBackwards(fileHandle, offset=-1, fileSize=fileSize)

                isInteresting, lineType = FileStatsCollector.isInterestingLine(line, usage="departure")

                while isInteresting == False and line != "":  # in case of traceback found in file
                    line, offset = BackwardReader.readLineBackwards(fileHandle, offset=offset, fileSize=fileSize)
                    isInteresting, lineType = FileStatsCollector.isInterestingLine(line, usage="departure")

                lastDeparture = FileStatsCollector.findValues(["departure"], line)["departure"]

                if lastDeparture >= self.startTime:
                    usefull = True

        fileHandle.close()

        return usefull
Example no. 3
    def __init__( self, client = "", directory = "", statsTypes = None, statsCollection = None,\
                  pickleName = "", logger = None, logging = False, machine = "pdsCSP"  ):
        """ 
            Constructor.
            -Builds a StatsPickler with no entries.           
        """

        self.client = client  # Name of the client for whom we're collecting data
        self.pickleName = ""  # Pickle
        self.directory = directory  # Name of the directory containing stats files.
        self.statsTypes = statsTypes or []  # Types we'll search for stats.
        self.machine = machine  # Machine on which the data resides.
        self.loggerName = 'pickling'  # Name of the logger.
        self.logger = logger  # Permits a logging system for this object.
        self.logging = logging  # Whether or not to enable logging.

        if self.logging == True:
            if logger is None:  # Enable logging
                if not os.path.isdir(STATSPATHS.STATSLOGGING):
                    os.makedirs(STATSPATHS.STATSLOGGING, mode=0777)
                self.logger = Logger(STATSPATHS.STATSLOGGING + 'stats_' +
                                     self.loggerName + '.log.notb',
                                     'INFO',
                                     'TX' + self.loggerName,
                                     bytes=True)
                self.logger = self.logger.getLogger()
        else:
            logger = True

        self.statsCollection = statsCollection or FileStatsCollector(
            logger=self.logger, logging=logging)
        self.fileCollection = LogFileCollector(directory=directory,
                                               logger=self.logger,
                                               logging=logging)

        global _
        _ = self.getTranslatorForModule(CURRENT_MODULE_ABS_PATH)
Example no. 4
 def mergePicklesFromSameHour( logger = None , pickleNames = None, mergedPickleName = "",\
                               clientName = "" , combinedMachineName = "", currentTime = "",\
                               fileType = "tx" ):
     """
         @summary: This methods receives a list of filenames referring to pickled FileStatsEntries.
         
                   After the merger pickles get saved since they might be reused somewhere else.
         
         @precondition:  Pickle should be of the same timespan and bucket width.
                         If not no merging will occur.  
         
     """
     
     
     if logger != None : 
         logger.debug( _("Call to mergePickles received.") )
         logging = True
     else:
         logging = False
             
     entryList = []
     
     
     for pickle in pickleNames:#for every pickle we need to merge
         
         if os.path.isfile( pickle ):
             
             entryList.append( CpickleWrapper.load( pickle ) )
                         
         else:#Use empty entry if there is no existing pickle of that name
             
             endTime = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( currentTime ) + StatsDateLib.HOUR ) 
             entryList.append( FileStatsCollector( startTime = currentTime, endTime = endTime,logger =logger, logging =logging   ) )         
             
             if logger != None :
                 logger.warning( _("Pickle named %s did not exist. Empty entry was used instead.") %pickle )    
     
     
     #start off with a carbon copy of first pickle in list.
     newFSC = FileStatsCollector( files = entryList[0].files , statsTypes =  entryList[0].statsTypes, startTime = entryList[0].startTime,\
                                  endTime = entryList[0].endTime, interval=entryList[0].interval, totalWidth = entryList[0].totalWidth,\
                                  firstFilledEntry = entryList[0].firstFilledEntry, lastFilledEntry = entryList[0].lastFilledEntry,\
                                  maxLatency = entryList[0].maxLatency, fileEntries = entryList[0].fileEntries,logger = logger,\
                                  logging = logging )
              
     if PickleMerging.entryListIsValid( entryList ) == True :
         
         for i in range ( 1 , len( entryList ) ): #add other entries 
             
             for file in entryList[i].files :
                 if file not in newFSC.files :
                     newFSC.files.append( file ) 
             
             for j in range( len( newFSC.fileEntries ) ) : # add all entries                        
                 
                 newFSC.fileEntries[j].values.productTypes.extend( entryList[i].fileEntries[j].values.productTypes )
                 newFSC.fileEntries[j].files.extend( entryList[i].fileEntries[j].files )
                 newFSC.fileEntries[j].times.extend( entryList[i].fileEntries[j].times )  
                 newFSC.fileEntries[j].nbFiles = newFSC.fileEntries[j].nbFiles + entryList[i].fileEntries[j].nbFiles
                 
                 for type in newFSC.statsTypes :
                     newFSC.fileEntries[j].values.dictionary[type].extend( entryList[i].fileEntries[j].values.dictionary[type] ) 
                                            
                 newFSC.fileEntries[j].values.rows = newFSC.fileEntries[j].values.rows + entryList[i].fileEntries[j].values.rows
             
         newFSC = newFSC.setMinMaxMeanMedians( startingBucket = 0 , finishingBucket = newFSC.nbEntries -1 )
              
            
     else:#Did not merge pickles. Pickle list was not valid.
         
         if logger != None :
             logger.warning( _("Did not merge pickles named : %s. Pickle list was not valid.") %pickleNames )
             logger.warning( _("Filled with empty entries instead.") %pickleNames )
             
         newFSC.fileEntries = PickleMerging.fillWithEmptyEntries( nbEmptyEntries = 60 , entries = {} )    
     
     
     #prevents us from having to remerge the file later on.
     temp = newFSC.logger
     del newFSC.logger
     CpickleWrapper.save( newFSC, mergedPickleName )
     try:
         os.chmod( mergedPickleName, 0777 )
     except:
         pass    
     
     #print "saved :%s" %mergedPickleName
     newFSC.logger = temp
     
     return newFSC
Example no. 5
    def mergePicklesFromSameHour( logger = None , pickleNames = None, mergedPickleName = "",\
                                  clientName = "" , combinedMachineName = "", currentTime = "",\
                                  fileType = "tx" ):
        """
            @summary: This methods receives a list of filenames referring to pickled FileStatsEntries.
            
                      After the merger pickles get saved since they might be reused somewhere else.
            
            @precondition:  Pickle should be of the same timespan and bucket width.
                            If not no merging will occur.  
            
        """

        if logger != None:
            logger.debug(_("Call to mergePickles received."))
            logging = True
        else:
            logging = False

        entryList = []

        for pickle in pickleNames:  #for every pickle we need to merge

            if os.path.isfile(pickle):

                entryList.append(CpickleWrapper.load(pickle))

            else:  #Use empty entry if there is no existing pickle of that name

                endTime = StatsDateLib.getIsoFromEpoch(
                    StatsDateLib.getSecondsSinceEpoch(currentTime) +
                    StatsDateLib.HOUR)
                entryList.append(
                    FileStatsCollector(startTime=currentTime,
                                       endTime=endTime,
                                       logger=logger,
                                       logging=logging))

                if logger != None:
                    logger.warning(
                        _("Pickle named %s did not exist. Empty entry was used instead."
                          ) % pickle)

        #start off with a carbon copy of first pickle in list.
        newFSC = FileStatsCollector( files = entryList[0].files , statsTypes =  entryList[0].statsTypes, startTime = entryList[0].startTime,\
                                     endTime = entryList[0].endTime, interval=entryList[0].interval, totalWidth = entryList[0].totalWidth,\
                                     firstFilledEntry = entryList[0].firstFilledEntry, lastFilledEntry = entryList[0].lastFilledEntry,\
                                     maxLatency = entryList[0].maxLatency, fileEntries = entryList[0].fileEntries,logger = logger,\
                                     logging = logging )

        if PickleMerging.entryListIsValid(entryList) == True:

            for i in range(1, len(entryList)):  #add other entries

                for file in entryList[i].files:
                    if file not in newFSC.files:
                        newFSC.files.append(file)

                for j in range(len(newFSC.fileEntries)):  # add all entries

                    newFSC.fileEntries[j].values.productTypes.extend(
                        entryList[i].fileEntries[j].values.productTypes)
                    newFSC.fileEntries[j].files.extend(
                        entryList[i].fileEntries[j].files)
                    newFSC.fileEntries[j].times.extend(
                        entryList[i].fileEntries[j].times)
                    newFSC.fileEntries[j].nbFiles = newFSC.fileEntries[
                        j].nbFiles + entryList[i].fileEntries[j].nbFiles

                    for type in newFSC.statsTypes:
                        newFSC.fileEntries[j].values.dictionary[type].extend(
                            entryList[i].fileEntries[j].values.dictionary[type]
                        )

                    newFSC.fileEntries[j].values.rows = newFSC.fileEntries[
                        j].values.rows + entryList[i].fileEntries[j].values.rows

            newFSC = newFSC.setMinMaxMeanMedians(
                startingBucket=0, finishingBucket=newFSC.nbEntries - 1)

        else:  #Did not merge pickles. Pickle list was not valid.

            if logger != None:
                logger.warning(
                    _("Did not merge pickles named : %s. Pickle list was not valid."
                      ) % pickleNames)
                logger.warning(
                    _("Filled with empty entries instead."))

            newFSC.fileEntries = PickleMerging.fillWithEmptyEntries(
                nbEmptyEntries=60, entries={})

        #prevents us from having to remerge the file later on.
        temp = newFSC.logger
        del newFSC.logger
        CpickleWrapper.save(newFSC, mergedPickleName)
        try:
            os.chmod(mergedPickleName, 0777)
        except:
            pass

        #print "saved :%s" %mergedPickleName
        newFSC.logger = temp

        return newFSC
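
A minimal usage sketch for the merge above. The import path, pickle file paths, and client/machine names are illustrative assumptions; only the mergePicklesFromSameHour() signature comes from the example itself, and the other calls in these listings suggest the function lives on the PickleMerging class.

# Hypothetical usage sketch -- import path and file names are assumptions.
from pxStats.lib.PickleMerging import PickleMerging  # assumed import path

pickleNames = ["/apps/px/stats/pickles/satnet/20060731/tx/pds1_13:00:00",
               "/apps/px/stats/pickles/satnet/20060731/tx/pds2_13:00:00"]

# Merge the two hourly pickles into one combined collection and save it.
mergedFSC = PickleMerging.mergePicklesFromSameHour(logger=None,
                                                   pickleNames=pickleNames,
                                                   mergedPickleName="/tmp/satnet_combined_13:00:00",
                                                   clientName="satnet",
                                                   combinedMachineName="pds1_pds2",
                                                   currentTime="2006-07-31 13:00:00",
                                                   fileType="tx")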
Example no. 6
    def mergePicklesFromDifferentHours( logger = None , startTime = "2006-07-31 13:00:00",\
                                        endTime = "2006-07-31 19:00:00", client = "satnet",\
                                        machine = "pdsPM", fileType = "tx" ):
        """
            @summary : This method merges entire hourly pickles files together. 
            
            @None    : This does not support merging part of the data of pickles.   
        
        """

        if logger != None:
            logger.debug(_("Call to mergeHourlyPickles received."))
            logging = True
        else:
            logging = False

        pickles = []
        entries = {}
        width = StatsDateLib.getSecondsSinceEpoch(
            endTime) - StatsDateLib.getSecondsSinceEpoch(startTime)
        startTime = StatsDateLib.getIsoWithRoundedHours(startTime)

        seperators = [startTime]
        seperators.extend(
            StatsDateLib.getSeparatorsWithStartTime(startTime=startTime,
                                                    width=width,
                                                    interval=60 *
                                                    StatsDateLib.MINUTE)[:-1])

        for seperator in seperators:
            pickles.append(
                StatsPickler.buildThisHoursFileName(client=client,
                                                    offset=0,
                                                    currentTime=seperator,
                                                    machine=machine,
                                                    fileType=fileType))

        startingNumberOfEntries = 0
        #print "prior to loading and merging pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) )
        for pickle in pickles:

            if os.path.isfile(pickle):

                tempCollection = CpickleWrapper.load(pickle)
                if tempCollection != None:
                    for i in xrange(len(tempCollection.fileEntries)):
                        entries[startingNumberOfEntries +
                                i] = tempCollection.fileEntries[i]
                    startingNumberOfEntries = startingNumberOfEntries + len(
                        tempCollection.fileEntries)
                else:
                    sys.exit()
            else:

                emptyEntries = PickleMerging.fillWithEmptyEntries(
                    nbEmptyEntries=60, entries={})
                for i in xrange(60):
                    entries[i + startingNumberOfEntries] = emptyEntries[i]
                startingNumberOfEntries = startingNumberOfEntries + 60

        #print "after the  loading and merging og pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) )

        statsCollection = FileStatsCollector(startTime=startTime,
                                             endTime=endTime,
                                             interval=StatsDateLib.MINUTE,
                                             totalWidth=width,
                                             fileEntries=entries,
                                             fileType=fileType,
                                             logger=logger,
                                             logging=logging)

        return statsCollection
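
A minimal call sketch for the function above, mirroring the default arguments shown in its signature; the import path and time range are assumptions.

# Hypothetical usage sketch -- only the signature and defaults come from the example above.
from pxStats.lib.PickleMerging import PickleMerging  # assumed import path

statsCollection = PickleMerging.mergePicklesFromDifferentHours(logger=None,
                                                               startTime="2006-07-31 13:00:00",
                                                               endTime="2006-07-31 19:00:00",
                                                               client="satnet",
                                                               machine="pdsPM",
                                                               fileType="tx")
# The returned FileStatsCollector carries one entry per bucket of the merged span.
print "Merged collection spans %s file entries." % len(statsCollection.fileEntries)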
Example no. 7
class StatsPickler(Translatable):
    """
        Contains all the methods needed to pickle stats for a certain client.
        
    """

    def __init__( self, client = "", directory = "", statsTypes = None, statsCollection = None,\
                  pickleName = "", logger = None, logging = False, machine = "pdsCSP"  ):
        """ 
            Constructor.
            -Builds a StatsPickler with no entries.           
        """

        self.client = client  # Name of the client for whom we're collecting data
        self.pickleName = ""  # Pickle
        self.directory = directory  # Name of the directory containing stats files.
        self.statsTypes = statsTypes or []  # Types we'll search for stats.
        self.machine = machine  # Machine on which the data resides.
        self.loggerName = 'pickling'  # Name of the logger.
        self.logger = logger  # Permits a logging system for this object.
        self.logging = logging  # Whether or not to enable logging.

        if self.logging == True:
            if logger is None:  # Enable logging
                if not os.path.isdir(STATSPATHS.STATSLOGGING):
                    os.makedirs(STATSPATHS.STATSLOGGING, mode=0777)
                self.logger = Logger(STATSPATHS.STATSLOGGING + 'stats_' +
                                     self.loggerName + '.log.notb',
                                     'INFO',
                                     'TX' + self.loggerName,
                                     bytes=True)
                self.logger = self.logger.getLogger()
        else:
            logger = True

        self.statsCollection = statsCollection or FileStatsCollector(
            logger=self.logger, logging=logging)
        self.fileCollection = LogFileCollector(directory=directory,
                                               logger=self.logger,
                                               logging=logging)

        global _
        _ = self.getTranslatorForModule(CURRENT_MODULE_ABS_PATH)

    def buildThisHoursFileName(client="someclient",
                               offset=0,
                               currentTime="",
                               fileType="tx",
                               machine="someMachineName"):
        """ 
            @summary : Builds a filename using current currentTime.
            
            @Note : The format will be something like this :
                    StatsPaths.STATSPICKLES/clientName/date/TXorRX//machine_hour
                    Ex : StatsPaths.STATSPICKLES/clientName/20060707/tx/machinex_12:00:00
            
                    offset can be used to find a file from an hour close to the current one 
            
                    tempcurrentTime can also be used to build a filename from another hour. 
            
            
            @warning :To be used only with pickles created hourly.
                
        """

        timeFolder = ""

        if currentTime == "":
            currentTime = time.time()
        else:
            currentTime = StatsDateLib.getSecondsSinceEpoch(currentTime)

        currentTime = currentTime + (offset * StatsDateLib.HOUR)
        splitTime = time.gmtime(currentTime)

        for i in range(3):

            if int(splitTime[i]) < 10:
                timeFolder = timeFolder + "0" + str(splitTime[i])
            else:
                timeFolder = timeFolder + str(splitTime[i])

        hour = StatsDateLib.getHoursFromIso(
            StatsDateLib.getIsoFromEpoch(currentTime))

        maxLt = (os.statvfs(STATSPATHS.STATSPICKLES)[statvfs.F_NAMEMAX])

        fileName = ("%s" + "%." + str(maxLt) + "s/%s/%s/%." + str(maxLt) +
                    "s_%s") % (STATSPATHS.STATSPICKLES, client, timeFolder,
                               fileType, str(machine), str(hour))

        return fileName

    buildThisHoursFileName = staticmethod(buildThisHoursFileName)

    def collectStats(self,
                     types,
                     directory,
                     fileType="tx",
                     startTime='2006-05-18 00:00:00',
                     endTime="",
                     interval=60 * StatsDateLib.MINUTE,
                     save=True):
        """
            @summary : This method is used to collect stats from logfiles found within a directory.
            
                        Types is the type of dats to be collected. 
                        
                        Pickle is the name of the file to be used. If not specified will be generated
                        according to the other parameters.
                        
                        FileType specifies what type of files to look for in the directory.
                        
                        StartTime and endtime specify the boundaries within wich we'll collect the data. 
                        
                        Interval the width of the entries in the stats collection 
                            
                        save can be false if for some reason user does not want to save pickle.            
                                   
                        If both  of the above are true, hourly pickles will be done.
                        
                        Pre-conditions : StarTime needs to be smaller than endTime.
                                         
                                         If Daily pickling is used,width between start 
                                         and endTime needs to be no more than 24hours
                                         
                                         If Hourly pickling is used,width between start 
                                         and endTime needs to be no more than 1hour.
                                           
                    
                        If pre-conditions aren't met, application will fail.
            
        """

        global _

        #Find up to date file list.
        self.fileCollection =  LogFileCollector( startTime  = startTime , endTime = endTime, directory = directory, lastLineRead = "",\
                                                 logType = fileType, name = self.client, logger = self.logger, logging = self.logging )

        temp = self.logger  #Need to remove current logger temporarily
        del self.logger
        self.fileCollection.collectEntries()  #find all entries from the folder
        self.logger = temp

        if self.fileCollection.logger != None:  #No longer need the logger
            self.fileCollection.logger = None

        if os.path.isfile(self.pickleName):

            if self.logger != None:
                self.logger.warning(
                    _("User tried to modify allready filled pickle file."))
                self.logger.warning(
                    _("Pickle was named : %s") % self.pickleName)

        # Creates a new FileStats collector which spans from the very
        # start of the hour up till the end.

        if self.pickleName == "":
            self.pickleName = StatsPickler.buildThisHoursFileName(
                client=self.client,
                currentTime=startTime,
                machine=self.machine,
                fileType=fileType)


        self.statsCollection = FileStatsCollector( files = self.fileCollection.entries, fileType = fileType, statsTypes = types,\
                                                   startTime = StatsDateLib.getIsoWithRoundedHours( startTime ), endTime = endTime,\
                                                   interval = interval, totalWidth = 1*StatsDateLib.HOUR, logger = self.logger,logging = self.logging )

        #Temporarily delete logger to make sure no duplicated lines appear in the log file.
        temp = self.logger
        del self.logger
        self.statsCollection.collectStats(endTime)
        self.logger = temp

        if save == True:  # must remove logger temporarily. Cannot save opened files.

            loggerNeedsToBeReplaced = False
            if self.statsCollection.logger != None:
                temp = self.statsCollection.logger
                del self.statsCollection.logger
                loggerNeedsToBeReplaced = True

            CpickleWrapper.save(object=self.statsCollection,
                                filename=self.pickleName)

            try:
                os.chmod(self.pickleName, 0777)

                dirname = os.path.dirname(self.pickleName)

                while (dirname != STATSPATHS.STATSPICKLES[:-1]
                       ):  #[:-1] removes the last / character

                    try:
                        os.chmod(dirname, 0777)
                    except:
                        pass

                    dirname = os.path.dirname(dirname)

            except:
                pass

            if loggerNeedsToBeReplaced:
                self.statsCollection.logger = temp

            if self.logger != None:
                self.logger.info(
                    _("Saved pickle named : %s ") % self.pickleName)

    def printStats(self):
        """
            @summary : This utility method prints out all the stats concerning each file.

            @note : ****Mostly useful for debugging****
                     
        """
        global _
        absoluteFilename = str(
            STATSPATHS.STATSDATA) + "TEST_OUTPUT_FILE_FOR_STATSPICKLER "
        print _("Output filename used : %s") % absoluteFilename
        fileHandle = open(absoluteFilename, 'w')
        old_stdout = sys.stdout
        sys.stdout = fileHandle

        print _("\n\nFiles used : %s") % self.fileCollection.entries
        print _("Starting date: %s") % self.statsCollection.startTime

        print "Interval: %s" % self.statsCollection.interval
        print "endTime: %s" % self.statsCollection.endTime

        for j in range(self.statsCollection.nbEntries):
            print _("\nEntry's interval : %s - %s ") % (
                self.statsCollection.fileEntries[j].startTime,
                self.statsCollection.fileEntries[j].endTime)
            print _("Values :")
            print self.statsCollection.fileEntries[j].values.dictionary
            print _("Means :")
            print self.statsCollection.fileEntries[j].means
            print _("Medians")
            print self.statsCollection.fileEntries[j].medians
            print _("Minimums")
            print self.statsCollection.fileEntries[j].minimums
            print _("Maximums")
            print self.statsCollection.fileEntries[j].maximums
            print _("Total")
            print self.statsCollection.fileEntries[j].totals

        fileHandle.close()
        sys.stdout = old_stdout  #resets standard output
        print _("Printed %s ") % absoluteFilename
Example no. 8
    def collectStats(self,
                     types,
                     directory,
                     fileType="tx",
                     startTime='2006-05-18 00:00:00',
                     endTime="",
                     interval=60 * StatsDateLib.MINUTE,
                     save=True):
        """
            @summary : This method is used to collect stats from logfiles found within a directory.
            
                        Types is the type of dats to be collected. 
                        
                        Pickle is the name of the file to be used. If not specified will be generated
                        according to the other parameters.
                        
                        FileType specifies what type of files to look for in the directory.
                        
                        StartTime and endtime specify the boundaries within wich we'll collect the data. 
                        
                        Interval the width of the entries in the stats collection 
                            
                        save can be false if for some reason user does not want to save pickle.            
                                   
                        If both  of the above are true, hourly pickles will be done.
                        
                        Pre-conditions : StarTime needs to be smaller than endTime.
                                         
                                         If Daily pickling is used,width between start 
                                         and endTime needs to be no more than 24hours
                                         
                                         If Hourly pickling is used,width between start 
                                         and endTime needs to be no more than 1hour.
                                           
                    
                        If pre-conditions aren't met, application will fail.
            
        """

        global _

        #Find up to date file list.
        self.fileCollection =  LogFileCollector( startTime  = startTime , endTime = endTime, directory = directory, lastLineRead = "",\
                                                 logType = fileType, name = self.client, logger = self.logger, logging = self.logging )

        temp = self.logger  #Need to remove current logger temporarily
        del self.logger
        self.fileCollection.collectEntries()  #find all entries from the folder
        self.logger = temp

        if self.fileCollection.logger != None:  #No longer need the logger
            self.fileCollection.logger = None

        if os.path.isfile(self.pickleName):

            if self.logger != None:
                self.logger.warning(
                    _("User tried to modify allready filled pickle file."))
                self.logger.warning(
                    _("Pickle was named : %s") % self.pickleName)

        # Creates a new FileStats collector which spans from the very
        # start of the hour up till the end.

        if self.pickleName == "":
            self.pickleName = StatsPickler.buildThisHoursFileName(
                client=self.client,
                currentTime=startTime,
                machine=self.machine,
                fileType=fileType)


        self.statsCollection = FileStatsCollector( files = self.fileCollection.entries, fileType = fileType, statsTypes = types,\
                                                   startTime = StatsDateLib.getIsoWithRoundedHours( startTime ), endTime = endTime,\
                                                   interval = interval, totalWidth = 1*StatsDateLib.HOUR, logger = self.logger,logging = self.logging )

        #Temporarily delete logger to make sure no duplicated lines appear in the log file.
        temp = self.logger
        del self.logger
        self.statsCollection.collectStats(endTime)
        self.logger = temp

        if save == True:  # must remove logger temporarily. Cannot save opened files.

            loggerNeedsToBeReplaced = False
            if self.statsCollection.logger != None:
                temp = self.statsCollection.logger
                del self.statsCollection.logger
                loggerNeedsToBeReplaced = True

            CpickleWrapper.save(object=self.statsCollection,
                                filename=self.pickleName)

            try:
                os.chmod(self.pickleName, 0777)

                dirname = os.path.dirname(self.pickleName)

                while (dirname != STATSPATHS.STATSPICKLES[:-1]
                       ):  #[:-1] removes the last / character

                    try:
                        os.chmod(dirname, 0777)
                    except:
                        pass

                    dirname = os.path.dirname(dirname)

            except:
                pass

            if loggerNeedsToBeReplaced:
                self.statsCollection.logger = temp

            if self.logger != None:
                self.logger.info(
                    _("Saved pickle named : %s ") % self.pickleName)
Example no. 9
class StatsPickler(Translatable):
    """
        Contains all the methods needed to pickle stats for a certain client.
        
    """
    
    def __init__( self, client = "", directory = "", statsTypes = None, statsCollection = None,\
                  pickleName = "", logger = None, logging = False, machine = "pdsCSP"  ):
        """ 
            Constructor.
            -Builds a StatsPickler with no entries.           
        """
        
        self.client           = client                 # Name of the client for whom we're collecting data 
        self.pickleName       = ""                     # Pickle 
        self.directory        = directory              # Name of the directory containing stats files.
        self.statsTypes       = statsTypes or []       # Types we'll search for stats. 
        self.machine          = machine                # Machine on which the data resides.
        self.loggerName       = 'pickling'             # Name of the logger.             
        self.logger           = logger                 # Permits a logging system for this object.
        self.logging          = logging                # Whether or not to enable logging.      
        
        if self.logging == True:
            if logger is None: # Enable logging
                if not os.path.isdir( STATSPATHS.STATSLOGGING ):
                    os.makedirs( STATSPATHS.STATSLOGGING , mode=0777 )
                self.logger = Logger( STATSPATHS.STATSLOGGING + 'stats_' + self.loggerName + '.log.notb', 'INFO', 'TX' + self.loggerName, bytes = True  ) 
                self.logger = self.logger.getLogger()
        else:
            logger = True
               
        self.statsCollection  = statsCollection or FileStatsCollector( logger = self.logger, logging = logging )
        self.fileCollection   = LogFileCollector( directory = directory, logger = self.logger, logging = logging )       
        
        global _ 
        _ = self.getTranslatorForModule(CURRENT_MODULE_ABS_PATH)
        
        
        
    def buildThisHoursFileName(  client = "someclient", offset = 0, currentTime = "", fileType = "tx", machine = "someMachineName" ):
        """ 
            @summary : Builds a filename using current currentTime.
            
            @Note : The format will be something like this :
                    StatsPaths.STATSPICKLES/clientName/date/TXorRX//machine_hour
                    Ex : StatsPaths.STATSPICKLES/clientName/20060707/tx/machinex_12:00:00
            
                    offset can be used to find a file from an hour close to the current one 
            
                    tempcurrentTime can also be used to build a filename from another hour. 
            
            
            @warning :To be used only with pickles created hourly.
                
        """    
        
        timeFolder = ""
               
        if currentTime == "":
            currentTime = time.time()
        else:
            currentTime = StatsDateLib.getSecondsSinceEpoch( currentTime )    
        
        currentTime = currentTime + ( offset * StatsDateLib.HOUR )
        splitTime = time.gmtime( currentTime )    
                
        for i in range( 3 ):
            
            if int( splitTime[i] ) < 10 :
                timeFolder = timeFolder + "0" + str( splitTime[i] )
            else:
                timeFolder = timeFolder + str( splitTime[i] )          
        
                
        hour = StatsDateLib.getHoursFromIso( StatsDateLib.getIsoFromEpoch( currentTime ) )
        
        maxLt = ( os.statvfs( STATSPATHS.STATSPICKLES )[statvfs.F_NAMEMAX])
        
        fileName = ( "%s" + "%." +  str( maxLt ) + "s/%s/%s/%." + str( maxLt ) + "s_%s" )   %( STATSPATHS.STATSPICKLES, client, timeFolder,  fileType, str(machine),  str(hour) )  
                
        return fileName 
          
    
    buildThisHoursFileName = staticmethod( buildThisHoursFileName )    
    
    
    
    def collectStats( self, types, directory, fileType = "tx", startTime = '2006-05-18 00:00:00', endTime = "", interval = 60*StatsDateLib.MINUTE, save = True  ):
        """
            @summary : This method is used to collect stats from logfiles found within a directory.
            
                        Types is the type of dats to be collected. 
                        
                        Pickle is the name of the file to be used. If not specified will be generated
                        according to the other parameters.
                        
                        FileType specifies what type of files to look for in the directory.
                        
                        StartTime and endtime specify the boundaries within wich we'll collect the data. 
                        
                        Interval the width of the entries in the stats collection 
                            
                        save can be false if for some reason user does not want to save pickle.            
                                   
                        If both  of the above are true, hourly pickles will be done.
                        
                        Pre-conditions : StarTime needs to be smaller than endTime.
                                         
                                         If Daily pickling is used,width between start 
                                         and endTime needs to be no more than 24hours
                                         
                                         If Hourly pickling is used,width between start 
                                         and endTime needs to be no more than 1hour.
                                           
                    
                        If pre-conditions aren't met, application will fail.
            
        """     
        
        global _ 
        
        #Find up to date file list. 
        self.fileCollection =  LogFileCollector( startTime  = startTime , endTime = endTime, directory = directory, lastLineRead = "",\
                                                 logType = fileType, name = self.client, logger = self.logger, logging = self.logging )   
        
        
        temp  = self.logger#Need to remove current logger temporarily
        del self.logger
        self.fileCollection.collectEntries()          #find all entries from the folder
        self.logger = temp 
        
        
        if self.fileCollection.logger != None : #No longer need the logger 
            self.fileCollection.logger = None  
                
        if os.path.isfile( self.pickleName ):
            
            if self.logger != None :
                self.logger.warning( _("User tried to modify allready filled pickle file." ) )
                self.logger.warning( _("Pickle was named : %s") %self.pickleName )      
            
        
        # Creates a new FileStats collector which spans from the very 
        # start of the hour up till the end. 
        
        if self.pickleName == "":
            self.pickleName = StatsPickler.buildThisHoursFileName( client = self.client, currentTime = startTime, machine = self.machine, fileType = fileType )
    
            
        self.statsCollection = FileStatsCollector( files = self.fileCollection.entries, fileType = fileType, statsTypes = types,\
                                                   startTime = StatsDateLib.getIsoWithRoundedHours( startTime ), endTime = endTime,\
                                                   interval = interval, totalWidth = 1*StatsDateLib.HOUR, logger = self.logger,logging = self.logging )
        
        #Temporarily delete logger to make sure no duplicated lines appear in the log file.
        temp  = self.logger
        del self.logger
        self.statsCollection.collectStats( endTime )    
        self.logger = temp
            
    
        if save == True :# must remove logger temporarily. Cannot save opened files.

            loggerNeedsToBeReplaced = False
            if self.statsCollection.logger != None:
                temp = self.statsCollection.logger
                del self.statsCollection.logger
                loggerNeedsToBeReplaced = True
            
            CpickleWrapper.save ( object = self.statsCollection, filename = self.pickleName ) 
            
            try:
                os.chmod(self.pickleName, 0777)
                                 
                dirname = os.path.dirname( self.pickleName )                                                  
                
                while( dirname != STATSPATHS.STATSPICKLES[:-1] ):#[:-1] removes the last / character 
                    
                    try:
                        os.chmod( dirname, 0777 )
                    except:
                        pass
                    
                    dirname = os.path.dirname(dirname)
                    
            except:
                pass    
            
            if loggerNeedsToBeReplaced :  
                self.statsCollection.logger = temp
            
            if self.logger != None:
                self.logger.info( _("Saved pickle named : %s ") %self.pickleName )                          
               
                
    
             
    def printStats( self ) :       
        """
            @summary : This utility method prints out all the stats concerning each file.

            @note : ****Mostly useful for debugging****
                     
        """    
        global _ 
        absoluteFilename = str( STATSPATHS.STATSDATA ) + "TEST_OUTPUT_FILE_FOR_STATSPICKLER "
        print _("Output filename used : %s") %absoluteFilename
        fileHandle = open( absoluteFilename , 'w' )
        old_stdout = sys.stdout 
        sys.stdout = fileHandle 
        
        print _("\n\nFiles used : %s") %self.fileCollection.entries
        print _("Starting date: %s") % self.statsCollection.startTime
                                    
        print "Interval: %s" %self.statsCollection.interval
        print "endTime: %s" %self.statsCollection.endTime

        for j in range( self.statsCollection.nbEntries ):
            print _("\nEntry's interval : %s - %s ") %(self.statsCollection.fileEntries[j].startTime,self.statsCollection.fileEntries[j].endTime)
            print _("Values :")
            print self.statsCollection.fileEntries[j].values.dictionary
            print _("Means :")
            print self.statsCollection.fileEntries[j].means
            print _("Medians" )   
            print self.statsCollection.fileEntries[j].medians
            print _("Minimums")
            print self.statsCollection.fileEntries[j].minimums
            print _("Maximums")
            print self.statsCollection.fileEntries[j].maximums
            print _("Total")
            print self.statsCollection.fileEntries[j].totals
    

            
        fileHandle.close()      
        sys.stdout = old_stdout #resets standard output  
        print _("Printed %s ") %absoluteFilename
Example no. 10
 def collectStats( self, types, directory, fileType = "tx", startTime = '2006-05-18 00:00:00', endTime = "", interval = 60*StatsDateLib.MINUTE, save = True  ):
     """
         @summary : This method is used to collect stats from logfiles found within a directory.
         
                     Types is the type of dats to be collected. 
                     
                     Pickle is the name of the file to be used. If not specified will be generated
                     according to the other parameters.
                     
                     FileType specifies what type of files to look for in the directory.
                     
                     StartTime and endtime specify the boundaries within wich we'll collect the data. 
                     
                     Interval the width of the entries in the stats collection 
                         
                     save can be false if for some reason user does not want to save pickle.            
                                
                     If both  of the above are true, hourly pickles will be done.
                     
                     Pre-conditions : StarTime needs to be smaller than endTime.
                                      
                                      If Daily pickling is used,width between start 
                                      and endTime needs to be no more than 24hours
                                      
                                      If Hourly pickling is used,width between start 
                                      and endTime needs to be no more than 1hour.
                                        
                 
                     If pre-conditions aren't met, application will fail.
         
     """     
     
     global _ 
     
     #Find up to date file list. 
     self.fileCollection =  LogFileCollector( startTime  = startTime , endTime = endTime, directory = directory, lastLineRead = "",\
                                              logType = fileType, name = self.client, logger = self.logger, logging = self.logging )   
     
     
     temp  = self.logger#Need to remove current logger temporarily
     del self.logger
     self.fileCollection.collectEntries()          #find all entries from the folder
     self.logger = temp 
     
     
     if self.fileCollection.logger != None : #No longer need the logger 
         self.fileCollection.logger = None  
             
     if os.path.isfile( self.pickleName ):
         
         if self.logger != None :
             self.logger.warning( _("User tried to modify allready filled pickle file." ) )
             self.logger.warning( _("Pickle was named : %s") %self.pickleName )      
         
     
     # Creates a new FileStats collector which spans from the very 
     # start of the hour up till the end. 
     
     if self.pickleName == "":
         self.pickleName = StatsPickler.buildThisHoursFileName( client = self.client, currentTime = startTime, machine = self.machine, fileType = fileType )
 
         
     self.statsCollection = FileStatsCollector( files = self.fileCollection.entries, fileType = fileType, statsTypes = types,\
                                                startTime = StatsDateLib.getIsoWithRoundedHours( startTime ), endTime = endTime,\
                                                interval = interval, totalWidth = 1*StatsDateLib.HOUR, logger = self.logger,logging = self.logging )
     
     #Temporarily delete logger to make sure no duplicated lines appear in the log file.
     temp  = self.logger
     del self.logger
     self.statsCollection.collectStats( endTime )    
     self.logger = temp
         
 
     if save == True :# must remove logger temporarily. Cannot save opened files.

         loggerNeedsToBeReplaced = False
         if self.statsCollection.logger != None:
             temp = self.statsCollection.logger
             del self.statsCollection.logger
             loggerNeedsToBeReplaced = True
         
         CpickleWrapper.save ( object = self.statsCollection, filename = self.pickleName ) 
         
         try:
             os.chmod(self.pickleName, 0777)
                              
             dirname = os.path.dirname( self.pickleName )                                                  
             
             while( dirname != STATSPATHS.STATSPICKLES[:-1] ):#[:-1] removes the last / character 
                 
                 try:
                     os.chmod( dirname, 0777 )
                 except:
                     pass
                 
                 dirname = os.path.dirname(dirname)
                 
         except:
             pass    
         
         if loggerNeedsToBeReplaced :  
             self.statsCollection.logger = temp
         
         if self.logger != None:
             self.logger.info( _("Saved pickle named : %s ") %self.pickleName )