Example #1
0
def askUserAboutUpdatingLogs( infos ):
    """
        @Summary : Asks the user whether or not they want
                   to update the log files on their machine.
        
        @returns True or False              
    """
    
    updateLofFiles = False
    os.system( "clear" )
    showPresentation()
    print ""
    print ""
    print "***************** Important note *****************" 
    print "Collection or recollection of pickle files "
    print "is closely linked to the log files found on this machine."
    
    if StatsDateLib.getIsoWithRoundedHours( infos.picklesRecollectionStartTime ) != StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( time.time() )) : 
        print "Data recollection is set to take place up to the current hour."
        print "For the end of the recollection it is recommended that log file be updated."
        print "However, if the recollection spans over a long while and that the log file currently "
        print "on this machine are 'old' and cover the start of the recollection,"
        print "updating log files might cause you to loose some or all of those old files."
        
    else :
        print "Data recollection is set to end PRIOR to the current hour."
        print "In this case, log file updates are usually useless."
        print "In the case where the span between the start of the recollection "
        print "is longer than the span covered by the currently accessible log files, "
        print "usefull log files will be lsot by updating them."
        print "However the opposite could also be true. If problems occured and "
        print "databases are seriously outdated, updating them will be the only solution "
        print "capable of making some or all the needed log file data accessible for pickling."
    
    
    print ""
    print "***Please review log files prior to specifying whether or not you want to update them or not.***"
    print ""
    input = raw_input( "Do you want to update log files ? ( y or n ) : " )
       
    while ( str( input ).lower() != 'n' and   str( input ).lower() != 'y' ):
        print "Please enter one of the following choices : y/Y or n/N."
        input = raw_input( "Do you want to update log files ? ( y or n ) : " )
   
    if str( input ).lower() == 'y' :
        print "Log files will be updated."
        updateLofFiles =  True
    else:
        print "Log files will not be updated."
    
    
    
    return updateLofFiles
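A minimal usage sketch (not from the original source) showing how the prompt above could be wired into a caller; getRecollectionInfos and updateLogFiles are hypothetical helpers named here only for illustration:

infos = getRecollectionInfos()            # hypothetical helper building the infos object used above
if askUserAboutUpdatingLogs( infos ):
    updateLogFiles( infos )               # hypothetical helper performing the actual update
else:
    print "Continuing without updating log files."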
Example #2
0
def askUserAboutUpdatingLogs(infos):
    """
        @Summary : Asks the user whether or not they want
                   to update the log files on their machine.
        
        @returns True or False              
    """

    updateLofFiles = False
    os.system("clear")
    showPresentation()
    print ""
    print ""
    print "***************** Important note *****************"
    print "Collection or recollection of pickle files "
    print "is closely linked to the log files found on this machine."

    if StatsDateLib.getIsoWithRoundedHours(
            infos.picklesRecollectionStartTime
    ) != StatsDateLib.getIsoWithRoundedHours(
            StatsDateLib.getIsoFromEpoch(time.time())):
        print "Data recollection is set to take place up to the current hour."
        print "For the end of the recollection it is recommended that log file be updated."
        print "However, if the recollection spans over a long while and that the log file currently "
        print "on this machine are 'old' and cover the start of the recollection,"
        print "updating log files might cause you to loose some or all of those old files."

    else:
        print "Data recollection is set to end PRIOR to the current hour."
        print "In this case, log file updates are usually useless."
        print "In the case where the span between the start of the recollection "
        print "is longer than the span covered by the currently accessible log files, "
        print "usefull log files will be lsot by updating them."
        print "However the opposite could also be true. If problems occured and "
        print "databases are seriously outdated, updating them will be the only solution "
        print "capable of making some or all the needed log file data accessible for pickling."

    print ""
    print "***Please review log files prior to specifying whether or not you want to update them or not.***"
    print ""
    input = raw_input("Do you want to update log files ? ( y or n ) : ")

    while (str(input).lower() != 'n' and str(input).lower() != 'y'):
        print "Please enter one of the following choices : y/Y or n/N."
        input = raw_input("Do you want to update log files ? ( y or n ) : ")

    if str(input).lower() == 'y':
        print "Log files will be updated."
        updateLofFiles = True
    else:
        print "Log files will not be updated."

    return updateLofFiles
Example #3
0
def main():
    """
        @summary: Small test case to see if everything works fine 
        
    """

    statsConfig = StatsConfigParameters()
    statsConfig.getAllParameters()
    machineconfig = MachineConfigParameters()
    machineconfig.getParametersFromMachineConfigurationFile()

    currentTimeEpochFormat = time.time() - (120 * 60)

    endTime = StatsDateLib.getIsoWithRoundedHours(
        StatsDateLib.getIsoFromEpoch(currentTimeEpochFormat))
    startTime = StatsDateLib.getIsoWithRoundedHours(
        StatsDateLib.getIsoFromEpoch(currentTimeEpochFormat -
                                     (StatsDateLib.DAY * 7)))
    print startTime, endTime
    groupName = statsConfig.groupParameters.groups[0]
    clients = statsConfig.groupParameters.groupsMembers[groupName]
    machines = statsConfig.groupParameters.groupsMachines[groupName]
    fileType = statsConfig.groupParameters.groupFileTypes[groupName]

    seperators = [startTime]
    seperators.extend(
        StatsDateLib.getSeparatorsWithStartTime(
            startTime=startTime,
            width=StatsDateLib.DAY * 7,
            interval=StatsDateLib.HOUR)[:-1])

    listOfFiles = PickleMerging.createMergedPicklesList(
        startTime, endTime, clients, groupName, fileType, machines, seperators)
    listOfFileSizes = MemoryManagement.getListOfFileSizes(listOfFiles)
    currentFreeMemory = MemoryManagement.getCurrentFreeMemory(0.55555)

    if MemoryManagement.getTotalSizeListOfFiles(
            listOfFiles) > currentFreeMemory:

        seperators = MemoryManagement.getSeperatorsForHourlyTreatments(
            startTime, endTime, currentFreeMemory, listOfFileSizes)
        print seperators

    else:
        print "We have %s bytes free and the pickles require %s bytes" % (
            currentFreeMemory, getTotalSizeListOfFiles(listOfFiles))

        print "we have enough memory to merge all these pickles."
Example #4
0
 def mergePicklesFromDifferentHours( logger = None , startTime = "2006-07-31 13:00:00",\
                                     endTime = "2006-07-31 19:00:00", client = "satnet",\
                                     machine = "pdsPM", fileType = "tx" ):
     """
         @summary : This method merges entire hourly pickle files together. 
         
         @note    : This does not support merging only part of a pickle's data.   
     
     """
     
     if logger != None :
         logger.debug( _("Call to mergeHourlyPickles received.") )
         logging = True
     else:
         logging = False
             
     pickles = []
     entries = {}
     width = StatsDateLib.getSecondsSinceEpoch( endTime ) - StatsDateLib.getSecondsSinceEpoch( startTime )
     startTime = StatsDateLib.getIsoWithRoundedHours( startTime )
     
     seperators = [startTime]
     seperators.extend( StatsDateLib.getSeparatorsWithStartTime( startTime = startTime , width=width, interval=60*StatsDateLib.MINUTE )[:-1])
         
     for seperator in seperators :
         pickles.append( StatsPickler.buildThisHoursFileName(  client = client, offset = 0, currentTime = seperator, machine = machine, fileType = fileType ) )        
     
     
     startingNumberOfEntries = 0
     #print "prior to loading and merging pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) ) 
     for pickle in pickles : 
         
         if os.path.isfile( pickle ) :
             
                 
             tempCollection = CpickleWrapper.load( pickle )
             if tempCollection != None :
                 for i in xrange( len( tempCollection.fileEntries )  ):
                     entries[startingNumberOfEntries + i] = tempCollection.fileEntries[i]
                 startingNumberOfEntries = startingNumberOfEntries + len( tempCollection.fileEntries ) 
             else:                    
                 sys.exit()
         else:
                        
             emptyEntries =  PickleMerging.fillWithEmptyEntries( nbEmptyEntries = 60, entries = {} )
             for i in xrange( 60 ):
                 entries[i + startingNumberOfEntries ] = emptyEntries [i]
             startingNumberOfEntries = startingNumberOfEntries + 60
     
     #print "after the  loading and merging og pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) )        
     
     statsCollection = FileStatsCollector(  startTime = startTime , endTime = endTime, interval = StatsDateLib.MINUTE, totalWidth = width, fileEntries = entries,fileType= fileType, logger = logger, logging = logging )
        
             
     return statsCollection        
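A hedged usage sketch, not part of the original module: merging the hourly pickles of a six-hour span for one client on one machine. The argument values are placeholders; elsewhere in these examples the function is called through PickleMerging, so the qualified call is assumed here as well:

statsCollection = PickleMerging.mergePicklesFromDifferentHours( logger = None,
                                                                startTime = "2006-07-31 13:00:00",
                                                                endTime   = "2006-07-31 19:00:00",
                                                                client    = "satnet",
                                                                machine   = "pdsPM",
                                                                fileType  = "tx" )
# statsCollection.fileEntries is keyed 0..N, with 60 entries per hour, as built in the loop above.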
Example #5
0
def getLastUpdate( machine, client, fileType, currentDate, paths, collectUpToNow = False ):
    """
        @summary : Reads and returns the client's or source's last update.        
       
        @return : The client's or source's last update.   
    """ 
    
    times = {}
    lastUpdate = {}
    fileName = "%s%s_%s_%s" %( paths.STATSPICKLESTIMEOFUPDATES, fileType, client, machine )   
    
    if os.path.isfile( fileName ):
        try :
            fileHandle  = open( fileName, "r" )
            lastUpdate  = pickle.load( fileHandle )      
            fileHandle.close()
            
        except:
            print _("problematic file in loading : %s") %fileName
            lastUpdate = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch(currentDate ) - StatsDateLib.HOUR) )
            
    
    else:#create a new pickle file.Set start of the pickle as last update.   
        if not os.path.isdir( os.path.dirname( fileName ) ) :
            os.makedirs( os.path.dirname( fileName ) ) 
            
        fileHandle  = open( fileName, "w" )        
    
        lastUpdate = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch(currentDate ) - StatsDateLib.HOUR) )
         
        pickle.dump( lastUpdate, fileHandle )
        
        fileHandle.close()
       

    return lastUpdate
Example #6
0
def main():        
    """
        @summary: Small test case to see if everything works fine 
        
    """
     
    
    statsConfig   = StatsConfigParameters()
    statsConfig.getAllParameters()
    machineconfig = MachineConfigParameters()
    machineconfig.getParametersFromMachineConfigurationFile()
    
    currentTimeEpochFormat = time.time() -(120*60)
    
    endTime = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( currentTimeEpochFormat  ) )
    startTime = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( currentTimeEpochFormat -( StatsDateLib.DAY*7 )  ) )
    print startTime, endTime
    groupName = statsConfig.groupParameters.groups[0]    
    clients = statsConfig.groupParameters.groupsMembers[ groupName ]
    machines = statsConfig.groupParameters.groupsMachines[ groupName ]    
    fileType = statsConfig.groupParameters.groupFileTypes[ groupName ]
    
    seperators = [startTime]
    seperators.extend( StatsDateLib.getSeparatorsWithStartTime( startTime = startTime , width=StatsDateLib.DAY*7, interval=StatsDateLib.HOUR )[:-1])
    
    listOfFiles = PickleMerging.createMergedPicklesList( startTime, endTime, clients, groupName, fileType, machines, seperators )
    listOfFileSizes = MemoryManagement.getListOfFileSizes(listOfFiles)
    currentFreeMemory = MemoryManagement.getCurrentFreeMemory(0.55555)                
    
    if MemoryManagement.getTotalSizeListOfFiles( listOfFiles ) > currentFreeMemory:       
      
        seperators = MemoryManagement.getSeperatorsForHourlyTreatments( startTime, endTime, currentFreeMemory, listOfFileSizes  )            
        print seperators 
    
    else: 
        print "We have %s bytes free and the pickles require %s bytes" %( currentFreeMemory, getTotalSizeListOfFiles( listOfFiles ) )
        
        print "we have enough memory to merge all these pickles."   
 def getParametersFromMonitoringConfigurationFile( self ):
     """
         @summary : Gather all the parameters from the StatsPaths.STATSETC/config file.
         
          @return :  All collected values, in this order: emails, machines,
                     files, folders, maxUsages, errorsLogFile, maxSettingsFile.
     
     """   
     
     statsPaths = StatsPaths()
     statsPaths.setPaths()
 
     CONFIG = statsPaths.STATSETC +"monitoringConf" 
     config = ConfigParser()
     
     if os.path.isfile( CONFIG ):
         file = open( CONFIG )
         config.readfp( file ) 
         
         self.emails        = config.get( 'statsMonitoring', 'emails' ).split( ";" )
         self.sender        = config.get( 'statsMonitoring', 'sender' )
         self.smtpServer    = config.get( 'statsMonitoring', 'smtpServer' )
         self.machines      = config.get( 'statsMonitoring', 'machines' ).split( ";" )
         self.files         = config.get( 'statsMonitoring', 'files' ).split( ";" )
         self.folders       = config.get( 'statsMonitoring', 'folders' ).split( ";" )
         self.maxUsages     = config.get( 'statsMonitoring', 'maxUsages' ).split( ";" )
         self.errorsLogFile = config.get( 'statsMonitoring', 'errorsLogFile' )
         self.maxSettingsFile=config.get( 'statsMonitoring', 'maxSettingsFile' )
                
         self.endTime = StatsDateLib.getIsoWithRoundedHours( StatsDateLib.getIsoFromEpoch( time.time() ) )            
         self.startTime = self.getPreviousMonitoringJob(self.endTime)
         self.maximumGaps = self.getMaximumGaps( )
         self.updateMachineNamesBasedOnExistingMachineTags()
         
         try:
             file.close()
         except:
             pass
         
     else:
         #print "%s configuration file not present. Please restore file prior to running" %CONFIG
         raise Exception( "%s configuration file not present. Please restore file prior to running" %CONFIG ) 
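For reference, a hedged sketch of a monitoringConf file matching the keys read above; the section and option names come from the config.get calls, while every value is an invented placeholder:

[statsMonitoring]
emails = admin@example.com;ops@example.com
sender = stats@example.com
smtpServer = smtp.example.com
machines = machineOne;machineTwo
files = /path/to/fileOne;/path/to/fileTwo
folders = /path/to/folderOne;/path/to/folderTwo
maxUsages = 80;80
errorsLogFile = /path/to/errorsLogFile
maxSettingsFile = /path/to/maxSettingsFile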
Example #8
0
 def getStartTimeAndEndTime( self, collectUptoNow = False ):
     """
         @summary : Returns the startTime and endTime of the graphics.
         
         @warning : collectUptoNow not yet supported in program !
         
         @return : the startTime and endTime of the graphics.
         
     """
     
     
      # collectUptoNow is not yet implemented.
     if collectUptoNow == True :
         endTime = self.currentTime
         
     else :
         endTime = StatsDateLib.getIsoWithRoundedHours( self.currentTime )
         
     startTime = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( endTime ) - (self.timespan * StatsDateLib.HOUR) )  
      
     return startTime, endTime
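A hedged worked example of the method above, assuming getIsoWithRoundedHours truncates to the top of the hour and that self.timespan is expressed in hours:

# With self.currentTime = "2006-07-31 19:22:11", self.timespan = 12 and collectUptoNow = False:
#   endTime   -> "2006-07-31 19:00:00"
#   startTime -> "2006-07-31 07:00:00"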
Example #9
0
def setLastUpdate( machine, client, fileType, currentDate, paths, collectUpToNow = False ):
    """
        @summary : This method sets the client's or source's last update into its last-update file. 
              
    """
    
    times = {}
    lastUpdate = {}
    
    needToCreateNewFile = False 
    fileName = "%s%s_%s_%s" %( paths.STATSPICKLESTIMEOFUPDATES, fileType, client, machine )   
    
    
    if collectUpToNow == False :
        currentDate = StatsDateLib.getIsoWithRoundedHours( currentDate ) 
    
    
    if os.path.isfile( fileName ):
        
        try:     
            fileHandle  = open( fileName, "w" )
            pickle.dump( currentDate, fileHandle )
            fileHandle.close()
        
        except:
            needToCreateNewFile = True
            
    else:
        needToCreateNewFile = True
    
    
    if needToCreateNewFile == True:        
        #create a new pickle file  
        print _("problematic file : %s") %fileName 
        if not os.path.isdir( os.path.dirname( fileName ) ) :
            os.makedirs( os.path.dirname( fileName ) )
                    
        fileHandle  = open( fileName, "w" )            
        pickle.dump( currentDate, fileHandle )        
        fileHandle.close()
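A hedged sketch, not from the source, of how setLastUpdate above pairs with getLastUpdate from Example #5 around a pickling run; machine, client and date values are placeholders, and paths is assumed to be the StatsPaths-like object both functions expect:

lastUpdate = getLastUpdate( machine = "pdsPM", client = "satnet", fileType = "tx",
                            currentDate = "2006-07-31 19:00:00", paths = paths )
# ... collect and pickle the data between lastUpdate and the current date here ...
setLastUpdate( machine = "pdsPM", client = "satnet", fileType = "tx",
               currentDate = "2006-07-31 19:00:00", paths = paths )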
Example #10
0
    def getStartTimeAndEndTime(self, collectUptoNow=False):
        """
            @summary : Returns the startTime and endTime of the graphics.
            
            @warning : collectUptoNow not yet supported in program !
            
            @return : the startTime and endTime of the graphics.
            
        """

        # collectUptoNow is not yet implemented.
        if collectUptoNow == True:
            endTime = self.currentTime

        else:
            endTime = StatsDateLib.getIsoWithRoundedHours(self.currentTime)

        startTime = StatsDateLib.getIsoFromEpoch(
            StatsDateLib.getSecondsSinceEpoch(endTime) -
            (self.timespan * StatsDateLib.HOUR))

        return startTime, endTime
Example #11
0
def getGraphicProducerFromParserOptions( parser ):
    """
        
        This method parses the argv received when the program was called.
        It takes the params which have been passed by the user and sets them 
        in the corresponding fields of the infos variable.   
    
        If errors are encountered in the parameters used, it will immediately terminate 
        the application. 
    
    """ 
    
    graphicType = _("other")
    mergerType = ""
    
    ( options, args )= parser.parse_args()        
    timespan         = options.timespan
    machines         = options.machines.replace( ' ','').split(',')
    clientNames      = options.clients.replace( ' ','' ).split(',')
    types            = options.types.replace( ' ', '').split(',')
    date             = options.date.replace('"','').replace("'",'')
    fileType         = options.fileType.replace("'",'')
    havingRun        = options.havingRun
    individual       = options.individual
    totals           = options.totals
    daily            = options.daily
    weekly           = options.weekly
    monthly          = options.monthly
    yearly           = options.yearly    
    fixedCurrent     = options.fixedCurrent
    fixedPrevious    = options.fixedPrevious
    copy             = options.copy
    turnOffLogging   = options.turnOffLogging
    outputLanguage   = options.outputLanguage
    
    
    if outputLanguage == "":
        outputLanguage = LanguageTools.getMainApplicationLanguage()
    else :
        if outputLanguage not in LanguageTools.getSupportedLanguages():
            print _("Error. The specified language is not currently supported by this application.")
            print _("Please specify one of the following languages %s or use the default value()" %( str( LanguageTools.getSupportedLanguages() ).replace("[","").replace("]",""), LanguageTools.getMainApplicationLanguage()  ) )
            print _("Program terminated.")
            sys.exit()
            
    counter = 0  
    specialParameters = [daily, monthly, weekly, yearly]
    for specialParameter in specialParameters:
        if specialParameter:
            counter = counter + 1 
            
    if counter > 1 :
        print _("Error. Only one of the daily, weekly and yearly options can be use at a time ")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()
    
    elif counter == 1 and timespan != None :
        print _("Error. When using the daily, the weekly or the yearly options timespan cannot be specified. " )
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()
        
    elif counter == 0:    
        if fixedPrevious or fixedCurrent:
            print _("Error. When using one of the fixed options, please use either the -d -m -w or -y options. " )
            print _("Use -h for help.")
            print _("Program terminated.")
            sys.exit()
        
        if copy :
            if daily or not( weekly or monthly or yearly ):
                print _("Error. Copying can only be used with the -m -w or -y options. ") 
                print _("Use -h for help.")
                print _("Program terminated.")
            
                
    if counter == 0 and timespan == None :
        timespan = 12
        
    if fixedPrevious and fixedCurrent:
        print _("Error. Please use only one of the fixed options,either fixedPrevious or fixedCurrent. ") 
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()  
    
    if individual and totals:
        print _("Error. Please use only one of the group options,either individual or totals. ")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()  
    
    try: # Makes sure date is of valid format. 
         # Makes sure only one space is kept between date and hour.
        t =  time.strptime( date, '%Y-%m-%d %H:%M:%S' )
        split = date.split()
        date = "%s %s" %( split[0], split[1] )

    except:    
        print _("Error. The date format must be YYYY-MM-DD HH:MM:SS")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()         
        
    
    #Set graphic type based on parameters. Only one type is allowed at once based on previous validation.
    if daily :
        graphicType = _("daily")
        if fixedPrevious == False and fixedCurrent == False :
            timespan = 24
    elif weekly:
        graphicType = _("weekly")
        if fixedPrevious == False and fixedCurrent == False :
            timespan = 24 * 7
    elif monthly:
        graphicType = _("monthly")
        if fixedPrevious == False and fixedCurrent == False :
            timespan = 24 * 30
    elif yearly:
        graphicType = _("yearly")      
        if fixedPrevious == False and fixedCurrent == False :
            timespan = 24 * 365
    
    
    start, end = StatsDateLib.getStartEndInIsoFormat(date, timespan, graphicType, fixedCurrent, fixedPrevious )
    
    
    timespan = int( StatsDateLib.getSecondsSinceEpoch( end ) - StatsDateLib.getSecondsSinceEpoch( start ) ) / 3600    
                     
            
    #print "timespan %s" %timespan                           
    try:    
        if int( timespan ) < 1 :
            raise 
                
    except:
        
        print _("Error. The timespan value needs to be an integer one above 0.") 
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()        
         
    if fileType != "tx" and fileType != "rx":        
        print _("Error. File type must be either tx or rx.")
        print  "Specified file type was : ", fileType
        print _("Multiple types are not accepted.") 
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()            
        
                
    if havingRun == True and clientNames[0] != _("ALL"):
        print _("Error. Cannot use the havingRun option while specifying client/source names.")
        print _("To use havingRun, do not use -c|--client option.")
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()
    
    if clientNames[0] == _("ALL"):
        # Get all of the client/sources that have run between graph's start and end. 
        if totals == True or havingRun == True :          
            #print start, end, machines       
            rxNames, txNames = GeneralStatsLibraryMethods.getRxTxNamesHavingRunDuringPeriod( start, end, machines,None, havingrunOnAllMachines = True )
            mergerType = _("totalForMachine")
        else:#Build graphs only for currently running client/sources.      
            rxNames, txNames = GeneralStatsLibraryMethods.getRxTxNames( LOCAL_MACHINE, machines[0] )
            mergerType = _("group")
                     
        if fileType == _("tx"):    
            clientNames = txNames  
            #print clientNames
        else:
            clientNames = rxNames    
            
    else:
        if totals == True :  
            mergerType = _("regular")
    #--------------------------------------------------------------------- try :
            
    if fileType == _("tx"):       
    
        validTypes = [ _("latency"), _("bytecount"), _("errors"), _("filesOverMaxLatency"), _("filecount") ]
        
        if types[0] == _("All") :
            types = validTypes
        else :
            for t in types :
                if t not in validTypes:
                    raise Exception("%s is not a valid type" %t)
                    
    else:      
        
        validTypes = [ _("bytecount"), _("errors"), _("filecount") ]
        
        if types[0] == _("All"):
            types = validTypes
        
        else :
            for t in types :
                if t not in validTypes:
                        raise Exception("")

    #------------------------------------------------------------------- except:
        #----------------------------------------------------------- print types
        # print _("Error. With %s fileType, possible data types values are : %s.") %( fileType, validTypes )
        #---- print _("For multiple types use this syntax : -t 'type1','type2'")
        #-------------------------------- print _("Use -h for additional help.")
        #---------------------------------------- print _("Program terminated.")
        #------------------------------------------------------------ sys.exit()
  
            
    if individual != True :        
        combinedMachineName = ""
        for machine in machines:
            combinedMachineName = combinedMachineName + machine
                    
        machines = [ combinedMachineName ]              
         
                
    if len(clientNames) <1:
        print _("Error. No client/sources were found that matched the specified parameters")
        print _("Verify parameters used, especially the machines parameter.")
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()  
    
    elif len(clientNames) == 1 and totals == True:   
        print _("Error. Cannot use totals option with only one client/source name.")
        print _("Either remove --total option or use more than one client/source..")
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()          
    
    end = StatsDateLib.getIsoWithRoundedHours( end )
    
    graphicsProducer = RRDGraphicProducer( startTime = start, endTime = end, graphicType = graphicType, clientNames = clientNames, types = types, timespan = timespan, machines = machines, fileType = fileType,\
                                           totals = totals, copy = copy, mergerType = mergerType,turnOffLogging = turnOffLogging, inputLanguage = LanguageTools.getMainApplicationLanguage() ,  outputLanguage = outputLanguage )   
            
    return graphicsProducer                       
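A hedged sketch of the kind of optparse parser this function expects; this is an assumption for illustration, not the project's actual parser-creation code. Only a handful of the options read above are shown, and apart from -c/--client, -d, -m, -w, -y and -t (which appear in the error messages) the flag spellings are guesses:

from optparse import OptionParser

parser = OptionParser()
parser.add_option( "-c", "--client", dest = "clients", default = "ALL" )
parser.add_option( "-t", "--types", dest = "types", default = "All" )
parser.add_option( "-d", "--daily", action = "store_true", dest = "daily", default = False )
parser.add_option( "-w", "--weekly", action = "store_true", dest = "weekly", default = False )
parser.add_option( "-m", "--monthly", action = "store_true", dest = "monthly", default = False )
parser.add_option( "-y", "--yearly", action = "store_true", dest = "yearly", default = False )
# ... machines, date, fileType, timespan and the remaining options are omitted here ...
graphicsProducer = getGraphicProducerFromParserOptions( parser )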
Example #12
0
 def collectStats( self, types, directory, fileType = "tx", startTime = '2006-05-18 00:00:00', endTime = "", interval = 60*StatsDateLib.MINUTE, save = True  ):
     """
         @summary : This method is used to collect stats from logfiles found within a directory.
         
                     Types is the type of data to be collected. 
                     
                     Pickle is the name of the file to be used. If not specified will be generated
                     according to the other parameters.
                     
                     FileType specifies what type of files to look for in the directory.
                     
                     StartTime and endTime specify the boundaries within which we'll collect the data. 
                     
                     Interval is the width of the entries in the stats collection. 
                         
                     save can be false if for some reason the user does not want to save the pickle.            
                                
                     If both of the above are true, hourly pickles will be done.
                     
                     Pre-conditions : StartTime needs to be smaller than endTime.
                                      
                                      If daily pickling is used, the width between startTime 
                                      and endTime needs to be no more than 24 hours.
                                      
                                      If hourly pickling is used, the width between startTime 
                                      and endTime needs to be no more than 1 hour.
                                        
                 
                     If pre-conditions aren't met, application will fail.
         
     """     
     
     global _ 
     
     #Find up to date file list. 
     self.fileCollection =  LogFileCollector( startTime  = startTime , endTime = endTime, directory = directory, lastLineRead = "",\
                                              logType = fileType, name = self.client, logger = self.logger, logging = self.logging )   
     
     
     temp  = self.logger#Need to remove current logger temporarily
     del self.logger
     self.fileCollection.collectEntries()          #find all entries from the folder
     self.logger = temp 
     
     
     if self.fileCollection.logger != None : #No longer need the logger 
         self.fileCollection.logger = None  
             
     if os.path.isfile( self.pickleName ):
         
         if self.logger != None :
             self.logger.warning( _("User tried to modify allready filled pickle file." ) )
             self.logger.warning( _("Pickle was named : %s") %self.pickleName )      
         
     
     # Creates a new FileStats collector which spans from the very 
     # start of the hour up to the end. 
     
     if self.pickleName == "":
         self.pickleName = StatsPickler.buildThisHoursFileName( client = self.client, currentTime = startTime, machine = self.machine, fileType = fileType )
 
         
     self.statsCollection = FileStatsCollector( files = self.fileCollection.entries, fileType = fileType, statsTypes = types,\
                                                startTime = StatsDateLib.getIsoWithRoundedHours( startTime ), endTime = endTime,\
                                                interval = interval, totalWidth = 1*StatsDateLib.HOUR, logger = self.logger,logging = self.logging )
     
     #Temporarily delete logger to make sure no duplicated lines appear in the log file.
     temp  = self.logger
     del self.logger
     self.statsCollection.collectStats( endTime )    
     self.logger = temp
         
 
     if save == True :# must remove logger temporarily. Cannot save open file objects.
         
         loggerNeedsToBeReplaced = False
         if self.statsCollection.logger != None:     
             temp = self.statsCollection.logger
             del self.statsCollection.logger
             loggerNeedsToBeReplaced = True 
         
         CpickleWrapper.save ( object = self.statsCollection, filename = self.pickleName ) 
         
         try:
             os.chmod(self.pickleName, 0777)
                              
             dirname = os.path.dirname( self.pickleName )                                                  
             
             while( dirname != STATSPATHS.STATSPICKLES[:-1] ):#[:-1] removes the last / character 
                 
                 try:
                     os.chmod( dirname, 0777 )
                 except:
                     pass
                 
                 dirname = os.path.dirname(dirname)
                 
         except:
             pass    
         
         if loggerNeedsToBeReplaced :  
             self.statsCollection.logger = temp
         
         if self.logger != None:
             self.logger.info( _("Saved pickle named : %s ") %self.pickleName )                          
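A hedged usage sketch (values are placeholders, not from the source) of driving collectStats the way Example #18 does: build a StatsPickler, point it at a log directory and collect one hour of tx stats:

sp = StatsPickler( logger = None )                 # constructed as in Example #18
sp.client     = "satnet"                           # placeholder client name
sp.machine    = "pdsPM"                            # placeholder machine name
sp.pickleName = ""                                 # empty : let collectStats build this hour's name
sp.collectStats( types = [ "latency", "bytecount", "errors" ],
                 directory = "/apps/px/log/",      # placeholder log directory
                 fileType  = "tx",
                 startTime = "2006-05-18 00:00:00",
                 endTime   = "2006-05-18 01:00:00" )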
Example #13
0
def getOptionsFromParser( parser, logger = None  ):
    """
        
        This method parses the argv received when the program was called.
        It takes the params which have been passed by the user and sets them 
        in the corresponding fields of the infos variable.   
    
        If errors are encountered in the parameters used, it will immediately terminate 
        the application. 
    
    """    
        
    ( options, args )= parser.parse_args()        
    end       = options.end.replace( '"','' ).replace( "'",'')
    clients   = options.clients.replace( ' ','' ).split( ',' )
    machines  = options.machines.replace( ' ','' ).split( ',' )
    fileTypes = options.fileTypes.replace( ' ','' ).split( ',' )  
    products  = options.products.replace( ' ','' ).split( ',' ) 
    group     = options.group.replace( ' ','' ) 
          
         
    try: # Makes sure date is of valid format. 
         # Makes sure only one space is kept between date and hour.
        t =  time.strptime( end, '%Y-%m-%d %H:%M:%S' )#will raise exception if format is wrong.
        split = end.split()
        currentTime = "%s %s" %( split[0], split[1] )

    except:    
        print _( "Error. The endind date format must be YYYY-MM-DD HH:MM:SS" )
        print _( "Use -h for help." )
        print _( "Program terminated." )
        sys.exit()    
     
    #round ending hour to match pickleUpdater.     
    end   = StatsDateLib.getIsoWithRoundedHours( end )
        
            
    for machine in machines:
        if machine != LOCAL_MACHINE:
            GeneralStatsLibraryMethods.updateConfigurationFiles( machine, "pds" )
    
    if products[0] != _("ALL") and group == "" :
        print _( "Error. Products can only be specified when using special groups." )
        print _( "Use -h for help." )
        print _( "Program terminated." )
        sys.exit()        
    
     
                        
    #init fileTypes array here if only one fileType is specified for all clients/sources     
    if len(fileTypes) == 1 and len(clients) !=1:
        for i in range(1,len(clients) ):
            fileTypes.append(fileTypes[0])
        
    if clients[0] == _( "ALL" ) and fileTypes[0] != "":
        print _( "Error. Filetypes cannot be specified when all clients are to be updated." )
        print _( "Use -h for help." )
        print _( "Program terminated." )
        sys.exit()        
    
    elif clients[0] != _( "ALL" ) and len(clients) != len( fileTypes ) :
        print _( "Error. Specified filetypes must be either 1 for all the group or of the exact same lenght as the number of clients/sources." )
        print _( "Use -h for help." )
        print _( "Program terminated." )
        sys.exit()          
    
    elif clients[0] == _( 'ALL' ) :        
        rxNames, txNames = GeneralStatsLibraryMethods.getRxTxNames( LOCAL_MACHINE, machines[0] )

        clients = []
        clients.extend( txNames )
        clients.extend( rxNames )
        
        fileTypes = []
        for txName in txNames:
            fileTypes.append( _( "tx" ) )
        for rxName in rxNames:
            fileTypes.append( _( "rx" ) )                 
    
     
    clients = GeneralStatsLibraryMethods.filterClientsNamesUsingWilcardFilters( end, 1000, clients, machines, fileTypes= fileTypes )  
   
    
    infos = _Infos( endTime = end, machines = machines, clients = clients, fileTypes = fileTypes, products = products, group = group )   
    
    return infos     
Example #14
0
    def mergePicklesFromDifferentSources( logger = None , startTime = "2006-07-31 13:00:00",\
                                          endTime = "2006-07-31 19:00:00", clients = ["someclient"],\
                                          fileType = "tx", machines = [], groupName = "" ):
        """
            @summary : This method allows the user to merge pickles coming from numerous machines
                       covering as many hours as wanted, into a single FileStatsCollector entry.
            
                       Very useful when creating graphics on a central server with pickle files coming from 
                       remote locations.
            
        """

        combinedMachineName = ""
        combinedClientName = ""

        combinedMachineName = combinedMachineName.join(
            [machine for machine in machines])
        combinedClientName = combinedClientName.join(
            [client for client in clients])

        if groupName != "":
            clientsForVersionManagement = groupName
        else:
            clientsForVersionManagement = clients

        vc = PickleVersionChecker()

        vc.getClientsCurrentFileList(clients)

        vc.getSavedList(user=combinedMachineName,
                        clients=clientsForVersionManagement)

        width = StatsDateLib.getSecondsSinceEpoch(
            endTime) - StatsDateLib.getSecondsSinceEpoch(startTime)
        startTime = StatsDateLib.getIsoWithRoundedHours(startTime)

        seperators = [startTime]
        seperators.extend(
            StatsDateLib.getSeparatorsWithStartTime(startTime=startTime,
                                                    width=width,
                                                    interval=60 *
                                                    StatsDateLib.MINUTE)[:-1])

        mergedPickleNames =  PickleMerging.createMergedPicklesList(  startTime = startTime, endTime = endTime, machines = machines,\
                                                                     fileType = fileType, clients = clients, groupName = groupName,\
                                                                     seperators = seperators ) #Resulting list of the merger.

        for i in xrange(len(mergedPickleNames)):  #for every merger needed

            needToMergeSameHoursPickle = False
            pickleNames = PickleMerging.createNonMergedPicklesList(
                currentTime=seperators[i],
                machines=machines,
                fileType=fileType,
                clients=clients)

            if not os.path.isfile(mergedPickleNames[i]):
                needToMergeSameHoursPickle = True
            else:

                for pickle in pickleNames:  #Verify every pickle implicated in merger.
                    # if for some reason pickle has changed since last time
                    if vc.isDifferentFile(
                            file=pickle,
                            user=combinedMachineName,
                            clients=clientsForVersionManagement) == True:

                        needToMergeSameHoursPickle = True
                        break

            if needToMergeSameHoursPickle == True:  #First time or one element has changed

                PickleMerging.mergePicklesFromSameHour( logger = logger , pickleNames = pickleNames , clientName = combinedClientName,\
                                                        combinedMachineName = combinedMachineName, currentTime = seperators[i],\
                                                        mergedPickleName = mergedPickleNames[i], fileType = fileType  )

                for pickle in pickleNames:
                    vc.updateFileInList(file=pickle)

                vc.saveList(user=combinedMachineName,
                            clients=clientsForVersionManagement)

        # Once all machines have merged the necessary pickles we merge all pickles
        # into a single file stats entry.
        if groupName != "":
            nameToUseForMerger = groupName
        else:
            nameToUseForMerger = ""
            nameToUseForMerger = nameToUseForMerger.join(
                [client for client in clients])

        newFSC =  PickleMerging.mergePicklesFromDifferentHours( logger = logger , startTime = startTime, endTime = endTime, client = nameToUseForMerger,\
                                                                machine = combinedMachineName,fileType = fileType  )

        return newFSC
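A hedged usage sketch, not part of the original class: combining the same six-hour span from two machines into a single FileStatsCollector. Machine and client names are placeholders, and the call is assumed to be exposed through PickleMerging like the other merge helpers used in these examples:

newFSC = PickleMerging.mergePicklesFromDifferentSources( logger = None,
                                                         startTime = "2006-07-31 13:00:00",
                                                         endTime   = "2006-07-31 19:00:00",
                                                         clients   = [ "clientOne", "clientTwo" ],
                                                         fileType  = "tx",
                                                         machines  = [ "machineOne", "machineTwo" ],
                                                         groupName = "" )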
Example #15
0
    def mergePicklesFromDifferentHours( logger = None , startTime = "2006-07-31 13:00:00",\
                                        endTime = "2006-07-31 19:00:00", client = "satnet",\
                                        machine = "pdsPM", fileType = "tx" ):
        """
            @summary : This method merges entire hourly pickle files together. 
            
            @note    : This does not support merging only part of a pickle's data.   
        
        """

        if logger != None:
            logger.debug(_("Call to mergeHourlyPickles received."))
            logging = True
        else:
            logging = False

        pickles = []
        entries = {}
        width = StatsDateLib.getSecondsSinceEpoch(
            endTime) - StatsDateLib.getSecondsSinceEpoch(startTime)
        startTime = StatsDateLib.getIsoWithRoundedHours(startTime)

        seperators = [startTime]
        seperators.extend(
            StatsDateLib.getSeparatorsWithStartTime(startTime=startTime,
                                                    width=width,
                                                    interval=60 *
                                                    StatsDateLib.MINUTE)[:-1])

        for seperator in seperators:
            pickles.append(
                StatsPickler.buildThisHoursFileName(client=client,
                                                    offset=0,
                                                    currentTime=seperator,
                                                    machine=machine,
                                                    fileType=fileType))

        startingNumberOfEntries = 0
        #print "prior to loading and merging pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) )
        for pickle in pickles:

            if os.path.isfile(pickle):

                tempCollection = CpickleWrapper.load(pickle)
                if tempCollection != None:
                    for i in xrange(len(tempCollection.fileEntries)):
                        entries[startingNumberOfEntries +
                                i] = tempCollection.fileEntries[i]
                    startingNumberOfEntries = startingNumberOfEntries + len(
                        tempCollection.fileEntries)
                else:
                    sys.exit()
            else:

                emptyEntries = PickleMerging.fillWithEmptyEntries(
                    nbEmptyEntries=60, entries={})
                for i in xrange(60):
                    entries[i + startingNumberOfEntries] = emptyEntries[i]
                startingNumberOfEntries = startingNumberOfEntries + 60

        #print "after the  loading and merging og pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) )

        statsCollection = FileStatsCollector(startTime=startTime,
                                             endTime=endTime,
                                             interval=StatsDateLib.MINUTE,
                                             totalWidth=width,
                                             fileEntries=entries,
                                             fileType=fileType,
                                             logger=logger,
                                             logging=logging)

        return statsCollection
Example #16
0
def getOptionsFromParser(parser, logger=None):
    """
        
        This method parses the argv received when the program was called.
        It takes the params which have been passed by the user and sets them 
        in the corresponding fields of the infos variable.   
    
        If errors are encountered in the parameters used, it will immediately terminate 
        the application. 
    
    """

    (options, args) = parser.parse_args()
    end = options.end.replace('"', '').replace("'", '')
    clients = options.clients.replace(' ', '').split(',')
    machines = options.machines.replace(' ', '').split(',')
    fileTypes = options.fileTypes.replace(' ', '').split(',')
    products = options.products.replace(' ', '').split(',')
    group = options.group.replace(' ', '')

    try:  # Makes sure date is of valid format.
        # Makes sure only one space is kept between date and hour.
        t = time.strptime(
            end,
            '%Y-%m-%d %H:%M:%S')  #will raise exception if format is wrong.
        split = end.split()
        currentTime = "%s %s" % (split[0], split[1])

    except:
        print _("Error. The endind date format must be YYYY-MM-DD HH:MM:SS")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    #round ending hour to match pickleUpdater.
    end = StatsDateLib.getIsoWithRoundedHours(end)

    for machine in machines:
        if machine != LOCAL_MACHINE:
            GeneralStatsLibraryMethods.updateConfigurationFiles(machine, "pds")

    if products[0] != _("ALL") and group == "":
        print _(
            "Error. Products can only be specified when using special groups.")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    #init fileTypes array here if only one fileType is specified for all clients/sources
    if len(fileTypes) == 1 and len(clients) != 1:
        for i in range(1, len(clients)):
            fileTypes.append(fileTypes[0])

    if clients[0] == _("ALL") and fileTypes[0] != "":
        print _(
            "Error. Filetypes cannot be specified when all clients are to be updated."
        )
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    elif clients[0] != _("ALL") and len(clients) != len(fileTypes):
        print _(
            "Error. Specified filetypes must be either one for the whole group or of the exact same length as the number of clients/sources."
        )
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    elif clients[0] == _('ALL'):
        rxNames, txNames = GeneralStatsLibraryMethods.getRxTxNames(
            LOCAL_MACHINE, machines[0])

        clients = []
        clients.extend(txNames)
        clients.extend(rxNames)

        fileTypes = []
        for txName in txNames:
            fileTypes.append(_("tx"))
        for rxName in rxNames:
            fileTypes.append(_("rx"))

    clients = GeneralStatsLibraryMethods.filterClientsNamesUsingWilcardFilters(
        end, 1000, clients, machines, fileTypes=fileTypes)

    infos = _Infos(endTime=end,
                   machines=machines,
                   clients=clients,
                   fileTypes=fileTypes,
                   products=products,
                   group=group)

    return infos
Example #17
0
 def mergePicklesFromDifferentSources( logger = None , startTime = "2006-07-31 13:00:00",\
                                       endTime = "2006-07-31 19:00:00", clients = ["someclient"],\
                                       fileType = "tx", machines = [], groupName = "" ):
     """
          @summary : This method allows the user to merge pickles coming from numerous machines
                     covering as many hours as wanted, into a single FileStatsCollector entry.
          
                     Very useful when creating graphics on a central server with pickle files coming from 
                     remote locations.
         
     """          
        
     combinedMachineName = ""
     combinedClientName  = ""
     
     
     combinedMachineName = combinedMachineName.join( [machine for machine in machines ] )
     combinedClientName  = combinedClientName.join( [client for client in clients] )
     
     if groupName !="":
         clientsForVersionManagement = groupName 
     else:
         clientsForVersionManagement = clients
     
     vc  = PickleVersionChecker()    
        
     vc.getClientsCurrentFileList( clients )    
         
     vc.getSavedList( user = combinedMachineName, clients = clientsForVersionManagement )           
    
     width = StatsDateLib.getSecondsSinceEpoch( endTime ) - StatsDateLib.getSecondsSinceEpoch( startTime )
     startTime = StatsDateLib.getIsoWithRoundedHours( startTime )
     
     seperators = [startTime]
     seperators.extend( StatsDateLib.getSeparatorsWithStartTime( startTime = startTime , width=width, interval=60*StatsDateLib.MINUTE )[:-1])
         
     mergedPickleNames =  PickleMerging.createMergedPicklesList(  startTime = startTime, endTime = endTime, machines = machines,\
                                                                  fileType = fileType, clients = clients, groupName = groupName,\
                                                                  seperators = seperators ) #Resulting list of the merger.
        
     
     for i in xrange( len( mergedPickleNames ) ) : #for every merger needed
             
             needToMergeSameHoursPickle = False 
             pickleNames = PickleMerging.createNonMergedPicklesList( currentTime = seperators[i], machines = machines, fileType = fileType, clients = clients )
             
             if not os.path.isfile( mergedPickleNames[i] ):                
                 needToMergeSameHoursPickle = True 
             else:    
                 
                 for pickle in pickleNames : #Verify every pickle implicated in merger.
                     # if for some reason pickle has changed since last time                    
                     if vc.isDifferentFile( file = pickle, user = combinedMachineName, clients = clientsForVersionManagement ) == True :                                
                        
                         needToMergeSameHoursPickle = True 
                         break
                         
             
             if needToMergeSameHoursPickle == True :#First time or one element has changed   
                 
                 PickleMerging.mergePicklesFromSameHour( logger = logger , pickleNames = pickleNames , clientName = combinedClientName,\
                                                         combinedMachineName = combinedMachineName, currentTime = seperators[i],\
                                                         mergedPickleName = mergedPickleNames[i], fileType = fileType  )
                                     
                 for pickle in pickleNames :
                     vc.updateFileInList( file = pickle )                                               
                 
                 vc.saveList( user = combinedMachineName, clients = clientsForVersionManagement )
                 
                 
                         
      # Once all machines have merged the necessary pickles we merge all pickles 
      # into a single file stats entry. 
     if groupName !="":
         nameToUseForMerger = groupName 
     else:
         nameToUseForMerger = ""
         nameToUseForMerger = nameToUseForMerger.join( [ client for client in clients] )
     
     newFSC =  PickleMerging.mergePicklesFromDifferentHours( logger = logger , startTime = startTime, endTime = endTime, client = nameToUseForMerger,\
                                                             machine = combinedMachineName,fileType = fileType  )
    
     return newFSC
Example #18
0
def updateHourlyPickles( infos, paths, logger = None ):
    """
        @summary : This method is to be used when hourly pickling is done : 1 pickle per hour per client. 
        
        This method will update the pickles by collecting data from the time of the last 
        pickle up to the current date (system time or the one specified by the user).
        
        If for some reason data wasn't collected for one or more hours since the last pickle, pickles
        for the missing hours will be created and filled with data. 
        
        If no entries are found for this client in the pickled-times file, we take for granted that
        this is a new client. In that case data will be collected from the top of the hour up to the 
        time of the call.
        
        If a new client has been producing data before the day of the first call, the user can specify a 
        time other than the system time for the first day to pickle. This method can then be called 
        with the current system time, and data between the first day and the current time will be 
        collected so that pickling can continue like it does for the other clients.
        
        
    """  
    
    sp = StatsPickler( logger = logger )
    
    pathToLogFiles = GeneralStatsLibraryMethods.getPathToLogFiles( LOCAL_MACHINE, infos.machine )
    
    for i in range( len (infos.clients) ) :
        
        sp.client = infos.clients[i]
        
        width = StatsDateLib.getSecondsSinceEpoch(infos.endTime) - StatsDateLib.getSecondsSinceEpoch( StatsDateLib.getIsoWithRoundedHours(infos.startTimes[i] ) ) 
        
        
        if width > StatsDateLib.HOUR :#In case pickling didn't happen for a few hours for some reason...   
            
            hours = [infos.startTimes[i]]
            hours.extend( StatsDateLib.getSeparatorsWithStartTime( infos.startTimes[i], interval = StatsDateLib.HOUR, width = width ))
            
            for j in range( len(hours)-1 ): #Covers hours where no pickling was done.                               
                
                startOfTheHour = StatsDateLib.getIsoWithRoundedHours( hours[j] )
                startTime = startOfTheHour        
                                                   
                endTime = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( StatsDateLib.getIsoWithRoundedHours(hours[j+1] ) ))
                #print " client : %s startTime : %s endTime : %s" %(infos.clients[i], startTime, endTime )
                
                if startTime >= endTime and logger != None :                                
                    try:
                        logger.warning( _("Startime used in updateHourlyPickles was greater or equal to end time.") )    
                    except:
                        pass    
                
                sp.pickleName =  StatsPickler.buildThisHoursFileName( client = infos.clients[i], currentTime =  startOfTheHour, machine = infos.machine, fileType = infos.fileType )
                 
                sp.collectStats( types = infos.types, startTime = startTime , endTime = endTime, interval = infos.interval * StatsDateLib.MINUTE,\
                                 directory = pathToLogFiles, fileType = infos.fileType )                     
                           
                    
        else:      
           
            startTime = infos.startTimes[i]
            endTime   = infos.endTime             
            startOfTheHour = StatsDateLib.getIsoWithRoundedHours( infos.startTimes[i] )
            #print " client : %s startTime : %s endTime : %s" %(infos.clients[i], startTime, endTime )               
            if startTime >= endTime and logger != None :#to be removed                
                try:
                    logger.warning( _("Startime used in updateHourlyPickles was greater or equal to end time.") )    
                except:
                    pass    
                
            sp.pickleName = StatsPickler.buildThisHoursFileName( client = infos.clients[i], currentTime = startOfTheHour, machine = infos.machine, fileType = infos.fileType )            
              
            sp.collectStats( infos.types, startTime = startTime, endTime = endTime, interval = infos.interval * StatsDateLib.MINUTE, directory = pathToLogFiles, fileType = infos.fileType )        
       
                         
        setLastUpdate( machine = infos.machine, client = infos.clients[i], fileType = infos.fileType, currentDate = infos.currentDate, paths = paths, collectUpToNow = infos.collectUpToNow )
Example #19
0
def getOptionsFromParser( parser, paths, logger = None  ):
    """
        
        @summary : This method parses the argv received when the program was called.
                   It takes the params which have been passed by the user and sets them 
                   in the corresponding fields of the hlE variable.   
    
        @Warning : If errors are encountered in the parameters used, it will immediately terminate 
                   the application. 
    
    """ 
    
    directories  = []
    startTimes   = []
    
    ( options, args ) = parser.parse_args()        
    
    interval       = options.interval
    collectUpToNow = options.collectUpToNow 
    currentDate    = options.currentDate.replace( '"','' ).replace( "'",'' )
    currentDate    = StatsDateLib.getIsoWithRoundedHours( currentDate ) 
    fileType       = options.fileType.replace( "'",'' )
    machine        = options.machine.replace( " ","" )
    clients        = options.clients.replace(' ','' ).split( ',' )
    types          = options.types.replace( ' ', '' ).split( ',' )
    pathToLogFiles = GeneralStatsLibraryMethods.getPathToLogFiles( LOCAL_MACHINE, machine )
    
    #print "*****pathToLogFiles %s" %pathToLogFiles
    
    
    try: # Makes sure date is of valid format. 
         # Makes sure only one space is kept between date and hour.
        t =  time.strptime( currentDate, '%Y-%m-%d %H:%M:%S' )
        split = currentDate.split()
        currentDate = "%s %s" %( split[0],split[1] )

    except:    
        print _("Error. The date format must be YYYY-MM-DD HH:MM:SS" )
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()
            
        
    try:    
        if int( interval ) < 1 :
            raise Exception("")
    
    except:
        
        print _("Error. The interval value needs to be an integer one above 0." )
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()
        
    
    if fileType != "tx" and fileType != "rx":
        print _("Error. File type must be either tx or rx.")
        print _('Multiple types are not accepted.' )
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()    
        
    
    if fileType == "tx":       
        validTypes = [ _("errors"), _("latency"), _("bytecount") ]

    else:
        validTypes = [ _("errors"), _("bytecount") ]
     
    
    if types[0] == _("All"):
        types = validTypes
                     
    try :
        for t in types :
            if t not in validTypes:
                raise Exception("")

    except:    
        
        print _("Error. With %s fileType, possible data types values are : %s.") %(fileType,validTypes )
        print _('For multiple types use this syntax : -t "type1,type2"' )
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()



    def translateType(typeToTranslate):
        translations = { _("errors"):"errors", _("latency"):"latency", _("bytecount"):"bytecount" }
        return translations[typeToTranslate]
        
    types = map( translateType, types )     

    
    if clients[0] == _("All") :
        rxNames, txNames = GeneralStatsLibraryMethods.getRxTxNames( LOCAL_MACHINE, machine )
       
        if fileType == "tx": 
            clients = txNames                     
        else:
            clients = rxNames          
        
          
    #print "clients found :%s" %clients   
             
    # Verify that each client needs to be updated. 
    # If not, we add a warning to the logger and remove the client from the list
    # since it's not needed, but other clients might be.
    usefullClients = []
    for client in clients :
        startTime = getLastUpdate( machine = machine, client = client, fileType= fileType, currentDate =  currentDate , paths = paths, collectUpToNow = collectUpToNow )
               
        if currentDate > startTime:
            #print " client : %s currentDate : %s   startTime : %s" %( client, currentDate, startTime )
            directories.append( pathToLogFiles )
            startTimes.append( startTime )
            usefullClients.append( client )
        else:
            #print "This client was not updated since it's last update was more recent than specified date : %s" %client
            if logger != None :
                try:
                    logger.warning( _("This client was not updated since it's last update was more recent than specified date : %s") %client)      
                except :
                    pass    
                
    infos = _UpdaterInfos( currentDate = currentDate, clients = usefullClients, startTimes = startTimes, directories = directories ,\
                           types = types, collectUpToNow = collectUpToNow, fileType = fileType, machine = machine )
    
    if collectUpToNow == False:
        infos.endTime = StatsDateLib.getIsoWithRoundedHours( infos.currentDate ) 
       
        
    return infos 
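
# --- Illustrative sketch (not part of the original module) ---
# getOptionsFromParser above expects an optparse parser that defines interval,
# collectUpToNow, currentDate, fileType, machine, clients and types. The flag
# spellings, short options and defaults below are assumptions for illustration
# only; the real parser is built elsewhere in the project.
from optparse import OptionParser

def createExampleParser():
    parser = OptionParser()
    parser.add_option( '-i', '--interval', dest = 'interval', default = 1,
                       help = 'Interval, in minutes, of the entries in the pickles.' )
    parser.add_option( '-n', '--collectUpToNow', dest = 'collectUpToNow',
                       action = 'store_true', default = False,
                       help = 'Collect data up to the present hour.' )
    parser.add_option( '-d', '--date', dest = 'currentDate',
                       default = '2006-05-18 00:00:00',
                       help = 'Date of the update, formatted as YYYY-MM-DD HH:MM:SS.' )
    parser.add_option( '-f', '--fileType', dest = 'fileType', default = 'tx',
                       help = 'Type of log files to pickle : tx or rx.' )
    parser.add_option( '-m', '--machine', dest = 'machine', default = 'localhost',
                       help = 'Machine whose log files will be pickled.' )
    parser.add_option( '-c', '--clients', dest = 'clients', default = 'All',
                       help = 'Comma-separated list of clients, or All.' )
    parser.add_option( '-t', '--types', dest = 'types', default = 'All',
                       help = 'Comma-separated list of data types, or All.' )
    return parser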
Example #20
0
    def collectStats(self,
                     types,
                     directory,
                     fileType="tx",
                     startTime='2006-05-18 00:00:00',
                     endTime="",
                     interval=60 * StatsDateLib.MINUTE,
                     save=True):
        """
            @summary : This method is used to collect stats from log files found within a directory.
            
                        Types is the type of data to be collected. 
                        
                        Pickle is the name of the file to be used. If not specified, it will be generated
                        according to the other parameters.
                        
                        FileType specifies what type of files to look for in the directory.
                        
                        StartTime and endTime specify the boundaries within which we'll collect the data. 
                        
                        Interval is the width of the entries in the stats collection. 
                            
                        save can be False if for some reason the user does not want to save the pickle.            
                                   
                        If both of the above are true, hourly pickles will be done.
                        
                        Pre-conditions : startTime needs to be smaller than endTime.
                                         
                                         If daily pickling is used, the width between startTime 
                                         and endTime needs to be no more than 24 hours.
                                         
                                         If hourly pickling is used, the width between startTime 
                                         and endTime needs to be no more than 1 hour.
                                           
                    
                        If pre-conditions aren't met, the application will fail.
            
        """

        global _

        # Find an up-to-date file list.
        self.fileCollection =  LogFileCollector( startTime  = startTime , endTime = endTime, directory = directory, lastLineRead = "",\
                                                 logType = fileType, name = self.client, logger = self.logger, logging = self.logging )

        temp = self.logger  #Need to remove current logger temporarily
        del self.logger
        self.fileCollection.collectEntries()  #find all entries from the folder
        self.logger = temp

        if self.fileCollection.logger != None:  #No longer need the logger
            self.fileCollection.logger = None

        if os.path.isfile(self.pickleName):

            if self.logger != None:
                self.logger.warning(
                    _("User tried to modify allready filled pickle file."))
                self.logger.warning(
                    _("Pickle was named : %s") % self.pickleName)

        # Creates a new FileStats collector which spans from the very
        # start of the hour up to the end.

        if self.pickleName == "":
            self.pickleName = StatsPickler.buildThisHoursFileName(
                client=self.client,
                currentTime=startTime,
                machine=self.machine,
                fileType=fileType)


        self.statsCollection = FileStatsCollector( files = self.fileCollection.entries, fileType = fileType, statsTypes = types,\
                                                   startTime = StatsDateLib.getIsoWithRoundedHours( startTime ), endTime = endTime,\
                                                   interval = interval, totalWidth = 1*StatsDateLib.HOUR, logger = self.logger,logging = self.logging )

        # Temporarily delete logger to make sure no duplicated lines appear in the log file.
        temp = self.logger
        del self.logger
        self.statsCollection.collectStats(endTime)
        self.logger = temp

        if save == True:  # Must remove logger temporarily. Open log files cannot be pickled.

            loggerNeedsToBeReplaced = False  # Guard : otherwise unbound when statsCollection has no logger.

            if self.statsCollection.logger != None:
                temp = self.statsCollection.logger
                del self.statsCollection.logger
                loggerNeedsToBeReplaced = True

            CpickleWrapper.save(object=self.statsCollection,
                                filename=self.pickleName)

            try:
                os.chmod(self.pickleName, 0777)

                dirname = os.path.dirname(self.pickleName)

                while dirname != STATSPATHS.STATSPICKLES[:-1]:  # [:-1] removes the trailing / character

                    try:
                        os.chmod(dirname, 0777)
                    except:
                        pass

                    dirname = os.path.dirname(dirname)

            except:
                pass

            if loggerNeedsToBeReplaced:
                self.statsCollection.logger = temp

            if self.logger != None:
                self.logger.info(
                    _("Saved pickle named : %s ") % self.pickleName)
Example #21
0
def getOptionsFromParser( parser ):
    """
        
        @summary : This method parses the argv received when 
                   the program was called.
        
                   It takes the params which have been passed by
                   the user and sets them in the corresponding 
                   fields of the infos variable.   
    
        @note :    If errors are encountered in the parameters used, 
                   it will immediately terminate the application. 
    
    """ 
    
    currentTime   = []
    
    ( options, args )= parser.parse_args()            
    collectUpToNow   = options.collectUpToNow
    timespan         = options.timespan
    machines         = options.machines.replace( ' ','').split(',')
    clientNames      = options.clients.replace( ' ','' ).split(',')
    types            = options.types.replace( ' ', '').split(',')
    currentTime      = options.currentTime.replace('"','').replace("'",'')
    fileType         = options.fileType.replace("'",'')
    collectUpToNow   = options.collectUpToNow
    copy             = options.copy
    combineClients   = options.combineClients
    productTypes     = options.productTypes.replace( ' ', '' ).split( ',' )     
    groupName        = options.groupName.replace( ' ','' ) 
    outputLanguage   = options.outputLanguage
    
    try: # Makes sure date is of valid format. 
         # Makes sure only one space is kept between date and hour.
        t =  time.strptime( currentTime, '%Y-%m-%d %H:%M:%S' )
        split = currentTime.split()
        currentTime = "%s %s" %( split[0], split[1] )

    except:    
        print _("Error. The date format must be YYYY-MM-DD HH:MM:SS" )
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()
    
    
    try:    
        if int( timespan ) < 1 :
            raise Exception("")
                
    except:
        
        print _("Error. The timespan value needs to be an integer one above 0." )
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()
        
    
    if fileType != "tx" and fileType != "rx":
        print _("Error. File type must be either tx or rx.")
        print _('Multiple types are not accepted.' )
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()    
    
    
    if groupName != "" and combineClients == False :
        print _("Error. -g|--groupeName option requires the --combineClients options.")
        print _('Group names are otherwise irrelevant.' )
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()          
        
        
    try :
    
        if fileType == "tx":       
            validTypes = [ _("errors"), _("filecount"), _("bytecount"), _("latency") ]
            
            if types[0] == _("All"):
                types = validTypes
            else :
                for t in types :
                    if t not in validTypes:
                        raise Exception("")
        else:
            validTypes = [ _("errors"), _("filecount"), _("bytecount") ]
            
            if types[0] == _("All"):
                types = validTypes
            
            else :
                for t in types :
                    if t not in validTypes:
                        raise Exception("")

    except:    
        
        print _("Error. With %s fileType, possible data types values are : %s.") %( fileType,validTypes )
        print _('For multiple types use this syntax : -t "type1,type2"') 
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()
    
    if outputLanguage not in LanguageTools.getSupportedLanguages():
        print _("Error. %s is not one of the supproted languages")
        print _("Use one of the following languages : %s" % str( LanguageTools.getSupportedLanguages() ).replace("[","").replace("]","") )
        print _("Use -h for additional help.")
        print _("Program terminated.")
        
    clientNames = GeneralStatsLibraryMethods.filterClientsNamesUsingWilcardFilters( currentTime, timespan, clientNames, machines, [fileType])
    
    directory =  GeneralStatsLibraryMethods.getPathToLogFiles( LOCAL_MACHINE, machines[0] )
    
    infos = _GraphicsInfos( collectUpToNow = collectUpToNow, currentTime = currentTime, clientNames = clientNames,\
                            groupName = groupName,  directory = directory , types = types, fileType = fileType, \
                            timespan = timespan, productTypes = productTypes, machines = machines, copy = copy, \
                            combineClients = combineClients, outputLanguage = outputLanguage )
    
    if collectUpToNow == False:
        infos.endTime = StatsDateLib.getIsoWithRoundedHours( infos.currentTime ) 
    
    
    return infos 
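
# --- Illustrative sketch (not part of the original module) ---
# The tx/rx type validation in the parser above can be read as a single rule :
# tx clients also have a latency type, rx clients do not. The helper below
# restates that rule on its own; its name and the ValueError it raises are
# assumptions for illustration, whereas the parser above prints a message and
# exits instead.
def expandAndValidateTypes( types, fileType ):
    """ Return the concrete list of data types for fileType, or raise ValueError. """
    if fileType == 'tx':
        validTypes = [ 'errors', 'filecount', 'bytecount', 'latency' ]
    else:
        validTypes = [ 'errors', 'filecount', 'bytecount' ]

    if types and types[0] == 'All':
        return validTypes

    for t in types:
        if t not in validTypes:
            raise ValueError( 'With %s fileType, possible data type values are : %s.' % ( fileType, validTypes ) )

    return types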
Example #22
0
def getOptionsFromParser(parser):
    """
        
        @summary : This method parses the argv received when 
                   the program was called.
        
                   It takes the params which have been passed by
                   the user and sets them in the corresponding 
                   fields of the infos variable.   
    
        @note :    If errors are encountered in the parameters used, 
                   it will immediately terminate the application. 
    
    """

    currentTime = []

    (options, args) = parser.parse_args()
    collectUpToNow = options.collectUpToNow
    timespan = options.timespan
    machines = options.machines.replace(' ', '').split(',')
    clientNames = options.clients.replace(' ', '').split(',')
    types = options.types.replace(' ', '').split(',')
    currentTime = options.currentTime.replace('"', '').replace("'", '')
    fileType = options.fileType.replace("'", '')
    collectUpToNow = options.collectUpToNow
    copy = options.copy
    combineClients = options.combineClients
    productTypes = options.productTypes.replace(' ', '').split(',')
    groupName = options.groupName.replace(' ', '')
    outputLanguage = options.outputLanguage

    try:  # Makes sure date is of valid format.
        # Makes sure only one space is kept between date and hour.
        t = time.strptime(currentTime, '%Y-%m-%d %H:%M:%S')
        split = currentTime.split()
        currentTime = "%s %s" % (split[0], split[1])

    except:
        print _("Error. The date format must be YYYY-MM-DD HH:MM:SS")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    try:
        if int(timespan) < 1:
            raise Exception("")

    except:

        print _(
            "Error. The timespan value needs to be an integer greater than 0.")
        print _("Use -h for help.")
        print _("Program terminated.")
        sys.exit()

    if fileType != "tx" and fileType != "rx":
        print _("Error. File type must be either tx or rx.")
        print _('Multiple types are not accepted.')
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()

    if groupName != "" and combineClients == False:
        print _(
            "Error. The -g|--groupeName option requires the --combineClients option."
        )
        print _('Group names are otherwise irrelevant.')
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()

    try:

        if fileType == "tx":
            validTypes = [
                _("errors"),
                _("filecount"),
                _("bytecount"),
                _("latency")
            ]

            if types[0] == _("All"):
                types = validTypes
            else:
                for t in types:
                    if t not in validTypes:
                        raise Exception("")
        else:
            validTypes = [_("errors"), _("filecount"), _("bytecount")]

            if types[0] == _("All"):
                types = validTypes

            else:
                for t in types:
                    if t not in validTypes:
                        raise Exception("")

    except:

        print _("Error. With %s fileType, possible data types values are : %s."
                ) % (fileType, validTypes)
        print _('For multiple types use this syntax : -t "type1,type2"')
        print _("Use -h for additional help.")
        print _("Program terminated.")
        sys.exit()

    if outputLanguage not in LanguageTools.getSupportedLanguages():
        print _("Error. %s is not one of the supproted languages")
        print _("Use one of the following languages : %s" %
                str(LanguageTools.getSupportedLanguages()).replace(
                    "[", "").replace("]", ""))
        print _("Use -h for additional help.")
        print _("Program terminated.")

    clientNames = GeneralStatsLibraryMethods.filterClientsNamesUsingWilcardFilters(
        currentTime, timespan, clientNames, machines, [fileType])

    directory = GeneralStatsLibraryMethods.getPathToLogFiles(
        LOCAL_MACHINE, machines[0])

    infos = _GraphicsInfos( collectUpToNow = collectUpToNow, currentTime = currentTime, clientNames = clientNames,\
                            groupName = groupName,  directory = directory , types = types, fileType = fileType, \
                            timespan = timespan, productTypes = productTypes, machines = machines, copy = copy, \
                            combineClients = combineClients, outputLanguage = outputLanguage )

    if collectUpToNow == False:
        infos.endTime = StatsDateLib.getIsoWithRoundedHours(infos.currentTime)

    return infos