def createMergedPicklesList(startTime, endTime, clients, groupName, fileType, machines, seperators):
    """
    @summary : Builds the list of merged-pickle file names, one per
               separator (hour) found in seperators.

    @param machines: Machines must be an array containing the list of
                     machines to use. If only one machine is to be used
                     still use an array containing a single item.

    @param seperators: Iterable of hour timestamps; one file name is
                       produced for each.

    @return : List of pickle file names.
    """
    pickleList = []

    # All machine names are concatenated into one combined name.
    combinedMachineName = "".join(machines)

    # Bug fix: the original called groupName.join(...), which raised
    # AttributeError when groupName was None even though the condition
    # explicitly allows None. Concatenating the client names gives the
    # same result as the old "" case and repairs the None case.
    if groupName == "" or groupName is None:
        groupName = "".join(clients)

    for seperator in seperators:
        pickleList.append(
            StatsPickler.buildThisHoursFileName(client=groupName,
                                                currentTime=seperator,
                                                fileType=fileType,
                                                machine=combinedMachineName))

    return pickleList
def collectDataForCombinedGraphics(self, startTime, endTime, types):
    """
    @summary : Returns a list of one ClientStatsPicklers instance which
               contains the combined data of all the individual graphics.

    @return  : Returns a list of ONE ClientStatsPicklers.
    """

    # Merge the data of every client on every machine into one collection.
    mergedStats = PickleMerging.mergePicklesFromDifferentSources(logger=None,
                                                                startTime=startTime,
                                                                endTime=endTime,
                                                                clients=self.clientNames,
                                                                fileType=self.fileType,
                                                                machines=self.machines,
                                                                groupName=self.groupName)

    # Single combined name representing every machine involved.
    machineName = "".join(self.machines)

    # NOTE(review): verify the parameters used on this call.
    collector = StatsPickler(client=self.clientNames,
                             statsTypes=types,
                             directory=self.directory,
                             statsCollection=mergedStats,
                             machine=machineName,
                             logger=None,
                             logging=False)

    return [collector]
def getMergedData(clients, fileType, machines, startTime, endTime, groupName="", logger=None):
    """
    This method returns all data comprised between startTime and endTime
    as to be able to build pairs.
    """

    # "tx" transfers also carry latency; everything else has errors/bytecount only.
    if fileType == "tx":
        statTypes = ["errors", "bytecount", "latency"]
    else:
        statTypes = ["errors", "bytecount"]

    if len(machines) > 1 or len(clients) > 1:
        # Several sources: merge across machines and clients.
        mergedCollection = PickleMerging.mergePicklesFromDifferentSources(logger=logger,
                                                                         startTime=startTime,
                                                                         endTime=endTime,
                                                                         clients=clients,
                                                                         fileType=fileType,
                                                                         machines=machines,
                                                                         groupName=groupName)
    else:
        # Only one machine and one client: just merge the different hours together.
        mergedCollection = PickleMerging.mergePicklesFromDifferentHours(logger=logger,
                                                                       startTime=startTime,
                                                                       endTime=endTime,
                                                                       client=clients[0],
                                                                       fileType=fileType,
                                                                       machine=machines[0])

    # Concatenate all machine names into a single combined name.
    machineName = "".join(machines)

    return StatsPickler(client=clients[0],
                        statsTypes=statTypes,
                        directory="",
                        statsCollection=mergedCollection,
                        machine=machineName,
                        logger=logger)
def mergePicklesFromDifferentHours( logger = None , startTime = "2006-07-31 13:00:00",\
                                    endTime = "2006-07-31 19:00:00", client = "satnet",\
                                    machine = "pdsPM", fileType = "tx" ):
    """
    @summary : This method merges entire hourly pickles files together.

    @None    : This does not support merging part of the data of pickles.
    """

    if logger != None :
        logger.debug( _("Call to mergeHourlyPickles received.") )
        logging = True
    else:
        logging = False

    pickles = []
    entries = {}
    # Total span in seconds; computed BEFORE startTime is rounded down to the hour.
    width = StatsDateLib.getSecondsSinceEpoch( endTime ) - StatsDateLib.getSecondsSinceEpoch( startTime )
    startTime = StatsDateLib.getIsoWithRoundedHours( startTime )

    # One separator per hour of the span; the trailing separator is dropped.
    seperators = [startTime]
    seperators.extend( StatsDateLib.getSeparatorsWithStartTime( startTime = startTime , width=width, interval=60*StatsDateLib.MINUTE )[:-1])

    # One pickle file name per hourly separator.
    for seperator in seperators :
        pickles.append( StatsPickler.buildThisHoursFileName( client = client, offset = 0, currentTime = seperator, machine = machine, fileType = fileType ) )

    # Running index of the next free slot in the merged entries dict.
    startingNumberOfEntries = 0
    #print "prior to loading and merging pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) )
    for pickle in pickles :

        if os.path.isfile( pickle ) :
            tempCollection = CpickleWrapper.load( pickle )
            if tempCollection != None :
                # Append this hour's entries after the ones already merged.
                for i in xrange( len( tempCollection.fileEntries ) ):
                    entries[startingNumberOfEntries + i] = tempCollection.fileEntries[i]
                startingNumberOfEntries = startingNumberOfEntries + len( tempCollection.fileEntries )
            else:
                # NOTE(review): an unreadable pickle aborts the whole process
                # silently -- consider logging an error before exiting.
                sys.exit()
        else:
            # Missing hour: pad with 60 empty one-minute entries so the
            # indexing of the following hours stays aligned.
            emptyEntries = PickleMerging.fillWithEmptyEntries( nbEmptyEntries = 60, entries = {} )
            for i in xrange( 60 ):
                entries[i + startingNumberOfEntries ] = emptyEntries [i]
            startingNumberOfEntries = startingNumberOfEntries + 60

    #print "after the loading and merging og pickles : %s " %( StatsDateLib.getIsoFromEpoch( time.time() ) )

    statsCollection = FileStatsCollector( startTime = startTime , endTime = endTime, interval = StatsDateLib.MINUTE, totalWidth = width, fileEntries = entries,fileType= fileType, logger = logger, logging = logging )

    return statsCollection
def createNonMergedPicklesList( currentTime, machines, fileType, clients ):
    """
    @summary : Create a list of all pickles names concerning different
               machines for a certain hour.
    """

    # One file name per (machine, client) pair, machines varying slowest.
    return [ StatsPickler.buildThisHoursFileName( client = client,
                                                  currentTime = currentTime,
                                                  fileType = fileType,
                                                  machine = machine )
             for machine in machines
             for client in clients ]
def collectDataForIndividualGraphics(self, startTime, endTime, types):
    """
    @summary : Returns a list of ClientStatsPicklers instances, each of
               which contains data for all the individual graphics.

    @return  : List of ClientStatsPicklers instances.
    """

    global _

    dataCollection = []
    # Combined machine name is the same for every client; build it once.
    machineName = "".join(self.machines)

    for client in self.clientNames:

        # Gather data from all previously created pickles....
        if self.logger != None:
            try:
                self.logger.debug(_("Call to mergeHourlyPickles."))
                self.logger.debug(_("Parameters used : %s %s %s") % (startTime, endTime, client))
            except:
                pass

        if len(self.machines) > 1:
            # Several machines: merge this single client across all of them.
            statsCollection = PickleMerging.mergePicklesFromDifferentSources(logger=None,
                                                                            startTime=startTime,
                                                                            endTime=endTime,
                                                                            clients=[client],
                                                                            fileType=self.fileType,
                                                                            machines=self.machines,
                                                                            groupName=self.groupName)
        else:
            # Only one machine: only merge the different hours together.
            statsCollection = PickleMerging.mergePicklesFromDifferentHours(logger=None,
                                                                          startTime=startTime,
                                                                          endTime=endTime,
                                                                          client=client,
                                                                          fileType=self.fileType,
                                                                          machine=self.machines[0])

        dataCollection.append(StatsPickler(client=self.clientNames,
                                           statsTypes=types,
                                           directory=self.directory,
                                           statsCollection=statsCollection,
                                           machine=machineName,
                                           logger=None,
                                           logging=False))

    return dataCollection
def createNonMergedPicklesList(currentTime, machines, fileType, clients):
    """
    @summary : Create a list of all pickles names concerning different
               machines for a certain hour.
    """

    names = []
    # Outer loop over machines, inner over clients -- one name per pair.
    for host in machines:
        names.extend(StatsPickler.buildThisHoursFileName(client=clientName,
                                                         currentTime=currentTime,
                                                         fileType=fileType,
                                                         machine=host)
                     for clientName in clients)
    return names
def createMergedPicklesList( startTime, endTime, clients, groupName, fileType, machines, seperators ):
    """
    @summary : Builds the list of merged-pickle file names, one per
               separator (hour) found in seperators.

    @param machines: Machines must be an array containing the list of
                     machines to use. If only one machine is to be used
                     still use an array containing a single item.

    @return : List of pickle file names.
    """
    pickleList = []

    # All machine names concatenated into a single combined name.
    combinedMachineName = "".join( machines )

    # Bug fix: the original called groupName.join(...), which raised
    # AttributeError when groupName was None even though the condition
    # explicitly allows None. "".join(clients) is identical for the ""
    # case and repairs the None case.
    if groupName == "" or groupName is None:
        groupName = "".join( clients )

    for seperator in seperators:
        pickleList.append( StatsPickler.buildThisHoursFileName( client = groupName,
                                                                currentTime = seperator,
                                                                fileType = fileType,
                                                                machine = combinedMachineName ) )

    return pickleList
def mergePicklesFromDifferentHours( logger = None , startTime = "2006-07-31 13:00:00",\
                                    endTime = "2006-07-31 19:00:00", client = "satnet",\
                                    machine = "pdsPM", fileType = "tx" ):
    """
    @summary : This method merges entire hourly pickles files together.

    @None    : This does not support merging part of the data of pickles.
    """

    logging = logger != None
    if logging:
        logger.debug(_("Call to mergeHourlyPickles received."))

    # Span in seconds must be taken before rounding startTime down to the hour.
    width = StatsDateLib.getSecondsSinceEpoch(endTime) - StatsDateLib.getSecondsSinceEpoch(startTime)
    startTime = StatsDateLib.getIsoWithRoundedHours(startTime)

    # One separator per hour of the span; the trailing one is dropped.
    seperators = [startTime] + StatsDateLib.getSeparatorsWithStartTime(startTime=startTime,
                                                                       width=width,
                                                                       interval=60 * StatsDateLib.MINUTE)[:-1]

    # One pickle file name per hourly separator.
    pickles = [StatsPickler.buildThisHoursFileName(client=client,
                                                   offset=0,
                                                   currentTime=seperator,
                                                   machine=machine,
                                                   fileType=fileType)
               for seperator in seperators]

    entries = {}
    nextIndex = 0  # next free slot in the merged entries dict
    for pickle in pickles:
        if not os.path.isfile(pickle):
            # Missing hour: pad with 60 empty entries to keep indexes aligned.
            filler = PickleMerging.fillWithEmptyEntries(nbEmptyEntries=60, entries={})
            for i in xrange(60):
                entries[nextIndex + i] = filler[i]
            nextIndex = nextIndex + 60
        else:
            loaded = CpickleWrapper.load(pickle)
            if loaded == None:
                # Unreadable pickle aborts the process (original behavior).
                sys.exit()
            for i in xrange(len(loaded.fileEntries)):
                entries[nextIndex + i] = loaded.fileEntries[i]
            nextIndex = nextIndex + len(loaded.fileEntries)

    return FileStatsCollector(startTime=startTime,
                              endTime=endTime,
                              interval=StatsDateLib.MINUTE,
                              totalWidth=width,
                              fileEntries=entries,
                              fileType=fileType,
                              logger=logger,
                              logging=logging)
def updateHourlyPickles( infos, paths, logger = None ):
    """
    @summary : This method is to be used when hourly pickling is done.
               -1 pickle per hour per client.

               This method needs will update the pickles by collecting data
               from the time of the last pickle up to the current date.(System
               time or the one specified by the user.)

               If for some reason data wasnt collected for one or more hour
               since last pickle,pickles for the missing hours will be created
               and filled with data.

               If no entries are found for this client in the pickled-times
               file, we take for granted that this is a new client. In that
               case data will be collected from the top of the hour up to the
               time of the call.

               If new client has been producing data before the day of the
               first call, user can specify a different time than system time
               to specify the first day to pickle. He can then call this
               method with the current system time, and data between first day
               and current time will be collected so that pickling can
               continue like the other clients can.
    """

    sp = StatsPickler( logger = logger )

    pathToLogFiles = GeneralStatsLibraryMethods.getPathToLogFiles( LOCAL_MACHINE, infos.machine )

    for i in range( len (infos.clients) ) :

        sp.client = infos.clients[i]

        # Seconds between the rounded-down start hour of this client and the end time.
        width = StatsDateLib.getSecondsSinceEpoch(infos.endTime) - StatsDateLib.getSecondsSinceEpoch( StatsDateLib.getIsoWithRoundedHours(infos.startTimes[i] ) )

        if width > StatsDateLib.HOUR :#In case pickling didnt happen for a few hours for some reason...

            hours = [infos.startTimes[i]]
            hours.extend( StatsDateLib.getSeparatorsWithStartTime( infos.startTimes[i], interval = StatsDateLib.HOUR, width = width ))

            for j in range( len(hours)-1 ): #Covers hours where no pickling was done.

                startOfTheHour = StatsDateLib.getIsoWithRoundedHours( hours[j] )
                startTime = startOfTheHour

                # End of this hour's window: the next separator, rounded down.
                endTime = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( StatsDateLib.getIsoWithRoundedHours(hours[j+1] ) ))
                #print " client : %s startTime : %s endTime : %s" %(infos.clients[i], startTime, endTime )

                if startTime >= endTime and logger != None :
                    try:
                        logger.warning( _("Startime used in updateHourlyPickles was greater or equal to end time.") )
                    except:
                        pass

                sp.pickleName = StatsPickler.buildThisHoursFileName( client = infos.clients[i], currentTime = startOfTheHour, machine = infos.machine, fileType = infos.fileType )

                sp.collectStats( types = infos.types, startTime = startTime , endTime = endTime, interval = infos.interval * StatsDateLib.MINUTE,\
                                 directory = pathToLogFiles, fileType = infos.fileType )

        else:

            # Less than an hour since the last pickle: single collection pass.
            startTime = infos.startTimes[i]
            endTime = infos.endTime
            startOfTheHour = StatsDateLib.getIsoWithRoundedHours( infos.startTimes[i] )
            #print " client : %s startTime : %s endTime : %s" %(infos.clients[i], startTime, endTime )

            if startTime >= endTime and logger != None :#to be removed
                try:
                    logger.warning( _("Startime used in updateHourlyPickles was greater or equal to end time.") )
                except:
                    pass

            sp.pickleName = StatsPickler.buildThisHoursFileName( client = infos.clients[i], currentTime = startOfTheHour, machine = infos.machine, fileType = infos.fileType )

            sp.collectStats( infos.types, startTime = startTime, endTime = endTime, interval = infos.interval * StatsDateLib.MINUTE, directory = pathToLogFiles, fileType = infos.fileType )

        # Record this client's last-update time; uses infos.clients[i], so it
        # belongs inside the per-client loop.
        setLastUpdate( machine = infos.machine, client = infos.clients[i], fileType = infos.fileType, currentDate = infos.currentDate, paths = paths, collectUpToNow = infos.collectUpToNow )