def __init__(self):
    """Build the copier: parse command-line options, create the PX or PDS
    manager according to SwitchoverCopier.SYSTEM, set up logging and fetch
    the receiver/sender paths from the manager.

    Raises ValueError when SwitchoverCopier.SYSTEM is neither 'PX' nor 'PDS'.
    """
    self.getOptionsParser()
    #print SwitchoverCopier.DRBD

    if SwitchoverCopier.SYSTEM == 'PX':
        from PXManager import PXManager
        manager = PXManager(None, SwitchoverCopier.DRBD + '/px/')
        LOG_NAME = manager.LOG + 'PX_SwitchoverCopier.log'   # Log's name
        SwitchoverCopier.SWITCH_DIR = '/apps/px/switchover/'
    elif SwitchoverCopier.SYSTEM == 'PDS':
        from PDSManager import PDSManager
        manager = PDSManager(None, SwitchoverCopier.DRBD + '/pds/')
        LOG_NAME = manager.LOG + 'PDS_SwitchoverCopier.log'  # Log's name
        SwitchoverCopier.SWITCH_DIR = '/apps/pds/switchover/'
    else:
        # FIX: any other SYSTEM value used to fall through and crash below
        # with a NameError on LOG_NAME/manager; fail fast with a clear message.
        raise ValueError("SwitchoverCopier.SYSTEM must be 'PX' or 'PDS', not %r"
                         % (SwitchoverCopier.SYSTEM,))

    self.logger = Logger(LOG_NAME, SwitchoverCopier.LOG_LEVEL, "Copier")
    self.logger = self.logger.getLogger()
    manager.setLogger(self.logger)
    manager.afterInit()

    self.logger.info("Beginning program SwitchoverCopier")
    self.rxPaths = manager.getRxPaths()
    self.txPaths = manager.getTxPaths()
    self.logger.info("Receivers paths: " + str(self.rxPaths))
    self.logger.info("Senders paths: " + str(self.txPaths))
    self.manager = manager
def __init__(self, source=None, logger=None):
    """Set up the ingestor for *source* (or with a bare *logger* only).

    When a source is given, the ingest directory is derived from the
    source's type (FXQ for filters, RXQ otherwise) and the source's own
    logger is used.  A PXManager is then built to obtain the names of all
    clients, filters and sourlients that files may be linked to.
    """
    # General Attributes
    self.source = source
    self.reader = None
    self.drp = None
    self.count = 0
    self.Mcount = 99999

    if source is not None:
        self.ingestDir = PXPaths.RXQ + self.source.name
        # Filters ingest from the filter queue instead of the receive queue
        if self.source.type == 'filter' or self.source.type == 'filter-bulletin' :
            self.ingestDir = PXPaths.FXQ + self.source.name
        self.logger = source.logger
    elif logger is not None:
        self.logger = logger

    self.pxManager = PXManager()                    # Create a manager
    self.pxManager.setLogger(self.logger)           # Give the logger to the manager
    self.pxManager.initNames()                      # Set rx and tx names
    self.clientNames = self.pxManager.getTxNames()  # List of client names (the ones to which we can link files)
    self.filterNames = self.pxManager.getFxNames()  # List of filter names (the ones to which we can link files)
    # A filter must not link files to itself
    if source is not None:
        if self.source.name in self.filterNames :
            self.filterNames.remove(self.source.name)
    self.sourlientNames = self.pxManager.getTRxNames()  # List of sourlient names (the ones to which we can link files)
    self.allNames = self.clientNames + self.filterNames + self.sourlientNames  # Clients + Filters + Sourlients names
    self.clients = {}      # All the Client/Filter/Sourlient objects
    self.fileCache = None  # product processed.
    self.dbDirsCache = CacheManager(maxEntries=200000, timeout=25*3600)  # Directories created in the DB
    self.clientDirsCache = CacheManager(maxEntries=25000, timeout=2*3600)  # original comment said "File ingested in RXQ" — presumably client-side directories; TODO confirm
    self.feedNames = []  # source to feed
    self.feeds = {}
    if source is not None:
        self.logger.info("Ingestor (source %s) can link files to clients: %s" % (source.name, self.allNames))
def setUp(self, logFile='log/PXManager.log'):
    """Create a DEBUG logger and a fully initialized PXManager for the tests."""
    self.logger = Logger(logFile, 'DEBUG', 'Sub').getLogger()
    manager = PXManager()
    manager.setLogger(self.logger)
    # Run every initializer the tests rely on, in the required order.
    for initialize in (manager.initNames, manager.initPXPaths,
                       manager.initShouldRunNames, manager.initRunningNames):
        initialize()
    self.manager = manager
def __init__(self, nopull=False, keep=False, date=None, xstats=False):
    """Prepare a one-day latency statistics run over PX/PDS logs.

    nopull -- do not pull the necessary files (suppose already downloaded)
    keep   -- keep (do not erase) files already present before downloading
    date   -- ISO date (YYYYMMDD) to analyse; defaults to yesterday
    xstats -- also use the xferlog when computing stats
    """
    PXPaths.normalPaths()
    self.manager = PXManager()
    #self.logger = logger.getLogger()

    # Date for which we want to obtain stats
    if date is None:  # FIX: identity test for None (was '== None')
        self.date = dateLib.getYesterdayFormatted()  # ISO Date
    else:
        self.date = date
    self.dateDashed = dateLib.getISODateDashed(self.date)

    self.machines = []    # Machines where the logs can be found
    self.sources = []     # Sources for which we will check arrival time of the products
    self.client = []      # Client for which we will check delivery time of the products (ONLY ONE ENTRY in the list)
    self.messages = []    # FIXME: Special messages coming from weird results
    self.nopull = nopull  # Do not pull the necessary files (we suppose they are already downloaded)
    self.keep = keep      # Erase all the files present before downloading new files
    self.xstats = xstats  # Boolean that determine if we will use xferlog in making stats

    self.goodRx = []       # Lines matching initial values
    self.goodTx = []       # Lines matching initial values
    self.goodXferlog = []  # Lines matching initial values

    self.receivingInfos = {}  # Dict. addressed by filename and containing a tuple of (formatted date, date in seconds, machine)
    self.sendingInfos = {}    # Dict. addressed by filename and containing a tuple of (formatted date, date in seconds, machine)
    self.xferlogInfos = {}    # Dict. addressed by filename and containing a tuple of (formatted date, date in seconds, machine)

    self.stats = {}            # Final stats
    self.sortedStats = []      # Final sorted stats
    self.max = 0               # Maximum latency time in seconds
    self.min = sys.maxint      # Minimum latency time in seconds (Python 2 sentinel)
    self.mean = 0              # Mean latency time in seconds
    self.latencyThreshold = 15 # We don't want to go over this threshold (in seconds)
    self.overThreshold = 0     # Number of files with latency over threshold
    self.underThresholdP = 0   # Percentage of files for which the latency is equal or under threshold
    self.meanWaiting = 0       # Mean waiting time before being noticed by the PDS
    self.random = str(random.random())[2:]  # Unique identificator permitting the program to be run in parallel
    self.system = None         # 'PDS' or 'PX'
    self.rejected = 0          # Count of rejected files
    self.maxInfos = ['NO FILE', ('00:00:00', 'No machine', 0)]  # Informations about the max.
def __init__(self, request, addOn, sendOn, logger):
    """Parse the routing table, run the database search for *request* and
    build the reply bulletin (via constructBulletin()).

    request -- search request handed to DBSearcher
    addOn, sendOn -- NOTE(review): semantics not visible in this block;
                     presumably reply addressing/sending options — confirm with callers
    logger  -- logger shared with the parser and the PXManager
    """
    self.logger = logger
    self.addOn = addOn
    self.sendOn = sendOn
    self.drp = DirectRoutingParser(PXPaths.ROUTING_TABLE, [], logger)
    self.drp.printErrors = False       # parseAndShowErrors() below handles error reporting
    self.drp.parseAndShowErrors()
    self.dbs = DBSearcher(request, False)
    self.results = self.dbs.results    # search hits used when building the bulletin
    self.receiverName = 'request-reply'
    self.pxManager = PXManager()
    self.pxManager.setLogger(self.logger)
    self.pxManager.initNames()         # load rx/tx names before building the bulletin
    self.bulletin = ''
    self.constructBulletin()           # presumably fills self.bulletin — confirm in its definition
def __init__(self, type='impulses', interval=1, imageName=None):
    """Prepare a Gnuplot-based latency graph over a one-day x-axis.

    type      -- graph style; must be in ['linespoint', 'lines', 'boxes', 'impulses']
    interval  -- minutes between each point on the x-axis
    imageName -- name of the image file to produce (None until set)
    """
    PXPaths.normalPaths()
    self.manager = PXManager()
    #self.logger = logger
    #self.manager.setLogger(self.logger)
    self.latenciers = []  # Infos about a particular "latencier"
    self.type = type      # Type of graph must be in: ['linespoint', 'lines', 'boxes', 'impulses']
    self.interval = interval * dateLib.MINUTE  # Number of seconds between each point on the x-axis
    self.imageName = imageName  # Name of the image file
    self.color = None
    self.width = dateLib.DAY    # Width of the x-axis in seconds
    # With width=DAY and interval=MINUTE => len([60, 120, 180, ..., 86400]) = 1440
    self.separators = dateLib.getSeparators(self.width, self.interval)
    # '"0" 0, "1" 60, "2" 120, "3" 180, "4" 240, ... , "22" 1320, "23" 1380, "24" 1440'
    self.xtics = self.getXTics(len(self.separators), self.interval)
    self.graph = Gnuplot.Gnuplot()
def getAllFlows(self, noPrint=True):
    """Collect source/client/sourlient names from every cluster under
    self.rootPath, populate the per-flow cluster dicts and the duplicate
    lists, and optionally print a report.

    noPrint -- when True (default), suppress all report output.
    Returns 1 when self.rootPath is not a directory, None otherwise.
    """
    if noPrint:
        iprint = lambda *x: None
    else:
        iprint = lambda *x: sys.stdout.write(" ".join(map(str, x)) + '\n')

    allSources = []
    allClients = []
    allSourlients = []

    if not os.path.isdir(self.rootPath):
        return 1

    for cluster in self.clusters:
        pxm = PXManager(self.rootPath + cluster + '/')
        # initNames() is truthy on failure (cluster directory missing): skip it
        if pxm.initNames():
            #print (self.rootPath + cluster + " inexistant!")
            continue
        clients, sourlients, sources, aliases = pxm.getFlowNames(tuple=True)

        # Populate flowCluster for current cluster
        pxm.getFlowDict(self.sourceCluster, sources, 'source', cluster)
        pxm.getFlowDict(self.clientCluster, clients, 'client', cluster)
        pxm.getFlowDict(self.sourlientCluster, sourlients, 'sourlient', cluster)

        allSources.extend(sources)
        allClients.extend(clients)
        allSourlients.extend(sourlients)

        iprint("%s" % (80 * '#'))
        iprint("CLUSTER %s" % cluster.upper())
        iprint("%s" % (80 * '#'))
        iprint("sources (%s): %s" % (len(sources), sources))
        iprint("clients (%s): %s" % (len(clients), clients))
        iprint("sourlients (%s): %s" % (len(sourlients), sourlients))
        #print "aliases: %s" % aliases
        iprint()

    pxm = PXManager()
    pxm.initNames()
    self.flowCluster = self.createFlowDict()

    self.dupSources = pxm.identifyDuplicate(allSources)
    self.dupClients = pxm.identifyDuplicate(allClients)
    self.dupSourlients = pxm.identifyDuplicate(allSourlients)
    self.allSources = pxm.removeDuplicate(allSources)
    self.allClients = pxm.removeDuplicate(allClients)
    self.allSourlients = pxm.removeDuplicate(allSourlients)

    # FIX: the combined list was never populated, so dupFlows was always
    # computed from an empty list and self.allFlows (extended just above in
    # the original) was then clobbered by removeDuplicate([]).  Build the
    # combined list first, then dedup.
    allFlows = allSources + allClients + allSourlients
    self.dupFlows = pxm.identifyDuplicate(allFlows)
    self.allFlows = pxm.removeDuplicate(allFlows)

    iprint("Duplicate between sources from all clusters: %s" % self.dupSources)
    iprint("Duplicate between clients from all clusters: %s" % self.dupClients)
    iprint("Duplicate between sourlients from all clusters: %s" % self.dupSourlients)
    iprint("Duplicate beetween flows (sources, sourlients) from all clusters: %s" % self.dupFlows)
    iprint()

    keys = self.flowCluster.keys()
    keys.sort()
    for key in keys:
        if len(self.flowCluster[key]) > 1:
            iprint("%s: %s" % (key, self.flowCluster[key]))

    iprint("source cluster(%s)" % len(self.sourceCluster))
    iprint(self.sourceCluster)
    iprint("client cluster(%s)" % len(self.clientCluster))
    iprint(self.clientCluster)
    iprint("sourlient cluster(%s)" % len(self.sourlientCluster))
    iprint(self.sourlientCluster)
    iprint("flow cluster(%s)" % len(self.flowCluster))
    iprint()
# Script body: list every flow name (sources, clients, sourlients) known to
# PX and print them space-separated on stdout, using a throw-away log file.
sys.path.insert(1,sys.path[0] + '/../importedLibs')
sys.path.append(sys.path[0] + "/../")
sys.path.append("/apps/pds/tools/Columbo/lib")

# Local imports
import PXPaths; PXPaths.normalPaths()
from PXManager import PXManager
from Logger import Logger
from DirectRoutingParser import DirectRoutingParser
import ColumboPath

# Config-based log name kept for reference; a fixed temp file is used instead.
#config = ConfigParser.ConfigParser()
#config.readfp(open(ColumboPath.FULL_MAIN_CONF))
#logname = config.get('SEARCH_AND_RESEND', 'logname')
logname = "/tmp/tmpFlow.log"

logger = Logger(logname, 'INFO', 'SAS').getLogger()
manager = PXManager()
manager.setLogger(logger)
manager.initNames()

drp = DirectRoutingParser(PXPaths.ROUTING_TABLE, [], logger)
drp.printErrors = False
drp.parseAlias()

flows = manager.getAllFlowNames(False, drp)
print " ".join(flows)

# Clean up the throw-away log file created by the Logger above.
os.remove(logname)