def doMergePush(self, file, jobtype, select, timefield, tstart=None, tend=None, days=None, pop=True, dateformat='%y%m%d'):
    """Merge the db and file data.

    - Push the fresh data from the db table on top of the file data.
    - Pop the oldest entry from the merged data when *pop* is true.

    :param file:       file whose cached rows are read via ``self.doFile``
    :param jobtype:    job type forwarded to ``self.doTaskBuffer``
    :param select:     select clause forwarded to ``self.doTaskBuffer``
    :param timefield:  time column forwarded to ``self.doTaskBuffer``
    :param tstart:     optional start of the time window
    :param tend:       optional end of the time window
    :param days:       optional window length in days
    :param pop:        when true, drop the oldest merged entry
    :param dateformat: strptime format used to parse the first file row's date
    :return: dict with the file 'header' and the merged, newest-first 'rows'
    """
    fileData = self.doFile(file)
    fRows = fileData['rows']
    sel = sqlSelect()
    sel.prepareTime(None, tstart, tend, field='', days=days)
    times = sel.times()
    # When an explicit window was requested, fill in whichever edge is
    # missing: the start falls back to one day before the first file row's
    # date, the end to "tomorrow" (UTC) so the freshest db rows are included.
    if times.get('start') is not None or times.get('end') is not None:
        if times.get('start') is None:
            # NOTE(review): fRows[0] is stringified with "%s" and parsed with
            # `dateformat` -- assumes the first row stringifies to a bare date
            # key; confirm against doFile's row layout.
            tstart = datetime.strptime("%s" % fRows[0], dateformat) - timedelta(days=1)
        else:
            tstart = times.get('start')
        if times.get('end') is None:
            tend = datetime.utcnow() + timedelta(days=1)
        else:
            tend = times.get('end')
        days = None
    fRows = dict(fRows)
    dbTableData = self.doTaskBuffer(jobtype, select, timefield, tstart, tend, days)
    fDbRows = dict(dbTableData['rows'])
    # db rows win over file rows on duplicate keys (excludes duplicates)
    fRows.update(fDbRows)
    # newest-first list of [key, value] pairs
    merged = [[key, fRows[key]] for key in sorted(fRows, reverse=True)]
    if pop and merged:  # guard: popping an empty merge would raise IndexError
        merged.pop()
    return {'header': fileData['header'], 'rows': merged}
def doQuery(self, site="panda.mon.bamboo", tstart=None, tend=None, hours=6, days=None, level='', type='', module=None, limit=1000, count=None, message=None):
    """ Show the Selected Panda logger messages
    <ul>
    <li> site - the pattern to select the messages from <a href='https://twiki.cern.ch/twiki/bin/view/PanDA/PandaPlatform#CommaParam'>sites</a>
    <li> tstart - the start time of the period the messages belongs to
    <li> tend - the end time of the period the messages belongs to
    <li> hours - select the messages for the last '24*days+hours' hours
    <li> days - select the messages for the last 'days+hours/24' days
    <li> level - select messages of the certain level only ('INFO', 'WARNING', 'DEBUG', 'ERROR')
    <li> type - the message 'type' (for example: 'taskbrokerage' )
    <li> limit - the max number of records to be fetched from the Db table
    <li> message- the <a href='https://twiki.cern.ch/twiki/bin/view/PanDA/PandaPlatform#CommaParam'>comma separated list</a> of the patterns to select the messages
    </ul>
    """
    # Navigation labels for the non-empty selection parameters.
    sitename = "category=<b>%s</b>" % site if site != '' else ''
    typename = "type=<b>%s</b>" % type if type != '' else ''
    levelmap = {'INFO': 20, 'WARNING': 30, 'DEBUG': 10, 'ERROR': 40}
    levelname = ''
    levelcode = ''
    if level is not None and level != '':
        # An unknown level name raises KeyError here (unchanged behavior).
        levelcode = levelmap[level.upper()]
        levelname = "level=<b>%s</b>" % level
    # An explicit [tstart, tend] window overrides the rolling-hours default.
    if tstart is not None and tend is not None:
        hours = None
    duration = sqlSelect().prepareTime(hours, tstart, tend, 'f', days).times('duration')
    hrs = duration.days * 24 + duration.seconds / 3600
    q = pmt.getLogInfo(site, type, levelcode, hours=hours, limit=limit, message=message, module=module, days=days, tstart=tstart, tend=tend)
    rows = q['rows']
    # Suppress the header when there is nothing to show.
    header = q['header'] if len(rows) > 0 else []
    main = {}
    main['header'] = header
    main['levelmap'] = levelmap
    main['info'] = rows
    main['limit'] = limit
    try:
        main['count'] = int(count)
    except (TypeError, ValueError):
        pass  # 'count' stays unset when it is None or not an integer string
    main['time'] = {}
    main['params'] = {'hours': hrs}
    self.publish(main)
    self.publishNav(' %s' % ' '.join((sitename, typename, levelname)))
    self.publish("%s/%s" % (self.server().fileScriptURL(), "monitor/%s.js" % "logmonitor"), role="script")
    self.publishTitle("Panda logging monitor (logged incidents) over last %s hours ( %.2f days ) " % (hrs, hrs / 24.0))
def doMergePush(self, file, jobtype, select, timefield, tstart=None, tend=None, days=None, pop=True, dateformat='%y%m%d'):
    """Merge file data with fresh db data.

    Fresh rows read from the db table are pushed on top of the rows read
    from *file*; when *pop* is true the oldest merged entry is dropped.
    Returns a dict with the file 'header' and the newest-first 'rows'.
    """
    cached = self.doFile(file)
    cachedRows = cached['rows']
    window = sqlSelect()
    window.prepareTime(None, tstart, tend, field='', days=days)
    bounds = window.times()
    start = bounds.get('start')
    end = bounds.get('end')
    if start is not None or end is not None:
        # Fill in whichever window edge was left unspecified.
        tstart = start if start is not None else datetime.strptime("%s" % cachedRows[0], dateformat) - timedelta(days=1)
        tend = end if end is not None else datetime.utcnow() + timedelta(days=1)
        days = None
    merged = dict(cachedRows)
    fresh = self.doTaskBuffer(jobtype, select, timefield, tstart, tend, days)
    # db rows replace file rows that share the same key (excludes duplicates)
    merged.update(dict(fresh['rows']))
    ordered = [[key, merged[key]] for key in sorted(merged.keys(), reverse=True)]
    if pop == True:
        ordered.pop()
    return {'header': cached['header'], 'rows': ordered}
def doQuery(self, tstart=None, tend=None, hours=6, days=None):
    """Publish the summary of the Panda logged incidents for the selected period."""
    # An explicit [tstart, tend] window overrides the rolling-hours default.
    if tstart is not None and tend is not None:
        hours = None
    span = sqlSelect().prepareTime(hours, tstart, tend, 'f', days).times('duration')
    hrs = span.days * 24 + span.seconds / 3600
    summary = pmt.logSummary(hours, tstart, tend, days)
    rows = summary['rows']
    main = {
        'header': summary['header'] if len(rows) > 0 else [],
        'info': rows,
        'time': {},
        'params': {'hours': hrs},
    }
    self.publish(main)
    self.publish("%s/%s" % (self.server().fileScriptURL(), "monitor/%s.js" % "logsummary"), role="script")
    self.publishTitle("Summary of Panda logged incidents, last %s hours" % hrs)
def doQuery(self, site="panda.mon.bamboo", tstart=None, tend=None, hours=6, days=None, level='', type='', module=None, limit=1000, count=None, message=None):
    """ Show the Selected Panda logger messages
    <ul>
    <li> site - the pattern to select the messages from <a href='https://twiki.cern.ch/twiki/bin/view/PanDA/PandaPlatform#CommaParam'>sites</a>
    <li> tstart - the start time of the period the messages belongs to
    <li> tend - the end time of the period the messages belongs to
    <li> hours - select the messages for the last '24*days+hours' hours
    <li> days - select the messages for the last 'days+hours/24' days
    <li> level - select messages of the certain level only ('INFO', 'WARNING', 'DEBUG', 'ERROR')
    <li> type - the message 'type' (for example: 'taskbrokerage' )
    <li> limit - the max number of records to be fetched from the Db table
    <li> message- the <a href='https://twiki.cern.ch/twiki/bin/view/PanDA/PandaPlatform#CommaParam'>comma separated list</a> of the patterns to select the messages
    </ul>
    """
    # Navigation labels for the non-empty selection parameters.
    sitename = "category=<b>%s</b>" % site if site != '' else ''
    typename = "type=<b>%s</b>" % type if type != '' else ''
    levelmap = {'INFO': 20, 'WARNING': 30, 'DEBUG': 10, 'ERROR': 40}
    levelname = ''
    levelcode = ''
    if level is not None and level != '':
        # An unknown level name raises KeyError here (unchanged behavior).
        levelcode = levelmap[level.upper()]
        levelname = "level=<b>%s</b>" % level
    # An explicit [tstart, tend] window overrides the rolling-hours default.
    if tstart is not None and tend is not None:
        hours = None
    duration = sqlSelect().prepareTime(hours, tstart, tend, 'f', days).times('duration')
    hrs = duration.days * 24 + duration.seconds / 3600
    q = pmt.getLogInfo(site, type, levelcode, hours=hours, limit=limit, message=message, module=module, days=days, tstart=tstart, tend=tend)
    rows = q['rows']
    # Suppress the header when there is nothing to show.
    header = q['header'] if len(rows) > 0 else []
    main = {}
    main['header'] = header
    main['levelmap'] = levelmap
    main['info'] = rows
    main['limit'] = limit
    try:
        main['count'] = int(count)
    except (TypeError, ValueError):
        pass  # 'count' stays unset when it is None or not an integer string
    main['time'] = {}
    main['params'] = {'hours': hrs}
    self.publish(main)
    self.publishNav(' %s' % ' '.join((sitename, typename, levelname)))
    self.publish("%s/%s" % (self.server().fileScriptURL(), "monitor/%s.js" % "logmonitor"), role="script")
    self.publishTitle("Panda logging monitor (logged incidents) over last %s hours ( %.2f days ) " % (hrs, hrs / 24.0))