def __init__(self, **kwarg): PluginBase.__init__(self, **kwarg) # Set up aCT DB connection self.log = core_utils.make_logger(baseLogger, 'aCT submitter', method_name='__init__') self.conf = aCTConfigARC() self.actDB = aCTDBPanda(self.log, self.conf.get(["db", "file"]))
def __init__(self, ceflavour=None):
    """Initialise the agent: name, loggers, configs, DB handles and CRIC info.

    :param ceflavour: list of CE flavours to handle (default ['ARC-CE']).
        Default is created per-call to avoid the shared-mutable-default pitfall.
    """
    # Get agent name from /path/to/aCTAgent.py
    self.name = os.path.basename(sys.argv[0])[:-3]
    # logger
    self.logger = aCTLogger.aCTLogger(self.name)
    self.log = self.logger()
    self.criticallogger = aCTLogger.aCTLogger('aCTCritical', arclog=False)
    self.criticallog = self.criticallogger()
    # config
    self.conf = aCTConfig.aCTConfigAPP()
    self.arcconf = aCTConfig.aCTConfigARC()
    self.tmpdir = str(self.arcconf.get(['tmp', 'dir']))
    # database
    self.dbarc = aCTDBArc.aCTDBArc(self.log)
    self.dbcondor = aCTDBCondor.aCTDBCondor(self.log)
    self.dbpanda = aCTDBPanda.aCTDBPanda(self.log)
    # APFMon
    self.apfmon = aCTAPFMon.aCTAPFMon(self.conf)
    # CRIC info
    # fresh list per instance instead of a mutable default argument
    self.flavour = ceflavour if ceflavour is not None else ['ARC-CE']
    self.cricparser = aCTCRICParser.aCTCRICParser(self.log)
    self.sites = {}
    self.osmap = {}
    self.sitesselect = ''
    # start time for periodic restart
    self.starttime = time.time()
    self.log.info("Started %s", self.name)
def __init__(self, logger, dbname="aCTjobs.db"):
    """Set up the ARC jobs DB: base DB init, proxy store dir, and the mapping
    of arc.Job attributes to SQL column types.

    :param logger: logger instance passed to the aCTDB base class
    :param dbname: database name or file (default "aCTjobs.db")
    """
    aCTDB.__init__(self, logger, dbname)
    conf = aCTConfig.aCTConfigARC()
    self.proxydir = conf.get(["voms", "proxystoredir"])
    # mapping from Job class attribute types to column types
    self.jobattrmap = {int: 'integer',
                       str: 'varchar(255)',
                       arc.JobState: 'varchar(255)',
                       arc.StringList: 'varchar(1024)',
                       arc.URL: 'varchar(255)',
                       arc.Period: 'int',
                       arc.Time: 'datetime',
                       arc.StringStringMap: 'varchar(1024)'}
    # Job members that should not become DB columns (streams, dirs, logs)
    ignoremems = ['STDIN', 'STDOUT', 'STDERR', 'STAGEINDIR', 'STAGEOUTDIR',
                  'SESSIONDIR', 'JOBLOG', 'JOBDESCRIPTION',
                  'JobDescriptionDocument']
    # Attributes of Job class mapped to DB column type: skip dunders,
    # ignored members and attributes whose type has no column mapping
    j = arc.Job()
    self.jobattrs = {i: type(getattr(j, i)) for i in dir(j)
                     if not i.startswith('__')
                     and i not in ignoremems
                     and type(getattr(j, i)) in self.jobattrmap}
def __init__(self, **kwarg):
    """Submitter plugin setup: logging, aCT config/DB, and the role -> proxyid map."""
    PluginBase.__init__(self, **kwarg)
    # Set up aCT DB connection
    self.log = core_utils.make_logger(baseLogger, 'aCT submitter', method_name='__init__')
    self.conf = aCTConfigARC()
    self.actDB = aCTDBPanda(self.log, self.conf.get(["db", "file"]))

    # Get proxy info
    # TODO: specify DN in conf instead
    skip_creds = arc.initializeCredentialsType(
        arc.initializeCredentialsType.SkipCredentials)
    userconf = arc.UserConfig(skip_creds)
    userconf.ProxyPath(str(self.conf.get(['voms', 'proxypath'])))
    identity = arc.Credential(userconf).GetIdentityName()
    self.log.info("Running under DN %s" % identity)

    # Set up proxy map (prod/pilot roles)
    self.proxymap = {}
    proxystore = aCTProxy(self.log)
    for role in self.conf.getList(['voms', 'roles', 'item']):
        voms_attr = '/atlas/Role=' + role
        pid = proxystore.getProxyId(identity, voms_attr)
        if not pid:
            raise Exception(
                "Proxy with DN {0} and attribute {1} was not found in proxies table"
                .format(identity, voms_attr))
        self.proxymap[role] = pid
def __init__(self):
    """Set up the logger, the ARC and client DB handles, and the tmp directory."""
    log = logging.getLogger(__name__)
    self.logger = log
    self.arcdb = aCTDBArc.aCTDBArc(log)
    self.clidb = clientdb.ClientDB(log)
    # TODO: if and when sites from arc config are used, move everything
    # that uses arc config to this class
    self.tmpdir = aCTConfig.aCTConfigARC().get(['tmp', 'dir'])
def __init__(self):
    """Initialise the proxy process and make sure the proxies table is populated."""
    aCTProcess.__init__(self)
    self.conf = aCTConfig.aCTConfigARC()
    self.pm = aCTProxy(self.log)
    # reference timestamp: one proxy-manager interval in the past
    self.tstamp = datetime.datetime.utcnow() \
        - datetime.timedelta(seconds=self.pm.interval)
    if self._updateLocalProxies() == 0:
        # no local proxies in proxies table yet, better populate it
        self._updateRolesFromConfig()
    self._updateMyProxies()
def __init__(self):
    """Load ARC config, set up normal and critical loggers, open ARC and Panda DBs."""
    self.conf = aCTConfig.aCTConfigARC()
    # regular and critical loggers
    self.logger = aCTLogger.aCTLogger("aCTReport")
    self.log = self.logger()
    self.criticallogger = aCTLogger.aCTLogger('aCTCritical', arclog=False)
    self.criticallog = self.criticallogger()
    # ARC and Panda DB handles; both use the same configured DB file
    dbfile = self.conf.get(["db", "file"])
    self.db = aCTDBArc.aCTDBArc(self.log, dbfile)
    self.pandadb = aCTDBPanda.aCTDBPanda(self.log, dbfile)
def __init__(self):
    """Set up agent name, ARC config, logging and the client/ARC DB handles."""
    # agent name is the script file name without the trailing ".py"
    script = os.path.basename(sys.argv[0])
    self.name = script[:-3]
    self.arcconf = aCTConfig.aCTConfigARC()
    self.logger = aCTLogger.aCTLogger(self.name)
    self.log = self.logger()
    self.clidb = clientdb.ClientDB(self.log)
    self.arcdb = aCTDBArc.aCTDBArc(self.log)
    self.log.info('Started {}'.format(self.name))
def bootstrap_conf():
    '''Check config is ok.

    Instantiates both config parsers purely for validation; exits with
    status 1 and a message on stdout if either file fails to parse.
    '''
    try:
        aCTConfigARC()
    except Exception as e:
        print('Error processing ARC config file: %s' % str(e))
        sys.exit(1)
    try:
        aCTConfigAPP()
    except Exception as e:
        print('Error processing APP config file: %s' % str(e))
        sys.exit(1)
def __init__(self, args):
    """Bootstrap aCT: verify ARC, load configs, create dirs, set up logging,
    handle daemon commands and start the process manager.

    :param args: command-line argument list; args[1], if present, is passed
        to self.daemon() (daemon operation such as start/stop)
    """
    # Check we have the right ARC version
    self.checkARC()
    # xml config file
    self.conf = aCTConfig.aCTConfigARC()
    self.appconf = aCTConfig.aCTConfigAPP()
    # Create required directories
    tmpdir = self.conf.get(["tmp", "dir"])
    self.makeDirs(tmpdir)
    self.makeDirs(os.path.join(tmpdir, 'inputfiles'))
    self.makeDirs(os.path.join(tmpdir, 'eventranges'))
    self.makeDirs(os.path.join(tmpdir, 'failedlogs'))
    # proxy store must not be world-readable
    self.makeDirs(self.conf.get(["voms", "proxystoredir"]), 0o700)
    self.makeDirs(self.conf.get(["logger", "logdir"]))
    # logger
    self.logger = aCTLogger.aCTLogger("aCTMain")
    self.log = self.logger()
    # Check if we should run: an act.stop file in the aCT location disables startup
    self.shouldrun = not os.path.exists(
        os.path.join(self.conf.get(["actlocation", "dir"]), "act.stop"))
    if not self.shouldrun:
        self.log.warning(
            "Detected act.stop file, won't start child processes")
    # daemon operations
    if len(args) >= 2:
        self.daemon(args[1])
    # process manager; any failure here is critical and re-raised after logging
    try:
        if self.shouldrun:
            self.procmanager = aCTProcessManager.aCTProcessManager(
                self.log, self.conf, self.appconf)
    except Exception as e:
        self.log.critical("*** Unexpected exception! ***")
        self.log.critical(traceback.format_exc())
        self.log.critical("*** Process exiting ***")
        raise e
def main():
    """Scan the aCT critical log and report CRITICAL entries from the last hour."""
    conf = aCTConfigARC()
    criticallog = '%s/aCTCritical.log' % conf.get(["logger", "logdir"])
    criticalerrors = 0
    lastcritical = ''
    now = datetime.now()
    # Matches e.g. "[2020-01-31 12:00:00,123] ... [CRITICAL]".
    # Raw string avoids invalid escape sequences; compiled once outside the loop.
    critre = re.compile(r'\[(\d\d\d\d-\d\d-\d\d\s\d\d:\d\d:\d\d,\d\d\d)\].*\[CRITICAL\]')
    with open(criticallog) as f:
        for line in f:
            t = critre.match(line)
            if t:
                if abs(now - datetime.strptime(t.group(1), '%Y-%m-%d %H:%M:%S,%f')) < timedelta(hours=1):
                    criticalerrors += 1
                    lastcritical = line
            else:
                # continuation line (e.g. traceback) of the previous entry
                lastcritical += line
    if criticalerrors:
        print('%d critical errors in the last hour\n' % criticalerrors)
        print('Last critical error:\n%s' % lastcritical)
def __init__(self):
    """Initialise the agent: name, loggers, configs and DB handles."""
    # Get agent name from /path/to/aCTAgent.py
    self.name = os.path.basename(sys.argv[0])[:-3]
    # logger
    self.logger = aCTLogger.aCTLogger(self.name)
    self.log = self.logger()
    self.criticallogger = aCTLogger.aCTLogger('aCTCritical', arclog=False)
    self.criticallog = self.criticallogger()
    # config
    self.conf = aCTConfig.aCTConfigATLAS()
    self.arcconf = aCTConfig.aCTConfigARC()
    # database: ARC and Panda DBs share the same configured DB file
    self.dbarc = aCTDBArc.aCTDBArc(self.log, self.conf.get(["db", "file"]))
    self.dbpanda = aCTDBPanda.aCTDBPanda(self.log, self.conf.get(["db", "file"]))
    # start time for periodic restart
    self.starttime = time.time()
    self.log.info("Started %s", self.name)
def __init__(self, **kwarg):
    """Submitter plugin setup: config, DB handle and per-role proxy id lookup."""
    PluginBase.__init__(self, **kwarg)
    # Set up aCT DB connection
    self.log = core_utils.make_logger(baseLogger, 'aCT submitter', method_name='__init__')
    self.conf = aCTConfigARC()
    self.actDB = aCTDBPanda(self.log, self.conf.get(["db", "file"]))

    # Credential dictionary role: proxy file
    roles = [r.split('=')[1] for r in list(harvester_config.credmanager.voms)]
    self.certs = dict(zip(roles, list(harvester_config.credmanager.outCertFile)))

    # Map of role to aCT proxyid
    self.proxymap = {}

    # Get proxy info
    # TODO: better to send aCT the proxy file and let it handle it
    for role, proxyfile in self.certs.items():
        skip_creds = arc.initializeCredentialsType(
            arc.initializeCredentialsType.SkipCredentials)
        userconf = arc.UserConfig(skip_creds)
        userconf.ProxyPath(str(proxyfile))
        identity = arc.Credential(userconf).GetIdentityName()
        self.log.info("Proxy {0} with DN {1} and role {2}".format(
            proxyfile, identity, role))
        voms_attr = '/atlas/Role=' + role
        pid = aCTProxy(self.log).getProxyId(identity, voms_attr)
        if not pid:
            raise Exception(
                "Proxy with DN {0} and attribute {1} was not found in proxies table"
                .format(identity, voms_attr))
        self.proxymap[role] = pid
def __init__(self):
    """Initialise the LDMX agent: name, loggers, configs, DBs and Rucio client."""
    # agent name comes from the script path /path/to/aCTAgent.py, minus ".py"
    self.name = os.path.basename(sys.argv[0])[:-3]
    # regular and critical loggers
    self.logger = aCTLogger.aCTLogger(self.name)
    self.log = self.logger()
    self.criticallogger = aCTLogger.aCTLogger('aCTCritical', arclog=False)
    self.criticallog = self.criticallogger()
    # application and ARC configuration
    self.conf = aCTConfig.aCTConfigAPP()
    self.arcconf = aCTConfig.aCTConfigARC()
    self.tmpdir = str(self.arcconf.get(['tmp', 'dir']))
    # ARC and LDMX database handles
    self.dbarc = aCTDBArc.aCTDBArc(self.log)
    self.dbldmx = aCTDBLDMX.aCTDBLDMX(self.log)
    # Rucio client
    self.rucio = Client()
    # remember start time so the agent can be restarted periodically
    self.starttime = time.time()
    self.log.info("Started %s", self.name)
def __init__(self, logger, tablename):
    """Bind a logger and target table name, then open the configured DB backend."""
    self.log = logger
    self.table = tablename
    self.conf = aCTConfigARC()
    # backend selection is delegated to aCTDBMS based on the config
    self.db = aCTDBMS.getDB(self.log, self.conf)
from act.arc.aCTDBArc import aCTDBArc from act.atlas.aCTDBPanda import aCTDBPanda from act.common.aCTLogger import aCTLogger from act.common.aCTConfig import aCTConfigARC try: service_id, webpage_url = sys.argv[1:3] except: print('Usage: kibana.py service_id webpage_url') sys.exit(1) logger = aCTLogger('kibana probe') log = logger() arcdb = aCTDBArc(log) pandadb = aCTDBPanda(log) config = aCTConfigARC() def getARCJobs(): return str(arcdb.getNArcJobs('TRUE')) def getARCSlots(): jobs = arcdb.getArcJobsInfo("state='Running'", ['RequestedSlots']) slots = 0 for j in jobs: slots += j['RequestedSlots'] return str(slots) def getPandaNotStarted():
def main():
    """CLI entry point: parse arguments, validate site and xRSL description,
    then insert the job into the client DB.

    Exit codes: 4 unknown site, 11 site config unreadable, 6 invalid job
    description, 7 unreadable xRSL file.
    """
    # parse arguments
    parser = argparse.ArgumentParser(description='Submit xRSL job to aCT')
    parser.add_argument('-p', '--proxy', default=None,
                        help='custom path to proxy certificate')
    parser.add_argument('-s', '--site', default='default',
                        help='specific site to submit job to')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='show more information')
    parser.add_argument('xRSL', help='path to xRSL file')
    clicommon.showHelpOnCommandOnly(parser)
    args = parser.parse_args()

    # logging: debug to stdout when verbose, otherwise discard to devnull
    logFormat = "[%(asctime)s] [%(filename)s:%(lineno)d] [%(levelname)s] - %(message)s"
    if args.verbose:
        logging.basicConfig(format=logFormat, level=logging.DEBUG, stream=sys.stdout)
    else:
        logging.basicConfig(format=logFormat, level=logging.DEBUG, filename=os.devnull)

    # get ID given proxy
    proxyid = clicommon.getProxyIdFromProxy(args.proxy)

    # check site
    try:
        jobmgr.checkSite(args.site)  # use default path for sites.json
    except errors.NoSuchSiteError:
        print('error: site \'{}\' is not configured'.format(args.site))
        sys.exit(4)
    except Exception as e:
        print('error: could not read site config: {}'.format(str(e)))
        sys.exit(11)

    # TODO: refactor error handling
    # get and check job description
    try:
        jobdesc = readXRSL(args.xRSL)
        jobmgr.checkJobDesc(jobdesc)
    except errors.InvalidJobDescriptionError:
        print('error: invalid job description')
        sys.exit(6)
    except IOError:
        print('error: could not read xRSL file')
        sys.exit(7)

    # insert job
    clidb = clientdb.ClientDB()
    jobid = clidb.insertJobAndDescription(jobdesc, proxyid, args.site)
    print('Successfully inserted job with id {}'.format(jobid))
def __init__(self, logger):
    """Store the logger, load ATLAS and ARC configs, and run an initial getSites()."""
    self.log = logger
    self.conf = aCTConfig.aCTConfigATLAS()
    self.arcconf = aCTConfig.aCTConfigARC()
    # parse bookkeeping used by getSites(); starts at 0 so the first call
    # always reads fresh data — presumably a parse timestamp, verify in getSites()
    self.tparse = 0
    self.getSites()
def bootstrap_dirs():
    '''Make necessary directories'''
    arcconf = aCTConfigARC()
    # tmp and log directories, world-readable (0o755), created if missing
    for confkey in (['tmp', 'dir'], ['logger', 'logdir']):
        os.makedirs(arcconf.get(confkey), mode=0o755, exist_ok=True)
''' # remove file first proxypath=self.getProxyPath(id) if os.path.isfile(proxypath): os.remove(proxypath) c=self.getCursor() c.execute("DELETE FROM proxies WHERE id="+str(id)) self.conn.commit() if __name__ == '__main__': import logging, sys log = logging.getLogger() out = logging.StreamHandler(sys.stdout) log.addHandler(out) conf = aCTConfig.aCTConfigARC() adb = aCTDBArc(log, dbname=conf.get(["db","file"])) adb.createTables() usercfg = arc.UserConfig("", "") usercfg.Timeout(10) # Simple job description which outputs hostname to stdout jobdescstring = "&(executable=/bin/hostname)(stdout=stdout)" # Parse job description jobdescs = arc.JobDescriptionList() if not arc.JobDescription_Parse(jobdescstring, jobdescs): logging.error("Invalid job description") exit(1)