def proxy(self):
    """
    Build a Proxy object from the default delegation parameters.

    :return: a configured Proxy instance
    :raises EnvironmentException: if the Grid credential setup is broken
    """
    try:
        proxy = Proxy(self.defaultDelegation)
    except CredentialException as ex:
        # py3-compatible except clause (was 'except CredentialException, ex');
        # use str(ex) instead of the private, py2-only ex._message attribute
        self.logger.debug(ex)
        raise EnvironmentException('Problem with Grid environment: %s ' % str(ex))
    # the original omitted the return, making the method useless to callers
    return proxy
def setUp(self):
    """Configure file logging and build the Proxy instance under test."""
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
        datefmt='%m-%d %H:%M',
        filename='proxy_unittests.log',
        filemode='w')
    self.logger = logging.getLogger('ProxyTest')
    # Arguments handed to every Proxy built by these tests
    proxyArgs = {
        'logger': self.logger,
        'vo': 'cms',
        'group': group,
        'role': role,
        'myProxySvr': myProxySvr,
        'proxyValidity': '192:00',
        'min_time_left': 36000,
        'uisource': uiPath,
    }
    self.dict = proxyArgs
    self.proxyPath = None
    self.proxy = Proxy(self.dict)
def testMyProxyEnvironment(self):
    """
    Test the myProxyEnvironment context manager

    A new Proxy and MyProxy are initialized; we then verify that
    X509_USER_PROXY exists (and points at a real file) only while the
    context manager is active.
    """
    myProxy = Proxy(self.dict)

    # Create the proxy and remember where it lives and who owns it
    myProxy.create()
    proxyPath = myProxy.getProxyFilename()
    userDN = myProxy.getSubject()
    self.assertTrue(os.path.exists(proxyPath))

    # Delegate to myproxy and confirm the delegation is valid
    myProxy.delegate(credential=proxyPath, serverRenewer=True)
    self.assertTrue(myProxy.checkMyProxy())

    # Make sure X509_USER_PROXY exists only in the context manager and corresponds to a file
    os.environ.pop('X509_USER_PROXY', None)
    self.assertFalse('X509_USER_PROXY' in os.environ)
    with myProxyEnvironment(userDN=userDN, serverCert=serverCert, serverKey=serverKey,
                            myproxySrv='myproxy.cern.ch', proxyDir='/tmp/', logger=self.logger):
        self.assertTrue('X509_USER_PROXY' in os.environ)
        self.assertTrue(os.path.exists(os.environ['X509_USER_PROXY']))
    self.assertFalse('X509_USER_PROXY' in os.environ)
    return
def execute(self, *args, **kwargs):
    """
    Retrieve the user proxy from myproxy for the task in kwargs['task'].

    On success the proxy path is stored in the task dict and a Result is
    returned; on failure a TaskWorkerException is raised.
    """
    task = kwargs['task']
    proxycfg = {'vo': task['tm_user_vo'],
                'logger': self.logger,
                'myProxySvr': self.config.Services.MyProxy,
                'proxyValidity': '144:0',
                'min_time_left': 36000,  ## do we need this ? or should we use self.myproxylen?
                'userDN': task['tm_user_dn'],
                'group': task['tm_user_group'] if task['tm_user_group'] else '',
                'role': task['tm_user_role'] if task['tm_user_role'] else '',
                'server_key': self.config.MyProxy.serverhostkey,
                'server_cert': self.config.MyProxy.serverhostcert,
                'serverDN': self.config.MyProxy.serverdn,
                'uisource': getattr(self.config.MyProxy, 'uisource', ''),
                'credServerPath': self.config.MyProxy.credpath,
                'myproxyAccount': self.server['host'],
                'cleanEnvironment': getattr(self.config.MyProxy, 'cleanEnvironment', False)}
    proxy = Proxy(proxycfg)
    userproxy = proxy.getProxyFilename(serverRenewer=True)
    proxy.logonRenewMyProxy()
    # a non-positive time left means the renewal did not produce a usable proxy
    timeleft = proxy.getTimeLeft(userproxy)
    if timeleft is None or timeleft <= 0:
        raise TaskWorkerException("Impossible to retrieve proxy from %s for %s." %
                                  (proxycfg['myProxySvr'], proxycfg['userDN']))
    task['user_proxy'] = userproxy
    return Result(task=task, result='OK')
def __init__(self, config):
    """
    initialize properties specified from config

    :param config: agent configuration object; sections read here are
        AnalyticsDataCollector, AgentStatusWatcher and (optionally) Tier0Feeder
    """
    BaseWorkerThread.__init__(self)
    # set the workqueue service for REST call
    self.config = config
    # need to get campaign, user, owner info
    self.agentInfo = initAgentInfo(self.config)
    self.summaryLevel = config.AnalyticsDataCollector.summaryLevel
    # agent credentials; cleanEnvironment presumably makes Proxy ignore
    # inherited X509 environment variables — confirm in the Proxy class
    proxyArgs = {'logger': logging.getLogger(), 'cleanEnvironment': True}
    self.proxy = Proxy(proxyArgs)
    self.proxyFile = self.proxy.getProxyFilename()  # X509_USER_PROXY
    self.userCertFile = self.proxy.getUserCertFilename()  # X509_USER_CERT
    # credential lifetime warning/error thresholds, in days
    self.credThresholds = {'proxy': {'error': 3, 'warning': 5},
                           'certificate': {'error': 10, 'warning': 20}}
    # Monitoring setup
    self.userAMQ = getattr(config.AgentStatusWatcher, "userAMQ", None)
    self.passAMQ = getattr(config.AgentStatusWatcher, "passAMQ", None)
    self.postToAMQ = getattr(config.AgentStatusWatcher, "enableAMQ", False)
    self.topicAMQ = getattr(config.AgentStatusWatcher, "topicAMQ", None)
    self.hostPortAMQ = getattr(config.AgentStatusWatcher, "hostPortAMQ", [('cms-mb.cern.ch', 61313)])
    # T0 doesn't have WorkQueue, so some monitoring/replication code has to be skipped here
    if hasattr(self.config, "Tier0Feeder"):
        self.isT0agent = True
        self.producer = "tier0wmagent"
    else:
        self.isT0agent = False
        self.producer = "wmagent"
    localWQUrl = config.AnalyticsDataCollector.localQueueURL
    self.workqueueDS = WorkQueueDS(localWQUrl)
def get_proxy(self, ad):
    """
    Retrieve the user proxy from myproxy for the task described by a classad.

    :param ad: condor classad carrying CRAB_UserVO/Group/Role/DN attributes
    :return: path of the retrieved user proxy file
    :raises Exception: if no valid proxy could be retrieved
    """
    vo = 'cms'
    group = ''
    role = ''
    if 'CRAB_UserVO' in ad and ad['CRAB_UserVO']:
        vo = ad['CRAB_UserVO']
    # group/role may be present but Undefined in the classad; treat that as unset
    if 'CRAB_UserGroup' in ad and ad['CRAB_UserGroup'] and ad['CRAB_UserGroup'] != classad.Value.Undefined:
        group = ad['CRAB_UserGroup']
    if 'CRAB_UserRole' in ad and ad['CRAB_UserRole'] and ad['CRAB_UserRole'] != classad.Value.Undefined:
        role = ad['CRAB_UserRole']
    # use the logger rather than a stray print() to stdout
    self.logger.debug("vo=%s group=%s role=%s", vo, group, role)
    proxycfg = {'vo': vo,
                'logger': self.logger,
                'myProxySvr': self.config.Services.MyProxy,
                'myproxyAccount': self.config.TaskWorker.resturl,
                'proxyValidity': '144:0',
                'min_time_left': MINPROXYLENGTH,  ## do we need this ? or should we use self.myproxylen?
                'userDN': ad['CRAB_UserDN'],
                'group': group,
                'role': role,
                'server_key': self.config.MyProxy.serverhostkey,
                'server_cert': self.config.MyProxy.serverhostcert,
                'serverDN': self.config.MyProxy.serverdn,
                'uisource': getattr(self.config.MyProxy, 'uisource', ''),
                'credServerPath': self.config.MyProxy.credpath,
                'cleanEnvironment': getattr(self.config.MyProxy, 'cleanEnvironment', False)}
    proxy = Proxy(proxycfg)
    userproxy = proxy.getProxyFilename(serverRenewer=True)
    proxy.logonRenewMyProxy()
    # non-positive time left means the renewal failed
    timeleft = proxy.getTimeLeft(userproxy)
    if timeleft is None or timeleft <= 0:
        self.logger.error("Impossible to retrieve proxy from %s for %s." %
                          (proxycfg['myProxySvr'], proxycfg['userDN']))
        raise Exception("Failed to retrieve proxy.")
    return userproxy
def __init__(self, config):
    """
    BossAir submit plugin constructor: sets up WMBS site lookups, locates the
    job unpacker script, reads submission tunables from config and collects
    the x509 proxy classad values.
    """
    BasePlugin.__init__(self, config)
    self.locationDict = {}
    myThread = threading.currentThread()
    daoFactory = DAOFactory(package="WMCore.WMBS", logger=myThread.logger,
                            dbinterface=myThread.dbi)
    self.locationAction = daoFactory(classname="Locations.GetSiteInfo")
    self.packageDir = None
    # prefer the source-tree copy of Unpacker.py, fall back to the installed layout
    if os.path.exists(os.path.join(getWMBASE(), 'src/python/WMCore/WMRuntime/Unpacker.py')):
        self.unpacker = os.path.join(getWMBASE(), 'src/python/WMCore/WMRuntime/Unpacker.py')
    else:
        self.unpacker = os.path.join(getWMBASE(), 'WMCore/WMRuntime/Unpacker.py')
    self.agent = getattr(config.Agent, 'agentName', 'WMAgent')
    self.sandbox = None
    self.scriptFile = config.JobSubmitter.submitScript
    self.defaultTaskPriority = getattr(config.BossAir, 'defaultTaskPriority', 0)
    self.maxTaskPriority = getattr(config.BossAir, 'maxTaskPriority', 1e7)
    self.jobsPerSubmit = getattr(config.JobSubmitter, 'jobsPerSubmit', 200)
    self.extraMem = getattr(config.JobSubmitter, 'extraMemoryPerCore', 500)
    # Required for global pool accounting
    self.acctGroup = getattr(config.BossAir, 'acctGroup', "production")
    self.acctGroupUser = getattr(config.BossAir, 'acctGroupUser', "cmsdataops")
    # Build a requirement string. All CMS resources match DESIRED_Sites on the START
    # expression side; however, there are currently some resources (T2_CH_CERN_HLT)
    # that are missing the REQUIRED_OS logic. Hence, we duplicate it here.
    # TODO(bbockelm): Remove reqStr once HLT has upgraded.
    self.reqStr = (
        '((REQUIRED_OS=?="any") || '
        '(GLIDEIN_REQUIRED_OS =?= "any") || '
        'stringListMember(GLIDEIN_REQUIRED_OS, REQUIRED_OS)) && '
        '(AuthenticatedIdentity =!= "*****@*****.**")')
    if hasattr(config.BossAir, 'condorRequirementsString'):
        self.reqStr = config.BossAir.condorRequirementsString
    # x509 proxy handling
    proxy = Proxy({'logger': myThread.logger})
    self.x509userproxy = proxy.getProxyFilename()
    self.x509userproxysubject = proxy.getSubject()
    self.x509userproxyfqan = proxy.getAttributeFromProxy(self.x509userproxy)
    # Remove the x509 ads if the job is matching a volunteer resource
    # NOTE(review): the "%s" placeholder is left unformatted here — presumably
    # substituted later at submit time; confirm before changing.
    self.x509Expr = 'ifThenElse("$$(GLIDEIN_CMSSite)" =?= "T3_CH_Volunteer",undefined,"%s")'
    return
def __init__(self, config):
    """
    BossAir submit plugin constructor (container-aware variant): sets up WMBS
    site lookups, picks an Unpacker.py visible to the condor schedd, and reads
    submission tunables and the x509 proxy file.
    """
    BasePlugin.__init__(self, config)
    self.locationDict = {}
    myThread = threading.currentThread()
    daoFactory = DAOFactory(package="WMCore.WMBS", logger=myThread.logger,
                            dbinterface=myThread.dbi)
    self.locationAction = daoFactory(classname="Locations.GetSiteInfo")
    self.packageDir = None
    # if agent is running in a container, Unpacker.py must come from a directory
    # on the host so the condor schedd can see it
    # config.General.workDir should always be bind mounted to the container
    if getattr(config.Agent, "isDocker", False):
        unpackerPath = os.path.join(config.General.workDir + "/Docker/WMRuntime/Unpacker.py")
    else:
        unpackerPath = os.path.join(getWMBASE(), 'src/python/WMCore/WMRuntime/Unpacker.py')
    if os.path.exists(unpackerPath):
        self.unpacker = unpackerPath
    else:
        # fall back to the installed (non source-tree) layout
        self.unpacker = os.path.join(getWMBASE(), 'WMCore/WMRuntime/Unpacker.py')
    self.agent = getattr(config.Agent, 'agentName', 'WMAgent')
    self.sandbox = None
    self.scriptFile = config.JobSubmitter.submitScript
    self.defaultTaskPriority = getattr(config.BossAir, 'defaultTaskPriority', 0)
    self.maxTaskPriority = getattr(config.BossAir, 'maxTaskPriority', 1e7)
    self.jobsPerSubmit = getattr(config.JobSubmitter, 'jobsPerSubmit', 200)
    self.extraMem = getattr(config.JobSubmitter, 'extraMemoryPerCore', 500)
    # Required for global pool accounting
    self.acctGroup = getattr(config.BossAir, 'acctGroup', "production")
    self.acctGroupUser = getattr(config.BossAir, 'acctGroupUser', "cmsdataops")
    if hasattr(config.BossAir, 'condorRequirementsString'):
        self.reqStr = config.BossAir.condorRequirementsString
    else:
        self.reqStr = None
    # x509 proxy handling
    proxy = Proxy({'logger': myThread.logger})
    self.x509userproxy = proxy.getProxyFilename()
    # These are added now by the condor client
    #self.x509userproxysubject = proxy.getSubject()
    #self.x509userproxyfqan = proxy.getAttributeFromProxy(self.x509userproxy)
    return
def proxy(self):
    """
    Return a Proxy built from the default delegation parameters.

    :raises EnvironmentException: when the Grid environment is misconfigured
    """
    try:
        userProxy = Proxy(self.defaultDelegation)
    except CredentialException as ex:
        # surface the low-level credential failure as an environment problem
        self.logger.debug(ex)
        raise EnvironmentException('Problem with Grid environment: %s ' % str(ex))
    return userProxy
def execute(self, *args, **kwargs):
    """
    Retrieve the user proxy from myproxy for the task in kwargs['task'].

    On success the proxy path is stored in kwargs['task']['user_proxy'] and a
    Result is returned; on failure the task is marked FAILED via the REST
    interface and StopHandler is raised.
    """
    result = None
    proxycfg = {
        'vo': kwargs['task']['tm_user_vo'],
        'logger': self.logger,
        'myProxySvr': self.config.Services.MyProxy,
        'proxyValidity': '24:0',
        'min_time_left': 36000,  ## do we need this ? or should we use self.myproxylen?
        'userDN': kwargs['task']['tm_user_dn'],
        'group': kwargs['task']['tm_user_group'] if kwargs['task']['tm_user_group'] else '',
        'role': kwargs['task']['tm_user_role'] if kwargs['task']['tm_user_role'] else '',
        'server_key': self.config.MyProxy.serverhostkey,
        'server_cert': self.config.MyProxy.serverhostcert,
        'serverDN': self.config.MyProxy.serverdn,
        'uisource': self.config.MyProxy.uisource,
        'credServerPath': self.config.MyProxy.credpath,
    }
    proxy = Proxy(proxycfg)
    userproxy = proxy.getProxyFilename(serverRenewer=True)
    proxy.logonRenewMyProxy()
    # a non-positive time left means the renewal did not produce a usable proxy
    timeleft = proxy.getTimeLeft(userproxy)
    if timeleft is None or timeleft <= 0:
        msg = "Impossible to retrieve proxy from %s for %s." % (
            proxycfg['myProxySvr'], proxycfg['userDN'])
        self.logger.error("Setting %s as failed" % str(kwargs['task']['tm_taskname']))
        configreq = {
            'workflow': kwargs['task']['tm_taskname'],
            'status': "FAILED",
            'subresource': 'failure',
            # NOTE(review): b64encode requires bytes on python3 — confirm this
            # code path still runs under python2 or encode msg first
            'failure': b64encode(msg)
        }
        self.logger.error(str(configreq))
        # NOTE(review): urllib.urlencode is python2-only (python3: urllib.parse.urlencode)
        self.server.post(self.resturl, data=urllib.urlencode(configreq))
        raise StopHandler(msg)
    else:
        kwargs['task']['user_proxy'] = userproxy
        result = Result(task=kwargs['task'], result='OK')
    return result
def validate(self):
    """
    Check that the dataset exists in DBS.

    Datasets already registered are accepted immediately; otherwise the file
    summaries are looked up through DAS with the user's proxy.
    """
    if self.dataset in Dataset.__dsets:
        return True
    if self.lumi_mask:
        self.lumi_mask = self.__get_mask(self.lumi_mask)
    credential = Proxy({'logger': logging.getLogger("WMCore")})
    dbs = DASWrapper(self.dbs_instance, ca_info=credential.getProxyFilename())
    summaries = dbs.listFileSummaries(dataset=self.dataset)
    # an empty answer (None, or a single None entry) means the dataset is unknown
    unknown = summaries is None or (len(summaries) == 1 and summaries[0] is None)
    return not unknown
def __call__(self):
    """
    Query the CRAB server for the detailed status of the cached task and log a
    human-readable summary: task status, panda monitor URL, injection or
    submission failures, and per-state job percentages.
    """
    server = HTTPRequests(self.serverurl, self.proxyfilename)
    self.logger.debug('Looking up detailed status of task %s' % self.cachedinfo['RequestName'])
    dictresult, status, reason = server.get(self.uri, data={'workflow': self.cachedinfo['RequestName']})
    dictresult = dictresult['result'][0]  #take just the significant part
    if status != 200:
        msg = "Problem retrieving status:\ninput:%s\noutput:%s\nreason:%s" % (
            str(self.cachedinfo['RequestName']), str(dictresult), str(reason))
        raise RESTCommunicationException(msg)
    self.logger.debug(dictresult)  #should be something like {u'result': [[123, u'ciao'], [456, u'ciao']]}
    self.logger.info("Task name:\t\t\t%s" % self.cachedinfo['RequestName'])
    self.logger.info("Task status:\t\t\t%s" % dictresult['status'])
    #Print the url of the panda monitor
    if dictresult['taskFailureMsg']:
        self.logger.error("%sError during task injection:%s\t%s" % (colors.RED, colors.NORMAL, dictresult['taskFailureMsg']))
    elif dictresult['jobSetID']:
        # NOTE(review): urllib.quote is python2-only (python3: urllib.parse.quote)
        p = Proxy({'logger': self.logger})
        username = urllib.quote(p.getUserName())
        self.logger.info("Panda url:\t\t\thttp://panda.cern.ch/server/pandamon/query?job=*&jobsetID=%s&user=%s" % (dictresult['jobSetID'], username))
    if dictresult['jobdefErrors']:
        self.logger.error("%sSubmission partially failed:%s\t%s jobgroup not submittet out of %s:" % (colors.RED, colors.NORMAL,\
                          dictresult['failedJobdefs'], dictresult['totalJobdefs']))
        for error in dictresult['jobdefErrors']:
            self.logger.info("\t%s" % error)
    #Print information about jobs
    states = dictresult['jobsPerStatus']
    total = sum(states[st] for st in states)
    frmt = ''
    # NOTE(review): this loop rebinds 'status' (previously the HTTP status code);
    # harmless because the HTTP code is no longer used, but worth renaming.
    for status in states:
        frmt += status + ' %s\t' % self._percentageString(states[status], total)
    if frmt:
        self.logger.info('Details:\t\t\t%s' % frmt)
def getProxy(defaultDelegation, log):
    """
    _getProxy_

    Return (True, path) for a proxy valid for more than one hour, renewing it
    from myproxy when needed; (False, None) when no usable proxy is available.
    """
    log.debug("Retrieving proxy for %s" % defaultDelegation['userDN'])
    proxy = Proxy(defaultDelegation)
    proxyPath = proxy.getProxyFilename(True)
    # first try the proxy already on disk; treat a None time-left as expired
    if (proxy.getTimeLeft(proxyPath) or 0) > 3600:
        return (True, proxyPath)
    # otherwise renew the credential from myproxy and re-check
    proxyPath = proxy.logonRenewMyProxy()
    if (proxy.getTimeLeft(proxyPath) or 0) > 0:
        return (True, proxyPath)
    return (False, None)
def __init__(self, config):
    """
    BossAir submit plugin constructor: sets up WMBS site lookups, locates the
    job unpacker script, reads submission tunables, builds the condor
    requirement string and collects the x509 proxy classad values.
    """
    BasePlugin.__init__(self, config)
    self.locationDict = {}
    myThread = threading.currentThread()
    daoFactory = DAOFactory(package="WMCore.WMBS", logger=myThread.logger,
                            dbinterface=myThread.dbi)
    self.locationAction = daoFactory(classname="Locations.GetSiteInfo")
    self.packageDir = None
    # prefer the source-tree copy of Unpacker.py, fall back to the installed layout
    if os.path.exists(os.path.join(getWMBASE(), 'src/python/WMCore/WMRuntime/Unpacker.py')):
        self.unpacker = os.path.join(getWMBASE(), 'src/python/WMCore/WMRuntime/Unpacker.py')
    else:
        self.unpacker = os.path.join(getWMBASE(), 'WMCore/WMRuntime/Unpacker.py')
    self.agent = getattr(config.Agent, 'agentName', 'WMAgent')
    self.sandbox = None
    self.scriptFile = config.JobSubmitter.submitScript
    self.defaultTaskPriority = getattr(config.BossAir, 'defaultTaskPriority', 0)
    self.maxTaskPriority = getattr(config.BossAir, 'maxTaskPriority', 1e7)
    self.jobsPerSubmit = getattr(config.JobSubmitter, 'jobsPerSubmit', 200)
    # Required for global pool accounting
    self.acctGroup = getattr(config.BossAir, 'acctGroup', "production")
    self.acctGroupUser = getattr(config.BossAir, 'acctGroupUser', "cmsdataops")
    # Build a requirement string
    self.reqStr = "stringListMember(GLIDEIN_CMSSite, DESIRED_Sites) && ((REQUIRED_OS=?=\"any\") || (GLIDEIN_REQUIRED_OS=?=REQUIRED_OS)) && (TARGET.Cpus >= RequestCpus)"
    # an explicit config value overrides the built-in default
    if hasattr(config.BossAir, 'condorRequirementsString'):
        self.reqStr = config.BossAir.condorRequirementsString
    # x509 proxy handling
    proxy = Proxy({'logger': myThread.logger})
    self.x509userproxy = proxy.getProxyFilename()
    self.x509userproxysubject = proxy.getSubject()
    return
def setupMyProxy(self):
    """
    _setupMyProxy_

    Build a WMCore.Credential.Proxy object with which to retrieve proxies
    from myproxy using the server Cert
    """
    proxyArgs = {
        'server_cert': self.serverCert,
        'server_key': self.serverKey,
        'myProxySvr': self.myproxySrv,
        'credServerPath': self.proxyDir,
        'logger': logging,
    }
    # an UI setup script is optional
    if self.setupScript:
        proxyArgs['uisource'] = self.setupScript
    return Proxy(args=proxyArgs)
def tryProxyLogon(self, proxycfg=None):
    """
    Utility function to allow trying with different myproxy configurations.
    It tries to retrieve a valid proxy from myproxy using the configuration
    passed as argument. See WMCore.Credential.Proxy for configuration details.
    If successful returns the proxy filename and list of VOMS groups
    for later addition via voms-proxy-init. If not raises a TW exception.
    Note that logonRenewMyProxy() does not raise exceptions.

    :param proxycfg: dict of Proxy configuration parameters
    :return: tuple (proxy filename, set of VOMS groups)
    :raises TaskWorkerException: when no proxy valid for at least 5 days is obtained
    """
    # WMCore proxy methods are awfully verbose, reduce logging level when using them
    with tempSetLogLevel(logger=self.logger, level=logging.ERROR):
        proxy = Proxy(proxycfg)
        userproxy = proxy.getProxyFilename(serverRenewer=True)  # this only returns a filename
        proxy.logonRenewMyProxy()  # this tries to create the proxy, but if it fails it does not raise
        usergroups = set(proxy.getAllUserGroups(userproxy))  # get VOMS groups from created proxy (if any)
        timeleft = proxy.getTimeLeft(userproxy)  # this is the way to tell if proxy creation succeeded
    errmsg = ''
    if timeleft is None or timeleft <= 0:
        errmsg = "Impossible to retrieve proxy from %s for %s." % (
            proxycfg['myProxySvr'], proxycfg['userDN'])
    elif timeleft < (5 * 24 * 3600):
        # elif (not a second if): a None timeleft must not reach this comparison —
        # 'None < int' raises TypeError on python3 — and the more precise
        # "impossible to retrieve" message must not be overwritten
        errmsg = "Could not get a proxy valid for at least 5-days from %s for %s." % (
            proxycfg['myProxySvr'], proxycfg['userDN'])
    if errmsg:
        self.logger.error(errmsg)
        self.logger.error("Will try again in verbose mode")
        self.logger.error("===========PROXY ERROR START ==========================")
        with tempSetLogLevel(logger=self.logger, level=logging.DEBUG):
            proxy.logonRenewMyProxy()
        self.logger.error("===========PROXY ERROR END ==========================")
        raise TaskWorkerException(errmsg)
    # integer division: we only report whole hours/minutes
    hoursleft = timeleft // 3600
    minutesleft = (timeleft % 3600) // 60
    self.logger.info('retrieved proxy lifetime in h:m: %d:%d', hoursleft, minutesleft)
    return (userproxy, usergroups)
def __init__(self, config):
    """
    initialize properties specified from config
    """
    BaseWorkerThread.__init__(self)
    # set the workqueue service for REST call
    self.config = config
    # need to get campaign, user, owner info
    self.agentInfo = initAgentInfo(self.config)
    self.summaryLevel = config.AnalyticsDataCollector.summaryLevel
    self.jsonFile = config.AgentStatusWatcher.jsonFile
    # agent proxy used for service calls
    self.proxy = Proxy({'logger': logging.getLogger()})
    self.proxyFile = self.proxy.getProxyFilename()  # X509_USER_PROXY
    self.workqueueDS = WorkQueueDS(config.AnalyticsDataCollector.localQueueURL)
def wrapped_func(*args, **kwargs):
    """
    Decorator body: ensure kwargs['userproxy'] points at a valid proxy for
    kwargs['userdn'] before invoking the wrapped function.

    When a serverDN is configured the proxy is fetched from myproxy with the
    server credentials; otherwise a locally-built Proxy object supplies the
    filename.
    """
    logger = logging.getLogger("CRABLogger.Utils")
    myproxyserver = "myproxy.cern.ch"
    # NOTE(review): 'userdn' is assigned but the code below keeps reading
    # kwargs['userdn'] directly — the local is effectively unused
    userdn = kwargs['userdn']
    defaultDelegation = {
        'logger': logger,
        'proxyValidity': '192:00',
        'min_time_left': 36000,
        'server_key': serverKey,
        'server_cert': serverCert,
    }
    timeleftthreshold = 60 * 60 * 24
    mypclient = SimpleMyProxy(defaultDelegation)
    userproxy = None
    # myproxy credentials are keyed by the sha1 of the user DN
    # NOTE(review): sha1() requires bytes on python3 — confirm userdn is encoded upstream
    userhash = sha1(kwargs['userdn']).hexdigest()
    if serverDN:
        try:
            userproxy = mypclient.logonRenewMyProxy(
                username=userhash, myproxyserver=myproxyserver, myproxyport=7512)
        except MyProxyException as me:
            # Unsure if this works in standalone mode...
            cherrypy.log(str(me))
            cherrypy.log(str(serverKey))
            cherrypy.log(str(serverCert))
            invalidp = InvalidParameter(
                "Impossible to retrieve proxy from %s for %s and hash %s" %
                (myproxyserver, kwargs['userdn'], userhash))
            setattr(invalidp, 'trace', str(me))
            raise invalidp
        else:
            # sanity check: the retrieved blob must look like a certificate
            if not re.match(RX_CERT, userproxy):
                raise InvalidParameter(
                    "Retrieved malformed proxy from %s for %s and hash %s" %
                    (myproxyserver, kwargs['userdn'], userhash))
    else:
        proxy = Proxy(defaultDelegation)
        userproxy = proxy.getProxyFilename()
    kwargs['userproxy'] = userproxy
    out = func(*args, **kwargs)
    return out
def getProxy(userdn, group, role, defaultDelegation, logger):
    """
    _getProxy_

    Retrieve (renewing from myproxy if needed) a proxy for userdn with the
    given group/role on top of the defaultDelegation parameters.

    :return: (True, proxyPath) on success, (False, None) otherwise
    """
    logger.debug("Retrieving proxy for %s" % userdn)
    # work on a copy: the original aliased the caller's dict and mutated it
    # (userDN/group/role leaked back into defaultDelegation between calls)
    config = dict(defaultDelegation)
    config['userDN'] = userdn
    config['group'] = group
    config['role'] = role
    proxy = Proxy(config)
    proxyPath = proxy.getProxyFilename(True)
    timeleft = proxy.getTimeLeft(proxyPath)
    if timeleft is not None and timeleft > 3600:
        return (True, proxyPath)
    # existing proxy is (nearly) expired: renew it from myproxy and re-check
    proxyPath = proxy.logonRenewMyProxy()
    timeleft = proxy.getTimeLeft(proxyPath)
    if timeleft is not None and timeleft > 0:
        return (True, proxyPath)
    return (False, None)
def getProxy(config, userdn, group, role):
    """
    _getProxy_

    Retrieve (renewing from myproxy if needed) a proxy for userdn with the
    given group and role, starting from the default delegation parameters.
    """
    delegation = getDefaultDelegation(config, "cms", "myproxy.cern.ch",
                                      threading.currentThread().logger)
    delegation['userDN'] = userdn
    delegation['group'] = group
    delegation['role'] = role
    logging.debug("Retrieving proxy for %s" % userdn)
    proxy = Proxy(delegation)
    proxyPath = proxy.getProxyFilename(True)
    # first try the proxy already on disk; treat a None time-left as expired
    if (proxy.getTimeLeft(proxyPath) or 0) > 3600:
        return (True, proxyPath)
    # otherwise renew the credential from myproxy and re-check
    proxyPath = proxy.logonRenewMyProxy()
    if (proxy.getTimeLeft(proxyPath) or 0) > 0:
        return (True, proxyPath)
    return (False, None)
def createNewMyProxy(self, timeleftthreshold=0, nokey=False):
    """
    Handles the MyProxy creation

    :param timeleftthreshold: renew the delegation when the current one is
        valid for less than this many seconds
    :param nokey: forwarded to the myproxy query and delegation calls
    :raises ProxyCreationException: if the delegation fails
    """
    myproxy = Proxy(self.defaultDelegation)
    myproxy.userDN = myproxy.getSubject()
    myproxytimeleft = 0
    self.logger.debug("Getting myproxy life time left for %s" % self.defaultDelegation["myProxySvr"])
    # does it return an integer that indicates?
    myproxytimeleft = myproxy.getMyProxyTimeLeft(serverRenewer=True, nokey=nokey)
    self.logger.debug("Myproxy is valid: %i" % myproxytimeleft)
    if myproxytimeleft < timeleftthreshold or self.proxyChanged:
        # (re-)delegate the proxy
        self.logger.debug("Delegating a myproxy for %s hours" % self.defaultDelegation['myproxyValidity'])
        try:
            myproxy.delegate(serverRenewer=True, nokey=nokey)
            self.logger.debug("My-proxy delegated.")
        except Exception as ex:
            # py3-compatible except clause (was 'except Exception, ex')
            raise ProxyCreationException("Problems delegating My-proxy. Problem %s" % ex)
def __init__(self, config):
    """
    initialize properties specified from config

    :param config: agent configuration object; sections read here are
        AnalyticsDataCollector and AgentStatusWatcher
    """
    BaseWorkerThread.__init__(self)
    # set the workqueue service for REST call
    self.config = config
    # need to get campaign, user, owner info
    self.agentInfo = initAgentInfo(self.config)
    self.summaryLevel = config.AnalyticsDataCollector.summaryLevel
    # agent proxy/cert used for service calls
    proxyArgs = {'logger': logging.getLogger()}
    self.proxy = Proxy(proxyArgs)
    self.proxyFile = self.proxy.getProxyFilename()  # X509_USER_PROXY
    self.userCertFile = self.proxy.getUserCertFilename()  # X509_USER_CERT
    # credential lifetime warning/error thresholds, in days
    self.credThresholds = {
        'proxy': {
            'error': 3,
            'warning': 5
        },
        'certificate': {
            'error': 10,
            'warning': 20
        }
    }
    localWQUrl = config.AnalyticsDataCollector.localQueueURL
    self.workqueueDS = WorkQueueDS(localWQUrl)
    # Monitoring setup
    self.userAMQ = getattr(config.AgentStatusWatcher, "userAMQ", None)
    self.passAMQ = getattr(config.AgentStatusWatcher, "passAMQ", None)
    self.postToAMQ = getattr(config.AgentStatusWatcher, "enableAMQ", False)
    self.topicAMQ = getattr(config.AgentStatusWatcher, "topicAMQ", None)
    self.hostPortAMQ = getattr(config.AgentStatusWatcher, "hostPortAMQ", [('dashb-mb.cern.ch', 61113)])
def createNewVomsProxy(self, timeleftthreshold=0):
    """
    Handles the proxy creation:
    - checks if a valid proxy still exists
    - performs the creation if it is expired

    :param timeleftthreshold: minimum remaining validity (seconds) below
        which the proxy is re-created
    :return: tuple (proxy subject, proxy filename)
    :raises ProxyCreationException: if a valid proxy could not be created
    """
    ## TODO add the change to have user-cert/key defined in the config.
    userproxy = Proxy(self.defaultDelegation)
    userproxy.userDN = userproxy.getSubject()
    proxytimeleft = 0
    self.logger.debug("Getting proxy life time left")
    # does it return an integer that indicates?
    proxytimeleft = userproxy.getTimeLeft()
    self.logger.debug("Proxy is valid: %i" % proxytimeleft)
    # if it is not expired, check whether role and/or group have changed
    # ('is not None' instead of '!= None'; 'not x < t' rewritten as 'x >= t')
    if proxytimeleft >= timeleftthreshold and \
       self.defaultDelegation['role'] is not None and \
       self.defaultDelegation['group'] is not None:
        group, role = userproxy.getUserGroupAndRoleFromProxy(userproxy.getProxyFilename())
        if group != self.defaultDelegation['group'] or role != self.defaultDelegation['role']:
            self.proxyChanged = True
    # if the proxy is expired, or we changed role and/or group, we need to create a new one
    if proxytimeleft < timeleftthreshold or self.proxyChanged:
        # creating the proxy
        self.logger.debug("Creating a proxy for %s hours" % self.defaultDelegation['proxyValidity'])
        userproxy.create()
        proxytimeleft = userproxy.getTimeLeft()
        group, role = userproxy.getUserGroupAndRoleFromProxy(userproxy.getProxyFilename())
        if proxytimeleft > 0 and group == self.defaultDelegation['group'] and role == self.defaultDelegation['role']:
            self.logger.debug("Proxy created.")
        else:
            raise ProxyCreationException("Problems creating proxy.")
    return userproxy.getSubject(), userproxy.getProxyFilename()
def createNewMyProxy(self, timeleftthreshold=0, nokey=False):
    """
    Handles the MyProxy creation

    Let the following variables be

    timeleftthreshold: the proxy in myproxy should be delegated for at least this time (14 days)
    myproxytimeleft: current validity of your proxy in myproxy
    usercertDaysLeft: the number of days left before your user certificate expire
    myproxyDesiredValidity: delegate the proxy in myproxy for that time (30 days)

    If we need to renew the proxy in myproxy because its atributes has changed or because it is valid for
    less time than timeleftthreshold then we do it.

    Before doing that, we check when the user certificate is expiring. If it's within the timeleftthreshold
    (myproxytimeleft < timeleftthreshold) we delegate the proxy just for the time we need (checking first if
    we did not already do it since at some point usercertDaysLeft ~= myproxytimeleft and we don't need to
    delegate it at every command even though myproxytimeleft < timeleftthreshold).

    Note that a warning message is printed at every command it usercertDaysLeft < timeleftthreshold
    """
    myproxy = Proxy(self.defaultDelegation)
    myproxy.userDN = myproxy.getSubjectFromCert(self.certLocation)
    myproxytimeleft = 0
    self.logger.debug("Getting myproxy life time left for %s" % self.defaultDelegation["myProxySvr"])
    # return an integer that indicates the number of seconds to the expiration of the proxy in myproxy
    myproxytimeleft = myproxy.getMyProxyTimeLeft(serverRenewer=True, nokey=nokey)
    self.logger.debug("Myproxy is valid: %i" % myproxytimeleft)
    trustRetrListChanged = myproxy.trustedRetrievers != self.defaultDelegation['serverDN']  #list on the REST and on myproxy are different
    if myproxytimeleft < timeleftthreshold or self.proxyChanged or trustRetrListChanged:
        # checking the enddate of the user certificate
        usercertDaysLeft = myproxy.getUserCertEnddate()
        if usercertDaysLeft == 0:
            msg = "%sYOUR USER CERTIFICATE IS EXPIRED (OR WILL EXPIRE TODAY). YOU CANNOT USE THE CRAB3 CLIENT. PLEASE REQUEST A NEW CERTIFICATE HERE https://gridca.cern.ch/gridca/ AND SEE https://ca.cern.ch/ca/Help/?kbid=024010%s"\
                % (colors.RED, colors.NORMAL)
            raise ProxyCreationException(msg)
        #if the certificate is going to expire print a warning. This is going to bre printed at every command if
        #the myproxytimeleft is inferior to the timeleftthreshold
        if usercertDaysLeft < self.myproxyDesiredValidity:
            self.logger.info("%sYour user certificate is going to expire in %s days. https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookStartingGrid#ObtainingCert %s"\
                % (colors.RED, usercertDaysLeft, colors.NORMAL))
            #check if usercertDaysLeft ~= myproxytimeleft which means we already delegated the proxy for as long as we could
            if abs(usercertDaysLeft * 60 * 60 * 24 - myproxytimeleft) < 60 * 60 * 24 and not trustRetrListChanged:
                #less than one day between usercertDaysLeft and myproxytimeleft
                return
            #adjust the myproxy delegation time accordingly to the user cert validity
            self.logger.info("%sDelegating your proxy for %s days instead of %s %s"\
                % (colors.RED, usercertDaysLeft, self.myproxyDesiredValidity, colors.NORMAL))
            myproxy.myproxyValidity = "%i:00" % (usercertDaysLeft * 24)
        # creating the proxy
        self.logger.debug("Delegating a myproxy for %s hours" % myproxy.myproxyValidity)
        try:
            myproxy.delegate(serverRenewer=True, nokey=nokey)
            # re-query myproxy to confirm the delegation actually landed
            myproxytimeleft = myproxy.getMyProxyTimeLeft(serverRenewer=True, nokey=nokey)
            if myproxytimeleft <= 0:
                raise ProxyCreationException("It seems your proxy has not been delegated to myproxy. Please check the logfile for the exact error "+\
                                             "(it might simply you typed a wrong password)")
            else:
                self.logger.debug("My-proxy delegated.")
        except Exception as ex:
            # _message is the py2-era exception attribute; fall back to str(ex)
            msg = ex._message if hasattr(ex, '_message') else str(ex)
            raise ProxyCreationException("Problems delegating My-proxy. %s" % msg)
def __init__(self, confFile=None, quiet=False, debug=True, testMode=False):
    """
    Initialise class members.

    :arg WMCore.Configuration config: input Publisher configuration
    :arg bool quiet: it tells if a quiet logger is needed
    :arg bool debug: it tells if needs a verbose logger
    :arg bool testMode: it tells if to run in test (no subprocesses) mode.
    """

    def createLogdir(dirname):
        """
        Create the directory dirname ignoring errors in case it exists.
        Exit if the directory cannot be created.
        """
        try:
            os.makedirs(dirname)
        except OSError as ose:
            # errno 17 == EEXIST: ignore the "Directory already exists" error
            if ose.errno != 17:
                print(str(ose))
                print("The Publisher Worker needs to access the '%s' directory" % dirname)
                sys.exit(1)

    def setRootLogger(logsDir, quiet=False, debug=True, console=False):
        """
        Sets the root logger with the desired verbosity level.
        The root logger logs to logs/log.txt and every single logging instruction
        is propagated to it (not really nice to read).

        :arg bool quiet: it tells if a quiet logger is needed
        :arg bool debug: it tells if needs a verbose logger
        :arg bool console: it tells if to direct all printout to console rather
            than files, useful for debug
        :return logger: a logger with the appropriate logger level.
        """
        createLogdir(logsDir)
        createLogdir(os.path.join(logsDir, 'processes'))
        createLogdir(os.path.join(logsDir, 'tasks'))

        if console:
            # if we are testing log to the console is easier
            logging.getLogger().addHandler(logging.StreamHandler())
        else:
            # rotate the shared log file at midnight; MultiProcessingLog makes it
            # safe to use from the worker subprocesses as well
            logHandler = MultiProcessingLog(os.path.join(logsDir, 'log.txt'), when='midnight')
            logFormatter = logging.Formatter("%(asctime)s:%(levelname)s:%(module)s,%(lineno)d:%(message)s")
            logHandler.setFormatter(logFormatter)
            logging.getLogger().addHandler(logHandler)
        loglevel = logging.INFO
        if quiet:
            loglevel = logging.WARNING
        if debug:
            # note: debug wins over quiet when both are set
            loglevel = logging.DEBUG
        logging.getLogger().setLevel(loglevel)
        logger = setMasterLogger()
        logger.debug("PID %s.", os.getpid())
        logger.debug("Logging level initialized to %s.", loglevel)
        return logger

    def logVersionAndConfig(config=None, logger=None):
        """
        Log version number and major config. parameters.

        args: config : a configuration object loaded from file
        args: logger : the logger instance to use
        """
        pubstartDict = {}
        pubstartDict['version'] = __version__
        pubstartDict['asoworker'] = config.General.asoworker
        pubstartDict['instance'] = config.General.instance
        if config.General.instance == 'other':
            # 'other' means a non-standard service instance: host/DB come from config
            pubstartDict['restHost'] = config.General.restHost
            pubstartDict['dbInstance'] = config.General.dbInstance
        pubstartDict['max_slaves'] = config.General.max_slaves
        pubstartDict['DBShost'] = config.TaskPublisher.DBShost
        pubstartDict['dryRun'] = config.TaskPublisher.dryRun
        # one line for automatic parsing
        logger.info('PUBSTART: %s', json.dumps(pubstartDict))
        # multiple lines for humans to read
        for k, v in pubstartDict.items():
            logger.info('%s: %s', k, v)
        return

    self.configurationFile = confFile  # remember this, will have to pass it to TaskPublish
    config = loadConfigurationFile(confFile)
    self.config = config.General
    self.TPconfig = config.TaskPublisher

    # these are used for talking to DBS
    os.putenv('X509_USER_CERT', self.config.serviceCert)
    os.putenv('X509_USER_KEY', self.config.serviceKey)

    self.block_publication_timeout = self.config.block_closure_timeout
    self.lfn_map = {}
    self.force_publication = False
    self.force_failure = False
    self.TestMode = testMode
    self.taskFilesDir = self.config.taskFilesDir
    createLogdir(self.taskFilesDir)
    createLogdir(os.path.join(self.taskFilesDir, 'FailedBlocks'))

    self.logger = setRootLogger(self.config.logsDir, quiet=quiet, debug=debug, console=self.TestMode)
    logVersionAndConfig(config, self.logger)

    # local imports so that a Publisher deployment without these packages
    # only fails here, at initialisation time
    from WMCore.Credential.Proxy import Proxy
    proxy = Proxy({'logger': self.logger})
    from ServerUtilities import tempSetLogLevel
    # silence the (chatty) Proxy object while extracting the service DN
    with tempSetLogLevel(self.logger, logging.ERROR):
        self.myDN = proxy.getSubjectFromCert(certFile=self.config.serviceCert)

    try:
        instance = self.config.instance
    except:
        msg = "No instance provided: need to specify config.General.instance in the configuration"
        raise ConfigException(msg)

    if instance in SERVICE_INSTANCES:
        self.logger.info('Will connect to CRAB service: %s', instance)
        restHost = SERVICE_INSTANCES[instance]['restHost']
        dbInstance = SERVICE_INSTANCES[instance]['dbInstance']
    else:
        msg = "Invalid instance value '%s'" % instance
        raise ConfigException(msg)
    if instance == 'other':
        # a non-standard instance: REST endpoint is taken from the config file
        self.logger.info('Will use restHost and dbInstance from config file')
        try:
            restHost = self.config.restHost
            dbInstance = self.config.dbInstance
        except:
            msg = "Need to specify config.General.restHost and dbInstance in the configuration"
            raise ConfigException(msg)

    self.logger.info('Will connect to CRAB Data Base %s instance via URL: https://%s', dbInstance, restHost)

    # CRAB REST API's
    self.max_files_per_block = self.config.max_files_per_block
    self.crabServer = CRABRest(hostname=restHost, localcert=self.config.serviceCert,
                               localkey=self.config.serviceKey, retry=3,
                               userAgent='CRABPublisher')
    self.crabServer.setDbInstance(dbInstance=dbInstance)
    self.startTime = time.time()
def get_proxy_from_MyProxy(self, ad): vo = 'cms' group = '' role = '' if 'CRAB_UserVO' in ad and ad['CRAB_UserVO']: vo = ad['CRAB_UserVO'] if 'CRAB_UserGroup' in ad and ad['CRAB_UserGroup'] and ad[ 'CRAB_UserGroup'] != classad.Value.Undefined: group = ad['CRAB_UserGroup'] if 'CRAB_UserRole' in ad and ad['CRAB_UserRole'] and ad[ 'CRAB_UserRole'] != classad.Value.Undefined: role = ad['CRAB_UserRole'] username = ad['CRAB_UserHN'] proxycfg = { 'vo': vo, 'logger': self.logger, 'myProxySvr': self.config.Services.MyProxy, 'proxyValidity': '144:0', 'min_time_left': MINPROXYLENGTH, ## do we need this ? or should we use self.myproxylen? 'userDN': ad['CRAB_UserDN'], 'userName': username + '_CRAB', 'group': group, 'role': role, 'server_key': self.config.MyProxy.serverhostkey, 'server_cert': self.config.MyProxy.serverhostcert, 'serverDN': 'dummy', # this is only used inside WMCore/Proxy.py functions not used by CRAB 'uisource': getattr(self.config.MyProxy, 'uisource', ''), 'credServerPath': self.config.MyProxy.credpath, 'cleanEnvironment': getattr(self.config.MyProxy, 'cleanEnvironment', False) } proxy = Proxy(proxycfg) userproxy = proxy.getProxyFilename(serverRenewer=True) # try first with new username_CRAB with tempSetLogLevel(logger=self.logger, level=logging.ERROR): proxy.logonRenewMyProxy() timeleft = proxy.getTimeLeft(userproxy) if not timeleft or timeleft <= 0: # if that fails, try with old fashioned DN hash del proxycfg['userName'] proxy = Proxy(proxycfg) with tempSetLogLevel(logger=self.logger, level=logging.ERROR): proxy.logonRenewMyProxy() timeleft = proxy.getTimeLeft(userproxy) if timeleft is None or timeleft <= 0: self.logger.error("Impossible to retrieve proxy from %s for %s.", proxycfg['myProxySvr'], proxycfg['userDN']) self.logger.error("repeat the command in verbose mode") proxycfg['userName'] = username + '_CRAB' proxy = Proxy(proxycfg) proxy.logonRenewMyProxy() raise Exception("Failed to retrieve proxy.") return userproxy
def query_database(self):
    """
    Query DBS (via DAS) for the dataset and build a DatasetInfo with per-file
    events, sizes and (run, lumi) units, honouring an optional lumi mask.

    In lumi-based mode results are cached keyed on (dataset, lumi_mask,
    baseinfo); in file-based mode each file gets the sentinel lumi (-2, -2).

    :return: a populated DatasetInfo object
    :raises ValueError: when DBS returns no summary information for the dataset
    """
    cred = Proxy({'logger': logging.getLogger("WMCore")})
    dbs = DASWrapper(self.dbs_instance, ca_info=cred.getProxyFilename())

    baseinfo = dbs.listFileSummaries(dataset=self.dataset)
    # DBS may signal "nothing found" either with None or with [None]
    if baseinfo is None or (len(baseinfo) == 1 and baseinfo[0] is None):
        raise ValueError('unable to retrive information for dataset {}'.format(self.dataset))

    if not self.file_based:
        # caching is only meaningful for lumi-based splitting
        result = self.__cache.cached(self.dataset, self.lumi_mask, baseinfo)
        if result:
            return result

    total_lumis = sum([info['num_lumi'] for info in baseinfo])

    result = DatasetInfo()
    result.total_events = sum([info['num_event'] for info in baseinfo])

    for info in dbs.listFiles(dataset=self.dataset, detail=True):
        fn = info['logical_file_name']
        result.files[fn].events = info['event_count']
        result.files[fn].size = info['file_size']

    if self.file_based:
        # sentinel lumi meaning "whole file" — no per-lumi bookkeeping
        for info in dbs.listFiles(dataset=self.dataset):
            fn = info['logical_file_name']
            result.files[fn].lumis = [(-2, -2)]
    else:
        blocks = dbs.listBlocks(dataset=self.dataset)
        if self.lumi_mask:
            unmasked_lumis = LumiList(filename=self.lumi_mask)
        for block in blocks:
            runs = dbs.listFileLumis(block_name=block['block_name'])
            for run in runs:
                fn = run['logical_file_name']
                for lumi in run['lumi_section_num']:
                    # keep the lumi if there is no mask or the mask allows it
                    if not self.lumi_mask or ((run['run_num'], lumi) in unmasked_lumis):
                        result.files[fn].lumis.append((run['run_num'], lumi))
                    elif self.lumi_mask and ((run['run_num'], lumi) not in unmasked_lumis):
                        result.masked_units += 1

    result.unmasked_units = sum([len(f.lumis) for f in result.files.values()])
    result.total_units = result.unmasked_units + result.masked_units

    if not self.file_based:
        self.__cache.cache(self.dataset, self.lumi_mask, baseinfo, result)

    # a mismatch between unique (run, lumi) and (run, lumi, file) counts means
    # some lumis are split across files — then limit tasks to one file each
    result.stop_on_file_boundary = (result.total_units != total_lumis) and not self.file_based
    if result.stop_on_file_boundary:
        logger.debug("split lumis detected in {} - "
                     "{} unique (run, lumi) but "
                     "{} unique (run, lumi, file) - "
                     "enforcing a limit of one file per task".format(
                         self.dataset, total_lumis, result.total_units))

    return result
def __init__(self, config):
    """
    Initialise the Condor BossAir plugin.

    Sets up the WMBS site-info DAO, locates the sandbox Unpacker.py script,
    reads submission tunables from the agent configuration, builds the default
    Condor requirements expression and caches the x509 proxy file location.

    :param config: the agent configuration object (WMCore Configuration)
    """
    BasePlugin.__init__(self, config)

    self.locationDict = {}

    myThread = threading.currentThread()
    daoFactory = DAOFactory(package="WMCore.WMBS", logger=myThread.logger,
                            dbinterface=myThread.dbi)
    self.locationAction = daoFactory(classname="Locations.GetSiteInfo")

    self.packageDir = None

    # if agent is running in a container, Unpacker.py must come from a directory
    # on the host so the condor schedd can see it
    # config.General.workDir should always be bind mounted to the container
    if getattr(config.Agent, "isDocker", False):
        # pass the path pieces as separate os.path.join components instead of
        # pre-concatenating them with "/" (join with a single argument is a no-op)
        unpackerPath = os.path.join(config.General.workDir, "Docker/WMRuntime/Unpacker.py")
    else:
        unpackerPath = os.path.join(getWMBASE(), 'src/python/WMCore/WMRuntime/Unpacker.py')

    if os.path.exists(unpackerPath):
        self.unpacker = unpackerPath
    else:
        # fall back to the installed-package layout
        self.unpacker = os.path.join(getWMBASE(), 'WMCore/WMRuntime/Unpacker.py')

    self.agent = getattr(config.Agent, 'agentName', 'WMAgent')
    self.sandbox = None

    self.scriptFile = config.JobSubmitter.submitScript

    self.defaultTaskPriority = getattr(config.BossAir, 'defaultTaskPriority', 0)
    self.maxTaskPriority = getattr(config.BossAir, 'maxTaskPriority', 1e7)
    self.jobsPerSubmit = getattr(config.JobSubmitter, 'jobsPerSubmit', 200)
    self.extraMem = getattr(config.JobSubmitter, 'extraMemoryPerCore', 500)

    # Required for global pool accounting
    self.acctGroup = getattr(config.BossAir, 'acctGroup', "production")
    self.acctGroupUser = getattr(config.BossAir, 'acctGroupUser', "cmsdataops")

    # Build a requirement string. All CMS resources match DESIRED_Sites on the START
    # expression side; however, there are currently some resources (T2_CH_CERN_HLT)
    # that are missing the REQUIRED_OS logic. Hence, we duplicate it here.
    # TODO(bbockelm): Remove reqStr once HLT has upgraded.
    self.reqStr = ('((REQUIRED_OS=?="any") || '
                   '(GLIDEIN_REQUIRED_OS =?= "any") || '
                   'stringListMember(GLIDEIN_REQUIRED_OS, REQUIRED_OS)) && '
                   '(AuthenticatedIdentity =!= "*****@*****.**")')
    if hasattr(config.BossAir, 'condorRequirementsString'):
        # a config-provided requirements expression overrides the default
        self.reqStr = config.BossAir.condorRequirementsString

    # x509 proxy handling
    proxy = Proxy({'logger': myThread.logger})
    self.x509userproxy = proxy.getProxyFilename()

    # These are added now by the condor client
    # self.x509userproxysubject = proxy.getSubject()
    # self.x509userproxyfqan = proxy.getAttributeFromProxy(self.x509userproxy)

    return
'server_key': serverKey, 'server_cert': serverCert,} timeleftthreshold = 60 * 60 * 24 mypclient = SimpleMyProxy(defaultDelegation) userproxy = None userhash = sha1(kwargs['userdn']).hexdigest() if serverDN: try: userproxy = mypclient.logonRenewMyProxy(username=userhash, myproxyserver=myproxyserver, myproxyport=7512) except MyProxyException, me: # Unsure if this works in standalone mode... cherrypy.log(str(me)) cherrypy.log(str(serverKey)) cherrypy.log(str(serverCert)) invalidp = InvalidParameter("Impossible to retrieve proxy from %s for %s and hash %s" % (myproxyserver, kwargs['userdn'], userhash)) setattr(invalidp, 'trace', str(me)) raise invalidp else: if not re.match(RX_CERT, userproxy): raise InvalidParameter("Retrieved malformed proxy from %s for %s and hash %s" % (myproxyserver, kwargs['userdn'], userhash)) else: proxy = Proxy(defaultDelegation) userproxy = proxy.getProxyFilename() kwargs['userproxy'] = userproxy out = func(*args, **kwargs) return out return wrapped_func
def createNewMyProxy2(self, timeleftthreshold=0, nokey=False):
    """
    Handles the MyProxy creation. In this version the credential name will be
    simply <username>_CRAB like e.g. belforte_CRAB where username is the CERN
    username.

    Let the following variables be
      timeleftthreshold: the proxy in myproxy should be delegated for at least
        this time (14 days)
      myproxytimeleft: current validity of your proxy in myproxy
      usercertDaysLeft: the number of days left before your user certificate
        expire
      myproxyDesiredValidity: delegate the proxy in myproxy for that time
        (30 days)

    If we need to renew the proxy in myproxy because its attributes have
    changed or because it is valid for less time than timeleftthreshold then
    we do it. Before doing that, we check when the user certificate is
    expiring. If it's within the timeleftthreshold
    (myproxytimeleft < timeleftthreshold) we delegate the proxy just for the
    time we need (checking first if we did not already do it since at some
    point usercertDaysLeft ~= myproxytimeleft and we don't need to delegate it
    at every command even though myproxytimeleft < timeleftthreshold).

    Note that a warning message is printed at every command if
    usercertDaysLeft < timeleftthreshold.

    :returns: a tuple with info on the credential in myproxy:
        (credentialName, myproxytimeleft)
        credentialName : username to use in myproxy -l username
        myproxytimeleft: validity of the credential in seconds
    """
    defaultDelegation = self.defaultDelegation
    defaultDelegation['myproxyAccount'] = None
    from CRABClient.UserUtilities import getUsername
    username = getUsername(proxyFile=self.proxyInfo['filename'], logger=self.logger)
    credentialName = username + '_CRAB'
    defaultDelegation['userName'] = credentialName
    myproxy = Proxy(defaultDelegation)
    #userDNFromCert = myproxy.getSubjectFromCert(self.certLocation)
    #if userDNFromCert:
    #    myproxy.userDN = userDNFromCert

    myproxytimeleft = 0
    self.logger.debug("Getting myproxy life time left for %s" % self.defaultDelegation["myProxySvr"])
    # return an integer that indicates the number of seconds to the expiration of the proxy in myproxy
    # Also catch the exception in case WMCore encounters a problem with the proxy itself (one such case was #4532)
    try:
        myproxytimeleft = myproxy.getMyProxyTimeLeft(serverRenewer=True, nokey=nokey)
    except CredentialException as ex:
        msg = "WMCore could not computer valid time for credential %s .\n Error detail: " % credentialName
        msg += "%s" % str(ex._message)
        msg += "\nTry to remove old myproxy credentials as per https://twiki.cern.ch/twiki/bin/view/CMSPublic/CRAB3FAQ#crab_command_fails_with_Impossib"
        self.logger.error(msg)
        raise ProxyCreationException("no valid credential for %s" % credentialName)
    except Exception as ex:
        logging.exception("Problems calculating proxy lifetime, logging stack trace and raising ProxyCreationException")
        # WMException may contain the _message attribute. Otherwise, take the exception as a string.
        msg = ex._message if hasattr(ex, "_message") else str(ex)  # pylint: disable=protected-access, no-member
        raise ProxyCreationException("Problems calculating the time left until the expiration of the proxy." +
                                     " Please reset your environment or contact [email protected] if the problem persists.\n%s" % msg)
    self.logger.debug("Myproxy is valid: %i", myproxytimeleft)

    trustRetrListChanged = myproxy.trustedRetrievers != self.defaultDelegation['serverDN']  #list on the REST and on myproxy are different
    if myproxytimeleft < timeleftthreshold or self.proxyChanged or trustRetrListChanged:
        # checking the enddate of the user certificate
        usercertDaysLeft = myproxy.getUserCertEnddate()
        if usercertDaysLeft == 0:
            msg = "%sYOUR USER CERTIFICATE IS EXPIRED (OR WILL EXPIRE TODAY)." % colors.RED
            msg += " YOU CANNOT USE THE CRAB3 CLIENT."
            msg += " PLEASE REQUEST A NEW CERTIFICATE HERE https://gridca.cern.ch/gridca/"
            msg += " AND SEE https://ca.cern.ch/ca/Help/?kbid=024010%s" % colors.NORMAL
            raise ProxyCreationException(msg)

        #if the certificate is going to expire print a warning. This is going to bre printed at every command if
        #the myproxytimeleft is inferior to the timeleftthreshold
        if usercertDaysLeft < self.myproxyDesiredValidity:
            msg = "%sYour user certificate is going to expire in %s days." % (colors.RED, usercertDaysLeft)
            msg += " See: https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookStartingGrid#ObtainingCert %s" % colors.NORMAL
            self.logger.info(msg)
            #check if usercertDaysLeft ~= myproxytimeleft which means we already delegated the proxy for as long as we could
            if abs(usercertDaysLeft * 60 * 60 * 24 - myproxytimeleft) < 60 * 60 * 24 and not trustRetrListChanged:  #less than one day between usercertDaysLeft and myproxytimeleft
                return (credentialName, myproxytimeleft)
            #adjust the myproxy delegation time accordingly to the user cert validity
            self.logger.info("%sDelegating your proxy for %s days instead of %s %s",
                             colors.RED, usercertDaysLeft, self.myproxyDesiredValidity, colors.NORMAL)
            myproxy.myproxyValidity = "%i:00" % (usercertDaysLeft * 24)

        # creating the proxy
        self.logger.debug("Delegating a myproxy for %s hours", myproxy.myproxyValidity)
        try:
            myproxy.delegate(serverRenewer=True, nokey=nokey)
            myproxytimeleft = myproxy.getMyProxyTimeLeft(serverRenewer=True, nokey=nokey)
            if myproxytimeleft <= 0:
                raise ProxyCreationException("It seems your proxy has not been delegated to myproxy. Please check the logfile for the exact error "+\
                                             "(it might simply you typed a wrong password)")
            else:
                self.logger.debug("My-proxy delegated.")
        except Exception as ex:
            msg = ex._message if hasattr(ex, '_message') else str(ex)  # pylint: disable=protected-access, no-member
            raise ProxyCreationException("Problems delegating My-proxy. %s" % msg)
    return (credentialName, myproxytimeleft)