def handleMyProxy(self):
    """
    Check the myproxy credential and delegate it again if necessary.

    Takes no input and returns no output, but raises ProxyCreationException
    if the delegation failed.
    """
    if not self.cmdconf['requiresREST']:
        # If the command doesn't contact the REST, we can't delegate the proxy.
        return
    if self.options.proxy:
        # If the user passed a proxy as option we don't contact myproxy.
        return
    # Get the DN of the task workers from the server.
    all_task_workers_dns = server_info(self.crabserver, subresource='delegatedn')
    for authorizedDNs in all_task_workers_dns['services']:
        self.credentialHandler.setRetrievers(authorizedDNs)
        self.logger.debug("Registering user credentials on myproxy for %s" % authorizedDNs)
        try:
            (credentialName, myproxyTimeleft) = \
                self.credentialHandler.createNewMyProxy(timeleftthreshold=60 * 60 * 24 * RENEW_MYPROXY_THRESHOLD)
        except Exception as ex:
            # Local import kept as in the original (presumably to avoid an
            # import cycle at module load time -- confirm).
            from CRABClient.ClientExceptions import ProxyCreationException
            msg1 = "Error trying to create credential:\n %s" % str(ex)
            raise ProxyCreationException("Problems delegating My-proxy.\n%s" % msg1)
        msg1 = "Credential exists on myproxy: username: %s - validity: %s" % \
            (credentialName, str(timedelta(seconds=myproxyTimeleft)))
        self.logger.debug("Result of myproxy credential check:\n %s", msg1)
def handleProxy(self, proxyOptsSetPlace):
    """
    Initialize the user proxy and, when the command talks to the REST
    interface, delegate the credential to myproxy for every task-worker
    DN advertised by the server.
    """
    if self.options.proxy:
        # A proxy file was passed on the command line: adopt it as-is.
        self.proxyfilename = self.options.proxy
        os.environ['X509_USER_PROXY'] = self.options.proxy
        self.logger.debug('Skipping proxy creation')
        return
    if self.cmdconf['initializeProxy']:
        self.proxy.setVOGroupVORole(self.voGroup, self.voRole)
        self.proxy.setMyProxyAccount(self.serverurl)
        self.proxyfilename = self.proxy.createNewVomsProxy(
            timeLeftThreshold=720,
            doProxyGroupRoleCheck=self.cmdconf['doProxyGroupRoleCheck'],
            proxyCreatedByCRAB=self.proxyCreated,
            proxyOptsSetPlace=proxyOptsSetPlace)
    if self.cmdconf['requiresREST']:
        ## If the command doesn't contact the REST, we can't delegate the proxy.
        self.proxy.myproxyAccount = self.serverurl
        infoUrl = getUrl(self.instance, resource='info')
        ## Get the DN of the task workers from the server.
        delegationDNs = server_info('delegatedn', self.serverurl, self.proxyfilename, infoUrl)
        for workerDN in delegationDNs['services']:
            self.proxy.setServerDN(workerDN)
            self.proxy.setMyProxyServer('myproxy.cern.ch')
            self.logger.debug("Registering user credentials for server %s" % workerDN)
            self.proxy.createNewMyProxy(timeleftthreshold=60 * 60 * 24 * RENEW_MYPROXY_THRESHOLD,
                                        nokey=True)
def __call__(self):
    """
    Delegate the user credential to myproxy, with a validity of
    ``--days`` days, for every retriever DN published by the CRAB server.
    """
    handler = CredentialInteractions('', '', '', '', self.logger)
    validityDays = self.options.days
    handler.setMyProxyValidity(int(validityDays) * 24 * 60)  # minutes
    renewThreshold = int(validityDays) * 24 * 60 * 60  # seconds
    self.logger.info("Checking credentials")
    # need an X509 proxy in order to talk with CRABServer to get list of
    # myproxy authorized retrievers
    handler.proxyInfo = handler.createNewVomsProxy(timeLeftThreshold=720)
    vomsProxyFile = handler.proxyInfo['filename']
    serverHost = 'cmsweb.cern.ch'
    infoPath = '/crabserver/prod/info'
    retrieverDNs = server_info(subresource='delegatedn', serverurl=serverHost,
                               proxyfilename=vomsProxyFile, baseurl=infoPath)
    for retrieverDN in retrieverDNs['services']:
        handler.defaultDelegation['serverDN'] = retrieverDN
        handler.defaultDelegation['myProxySvr'] = 'myproxy.cern.ch'
        self.logger.info("Registering user credentials for server %s" % retrieverDN)
        credentialName, timeLeft = handler.createNewMyProxy(timeleftthreshold=renewThreshold,
                                                            nokey=True)
        self.logger.info("Credential exists on myproxy: username: %s - validity: %s",
                         credentialName, str(timedelta(seconds=timeLeft)))
        credentialName, timeLeft = handler.createNewMyProxy2(timeleftthreshold=renewThreshold,
                                                             nokey=True)
        self.logger.info("Credential exists on myproxy: username: %s - validity: %s",
                         credentialName, str(timedelta(seconds=timeLeft)))
    return
def __call__(self):
    """
    Renew the user credential in myproxy so that it stays valid for
    ``--days`` days, using the retriever DNs advertised by the server.
    """
    handler = CredentialInteractions(self.logger)
    validityDays = self.options.days
    handler.setMyProxyValidity(int(validityDays) * 24 * 60)  # minutes
    # Give one day of slack to the threshold, so repeating the command
    # shortly afterwards does not immediately re-delegate.
    renewThreshold = int(validityDays - 1) * 24 * 60 * 60  # seconds
    self.logger.info("Checking credentials")
    # need an X509 proxy in order to talk with CRABServer to get list of
    # myproxy authorized retrievers
    handler.createNewVomsProxy(timeLeftThreshold=720)
    retrieverDNs = server_info(crabserver=self.crabserver, subresource='delegatedn')
    for retrieverDN in retrieverDNs['services']:
        handler.setRetrievers(retrieverDN)
        self.logger.info("Registering user credentials in myproxy")
        credentialName, timeLeft = handler.createNewMyProxy(timeleftthreshold=renewThreshold)
        self.logger.info("Credential exists on myproxy: username: %s - validity: %s",
                         credentialName, str(timedelta(seconds=timeLeft)))
    return
def handleProxy(self, proxyOptsSetPlace):
    """
    Initialize the user proxy and delegate it to myproxy when the command
    needs to contact the REST interface.
    """
    if not self.options.proxy:
        if self.cmdconf['initializeProxy']:
            self.proxy.setVOGroupVORole(self.voGroup, self.voRole)
            self.proxy.setMyProxyAccount(self.serverurl)
            vomsProxyArgs = {'timeLeftThreshold': 720,
                             'doProxyGroupRoleCheck': self.cmdconf['doProxyGroupRoleCheck'],
                             'proxyCreatedByCRAB': self.proxyCreated,
                             'proxyOptsSetPlace': proxyOptsSetPlace}
            self.proxyfilename = self.proxy.createNewVomsProxy(**vomsProxyArgs)
        if self.cmdconf['requiresREST']:
            ## If the command doesn't contact the REST, we can't delegate the proxy.
            self.proxy.myproxyAccount = self.serverurl
            infoBaseUrl = self.getUrl(self.instance, resource='info')
            ## Get the DN of the task workers from the server.
            workerDNs = server_info('delegatedn', self.serverurl, self.proxyfilename, infoBaseUrl)
            delegationThreshold = 60 * 60 * 24 * RENEW_MYPROXY_THRESHOLD
            for taskWorkerDN in workerDNs['services']:
                self.proxy.setServerDN(taskWorkerDN)
                self.proxy.setMyProxyServer('myproxy.cern.ch')
                self.logger.debug("Registering user credentials for server %s" % taskWorkerDN)
                self.proxy.createNewMyProxy(timeleftthreshold=delegationThreshold, nokey=True)
    else:
        # A proxy file was passed on the command line: adopt it as-is.
        self.proxyfilename = self.options.proxy
        os.environ['X509_USER_PROXY'] = self.options.proxy
        self.logger.debug('Skipping proxy creation')
def __call__(self):
    """
    Remove the task sandbox tarball from the CRAB server cache.

    Raises ConfigurationException when the tarball is missing/ambiguous or
    the task is not in a final state, RESTCommunicationException on a bad
    status reply, and re-raises HTTPException from the cache removal.
    """
    self.logger.info('Getting the tarball hash key')
    tarballdir = glob.glob(self.requestarea + '/inputs/*.tgz')
    # Exactly one sandbox tarball must exist in the request area.
    if len(tarballdir) != 1:
        self.logger.info('%sError%s: Could not find tarball or there is more than one tarball' % (colors.RED, colors.NORMAL))
        raise ConfigurationException
    tarballdir = tarballdir[0]

    # checking task status
    self.logger.info('Checking task status')
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    # NOTE(fix): keep 'reason' instead of discarding it as '_': the error
    # message below referenced an undefined name, raising a NameError.
    dictresult, status, reason = server.get(self.uri, data={'workflow': self.cachedinfo['RequestName'], 'verbose': 0})
    dictresult = dictresult['result'][0]  # take just the significant part
    if status != 200:
        msg = "Problem retrieving task status:\ninput: %s\noutput: %s\nreason: %s" % (str(self.cachedinfo['RequestName']), str(dictresult), str(reason))
        raise RESTCommunicationException(msg)
    self.logger.info('Task status: %s' % dictresult['status'])
    # Purge is only allowed for tasks in a final state.
    accepstate = ['KILLED', 'FINISHED', 'FAILED', 'KILLFAILED', 'COMPLETED']
    if dictresult['status'] not in accepstate:
        msg = ('%sError%s: Only tasks with these status can be purged: {0}'.format(accepstate) % (colors.RED, colors.NORMAL))
        raise ConfigurationException(msg)

    # getting the cache url
    cacheresult = {}
    scheddresult = {}
    gsisshdict = {}
    if not self.options.scheddonly:
        baseurl = getUrl(self.instance, resource='info')
        cacheurl = server_info('backendurls', self.serverurl, self.proxyfilename, baseurl)
        cacheurl = cacheurl['cacheSSL']
        cacheurldict = {'endpoint': cacheurl, 'pycurl': True}
        ufc = UserFileCache(cacheurldict)
        hashkey = ufc.checksum(tarballdir)
        self.logger.info('Tarball hashkey: %s' % hashkey)
        self.logger.info('Attempting to remove task file from crab server cache')
        try:
            ufcresult = ufc.removeFile(hashkey)
        # NOTE(fix): Python-3-compatible 'except ... as', and the exception
        # variable renamed from 're' (which shadowed the re module).
        except HTTPException as ex:
            # NOTE(fix): 'in' replaces the removed dict.has_key(); a bare
            # 'raise' replaces the Python-2-only 'raise HTTPException, re'.
            if 'X-Error-Info' in ex.headers and 'Not such file' in ex.headers['X-Error-Info']:
                self.logger.info('%sError%s: Failed to find task file in crab server cache; the file might have been already purged' % (colors.RED, colors.NORMAL))
            raise
        if ufcresult == '':
            self.logger.info('%sSuccess%s: Successfully removed task files from crab server cache' % (colors.GREEN, colors.NORMAL))
            cacheresult = 'SUCCESS'
        else:
            self.logger.info('%sError%s: Failed to remove task files from crab server cache' % (colors.RED, colors.NORMAL))
            cacheresult = 'FAILED'
def checkversion(self, baseurl=None):
    """
    Compare this client's version against the compatible-version patterns
    published by the server and warn when there is no match.
    """
    compatibleversion = server_info('version', self.serverurl, self.proxyfilename, baseurl)
    compatible = any(re.match(pattern, __version__) for pattern in compatibleversion)
    if compatible:
        self.logger.debug("CRABClient version: %s Compatible" % __version__)
    else:
        self.logger.info("%sWarning%s: Incompatible CRABClient version \"%s\" " % (colors.RED, colors.NORMAL, __version__))
        self.logger.info("Server is saying that compatible versions are: %s" % [v.replace("\\", "") for v in compatibleversion])
def checkversion(self, baseurl=None):
    """
    Check whether this CRABClient version matches one of the version
    patterns the server declares compatible; log a warning otherwise.
    """
    compatibleVersions = server_info('version', self.serverurl, self.proxyfilename, baseurl)
    matched = next((pattern for pattern in compatibleVersions if re.match(pattern, __version__)), None)
    if matched is not None:
        self.logger.debug("CRABClient version: %s" % (__version__))
    else:
        msg = "%sWarning%s:" % (colors.RED, colors.NORMAL)
        msg += " Incompatible CRABClient version %s" % (__version__)
        msg += "\nServer is saying that compatible versions are: %s" % [v.replace("\\", "") for v in compatibleVersions]
        self.logger.info(msg)
def checkversion(self, baseurl=None):
    """
    Verify this client's version against the server's list of compatible
    version patterns, warning the user on a mismatch.
    """
    compatibleVersions = server_info('version', self.serverurl, self.proxyfilename, baseurl)
    foundMatch = False
    for pattern in compatibleVersions:
        if re.match(pattern, __version__):
            self.logger.debug("CRABClient version: %s" % (__version__))
            foundMatch = True
            break
    if not foundMatch:
        msg = "%sWarning%s:" % (colors.RED, colors.NORMAL)
        msg += " Incompatible CRABClient version %s" % (__version__)
        msg += "\nServer is saying that compatible versions are: %s" % [v.replace("\\", "") for v in compatibleVersions]
        self.logger.info(msg)
def checkversion(self):
    """
    Ask the server for its list of compatible client version patterns and
    warn when this CRABClient version matches none of them.
    """
    compatibleVersions = server_info(crabserver=self.crabserver, subresource='version')
    if any(re.match(pattern, __version__) for pattern in compatibleVersions):
        self.logger.debug("CRABClient version: %s" % (__version__))
    else:
        cleanedVersions = [v.replace("\\", "") for v in compatibleVersions]
        msg = "%sWarning%s:" % (colors.RED, colors.NORMAL)
        msg += " Incompatible CRABClient version %s" % (__version__)
        msg += "\nServer is saying that compatible versions are: %s" % cleanedVersions
        self.logger.info(msg)
def checkversion(self, baseurl=None):
    """
    Match this client's version against the server-published compatibility
    patterns; log a warning when no pattern matches.
    """
    compatibleversion = server_info('version', self.serverurl, self.proxyfilename, baseurl)
    for pattern in compatibleversion:
        if re.match(pattern, __version__):
            self.logger.debug("CRABClient version: %s Compatible" % __version__)
            break
    else:
        # No pattern matched: tell the user which versions the server accepts.
        self.logger.info("%sWarning%s: Incompatible CRABClient version \"%s\" " % (colors.RED, colors.NORMAL, __version__))
        self.logger.info("Server is saying that compatible versions are: %s" % [v.replace("\\", "") for v in compatibleversion])
def __call__(self):
    """
    Purge a task: remove its sandbox tarball from the CRAB server cache
    and remove the task directory from the schedd.

    Returns a dict with keys 'cacheresult', 'scheddresult' and 'gsiresult'.
    Raises ConfigurationException, RESTCommunicationException or
    HTTPException on the respective failure paths.
    """
    self.logger.info('Getting the tarball hash key')
    tarballdir = glob.glob(self.requestarea + '/inputs/*.tgz')
    # Exactly one sandbox tarball must exist in the request area.
    if len(tarballdir) != 1:
        self.logger.info('%sError%s: Could not find tarball or there is more than one tarball' % (colors.RED, colors.NORMAL))
        raise ConfigurationException
    tarballdir = tarballdir[0]

    # checking task status
    self.logger.info('Checking task status')
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    # NOTE(fix): keep 'reason' instead of discarding it as '_': the error
    # message below referenced an undefined name, raising a NameError.
    dictresult, status, reason = server.get(self.uri, data={'workflow': self.cachedinfo['RequestName'], 'verbose': 0})
    dictresult = dictresult['result'][0]  # take just the significant part
    if status != 200:
        msg = "Problem retrieving task status:\ninput: %s\noutput: %s\nreason: %s" % (str(self.cachedinfo['RequestName']), str(dictresult), str(reason))
        raise RESTCommunicationException(msg)
    self.logger.info('Task status: %s' % dictresult['status'])
    # Purge is only allowed for tasks in a final state.
    accepstate = ['KILLED', 'FINISHED', 'FAILED', 'KILLFAILED', 'COMPLETED']
    if dictresult['status'] not in accepstate:
        msg = ('%sError%s: Only tasks with these status can be purged: {0}'.format(accepstate) % (colors.RED, colors.NORMAL))
        raise ConfigurationException(msg)

    # getting the cache url
    cacheresult = {}
    scheddresult = {}
    gsisshdict = {}
    if not self.options.scheddonly:
        baseurl = getUrl(self.instance, resource='info')
        cacheurl = server_info('backendurls', self.serverurl, self.proxyfilename, baseurl)
        cacheurl = cacheurl['cacheSSL']
        cacheurldict = {'endpoint': cacheurl, 'pycurl': True}
        ufc = UserFileCache(cacheurldict)
        hashkey = ufc.checksum(tarballdir)
        self.logger.info('Tarball hashkey: %s' % hashkey)
        self.logger.info('Attempting to remove task file from crab server cache')
        try:
            ufcresult = ufc.removeFile(hashkey)
        except HTTPException as ex:
            # NOTE(fix): 'in' replaces the removed dict.has_key(); a bare
            # 'raise' replaces the Python-2-only 'raise HTTPException, re'.
            if 'X-Error-Info' in ex.headers and 'Not such file' in ex.headers['X-Error-Info']:
                self.logger.info('%sError%s: Failed to find task file in crab server cache; the file might have been already purged' % (colors.RED, colors.NORMAL))
            raise
        if ufcresult == '':
            self.logger.info('%sSuccess%s: Successfully removed task files from crab server cache' % (colors.GREEN, colors.NORMAL))
            cacheresult = 'SUCCESS'
        else:
            self.logger.info('%sError%s: Failed to remove task files from crab server cache' % (colors.RED, colors.NORMAL))
            cacheresult = 'FAILED'
    if not self.options.cacheonly:
        self.logger.info('Getting schedd address')
        baseurl = self.getUrl(self.instance, resource='info')
        try:
            scheddaddress = server_info('scheddaddress', self.serverurl, self.proxyfilename, baseurl,
                                        workflow=self.cachedinfo['RequestName'])
        except HTTPException:
            self.logger.info('%sError%s: Failed to get schedd address' % (colors.RED, colors.NORMAL))
            raise
        self.logger.debug('%sSuccess%s: Successfully got schedd address' % (colors.GREEN, colors.NORMAL))
        self.logger.debug('Schedd address: %s' % scheddaddress)
        self.logger.info('Attempting to remove task from schedd')
        # Remove the task directory on the schedd host via gsissh.
        gssishrm = 'gsissh -o ConnectTimeout=60 -o PasswordAuthentication=no ' + scheddaddress + ' rm -rf ' + self.cachedinfo['RequestName']
        self.logger.debug('gsissh command: %s' % gssishrm)
        delprocess = subprocess.Popen(gssishrm, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        stdout, stderr = delprocess.communicate()
        exitcode = delprocess.returncode
        if exitcode == 0:
            # NOTE(fix): 'schedd' was misspelled 'scehdd' in this message.
            self.logger.info('%sSuccess%s: Successfully removed task from schedd' % (colors.GREEN, colors.NORMAL))
            scheddresult = 'SUCCESS'
            gsisshdict = {}
        else:
            self.logger.info('%sError%s: Failed to remove task from schedd' % (colors.RED, colors.NORMAL))
            # NOTE(fix): the original assigned 'scheddaddress' here, leaving the
            # returned 'scheddresult' as an empty dict on failure.
            scheddresult = 'FAILED'
            self.logger.debug('gsissh stdout: %s\ngsissh stderr: %s\ngsissh exitcode: %s' % (stdout, stderr, exitcode))
            gsisshdict = {'stdout': stdout, 'stderr': stderr, 'exitcode': exitcode}
    return {'cacheresult': cacheresult, 'scheddresult': scheddresult, 'gsiresult': gsisshdict}
def __call__(self):
    """
    Build the submission request from the user configuration, run the
    job-type plugin to create and upload the input sandbox, send the
    request to the CRAB server and cache the returned request name.

    Returns a dict with 'requestname' and 'uniquerequestname'.
    Raises RESTCommunicationException when the server rejects the request.
    """
    # NOTE(review): 'valid' and 'configmsg' are never used below.
    valid = False
    configmsg = 'Default'
    self.logger.debug("Started submission")
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    uniquerequestname = None
    self.logger.debug("Working on %s" % str(self.requestarea))
    configreq = {'dryrun': 1 if self.options.dryrun else 0}
    # Fill configreq: for every server-side parameter take the first config
    # attribute that is set, otherwise the declared default (if any).
    for param in parametersMapping['on-server']:
        mustbetype = getattr(types, parametersMapping['on-server'][param]['type'])
        default = parametersMapping['on-server'][param]['default']
        config_params = parametersMapping['on-server'][param]['config']
        for config_param in config_params:
            # Walk dotted attribute paths (e.g. 'General.requestName').
            attrs = config_param.split('.')
            temp = self.configuration
            for attr in attrs:
                temp = getattr(temp, attr, None)
                if temp is None:
                    break
            if temp is not None:
                configreq[param] = temp
                break
            elif default is not None:
                configreq[param] = default
                temp = default
            else:
                ## Parameter not strictly required.
                pass
        ## Check that the requestname is of the right type.
        ## This is not checked in SubCommand.validateConfig().
        if param == 'workflow':
            if mustbetype == type(self.requestname):
                configreq['workflow'] = self.requestname
        ## Translate boolean flags into integers.
        elif param in ['savelogsflag', 'publication', 'nonprodsw', 'useparent', 'ignorelocality', 'saveoutput', 'oneEventMode']:
            configreq[param] = 1 if temp else 0
        ## Translate DBS URL aliases into DBS URLs.
        elif param in ['dbsurl', 'publishdbsurl']:
            if param == 'dbsurl':
                dbstype = 'reader'
            elif param == 'publishdbsurl':
                dbstype = 'writer'
            allowed_dbsurls = DBSURLS[dbstype].values()
            allowed_dbsurls_aliases = DBSURLS[dbstype].keys()
            if configreq[param] in allowed_dbsurls_aliases:
                configreq[param] = DBSURLS[dbstype][configreq[param]]
            elif configreq[param].rstrip('/') in allowed_dbsurls:
                configreq[param] = configreq[param].rstrip('/')
        elif param == 'scriptexe' and 'scriptexe' in configreq:
            # Only the basename is sent; the file itself travels in the sandbox.
            configreq[param] = os.path.basename(configreq[param])

    jobconfig = {}
    #get the backend URLs from the server external configuration
    serverBackendURLs = server_info('backendurls', self.serverurl, self.proxyfilename, getUrl(self.instance, resource='info'))
    #if cacheSSL is specified in the server external configuration we will use it to upload the sandbox (baseURL will be ignored)
    filecacheurl = serverBackendURLs['cacheSSL'] if 'cacheSSL' in serverBackendURLs else None
    pluginParams = [self.configuration, self.logger, os.path.join(self.requestarea, 'inputs')]
    crab_job_types = getJobTypes()
    # Known job types are looked up by their upper-cased name; anything else
    # is treated as the path of a user-supplied plugin module.
    if upper(configreq['jobtype']) in crab_job_types:
        plugjobtype = crab_job_types[upper(configreq['jobtype'])](*pluginParams)
        inputfiles, jobconfig, isbchecksum = plugjobtype.run(filecacheurl)
    else:
        fullname = configreq['jobtype']
        basename = os.path.basename(fullname).split('.')[0]
        plugin = addPlugin(fullname)[basename]
        pluginInst = plugin(*pluginParams)
        inputfiles, jobconfig, isbchecksum = pluginInst.run()
    if configreq['publication']:
        # Only EDM files get published; warn the user about the rest.
        non_edm_files = jobconfig['tfileoutfiles'] + jobconfig['addoutputfiles']
        if non_edm_files:
            msg = "%sWarning%s: The following output files will not be published, as they are not EDM files: %s" % (colors.RED, colors.NORMAL, non_edm_files)
            self.logger.warning(msg)
    # Append the sandbox checksum so a modified sandbox yields a distinct
    # publish name.
    if not configreq['publishname']:
        configreq['publishname'] = isbchecksum
    else:
        configreq['publishname'] = "%s-%s" %(configreq['publishname'], isbchecksum)
    configreq.update(jobconfig)
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    self.logger.info("Sending the request to the server")
    self.logger.debug("Submitting %s " % str(configreq))
    ## TODO: this shouldn't be hard-coded.
    listParams = ['adduserfiles', 'addoutputfiles', 'sitewhitelist', 'siteblacklist', 'blockwhitelist', 'blockblacklist', \
                  'tfileoutfiles', 'edmoutfiles', 'runs', 'lumis', 'userfiles', 'scriptargs', 'extrajdl']
    configreq_encoded = self._encodeRequest(configreq, listParams)
    self.logger.debug('Encoded submit request: %s' % (configreq_encoded))
    dictresult, status, reason = server.put(self.uri, data = configreq_encoded)
    self.logger.debug("Result: %s" % dictresult)
    if status != 200:
        msg = "Problem sending the request:\ninput:%s\noutput:%s\nreason:%s" % (str(configreq), str(dictresult), str(reason))
        raise RESTCommunicationException(msg)
    elif dictresult.has_key("result"):  # NOTE(review): Python-2-only dict.has_key
        uniquerequestname = dictresult["result"][0]["RequestName"]
    else:
        msg = "Problem during submission, no request ID returned:\ninput:%s\noutput:%s\nreason:%s" \
              % (str(configreq), str(dictresult), str(reason))
        raise RESTCommunicationException(msg)
    # Remember the submitted task in the local request-area cache.
    tmpsplit = self.serverurl.split(':')
    createCache(self.requestarea, tmpsplit[0], tmpsplit[1] if len(tmpsplit)>1 else '', uniquerequestname,
                voRole=self.voRole, voGroup=self.voGroup, instance=self.instance, originalConfig = self.configuration)
    self.logger.info("%sSuccess%s: Your task has been delivered to the CRAB3 server." %(colors.GREEN, colors.NORMAL))
    if not (self.options.wait or self.options.dryrun):
        self.logger.info("Task name: %s" % uniquerequestname)
        self.logger.info("Please use 'crab status' to check how the submission process proceeds.")
    else:
        # Poll the server until the task reaches the expected status.
        targetTaskStatus = 'UPLOADED' if self.options.dryrun else 'SUBMITTED'
        self.checkStatusLoop(server, uniquerequestname, targetTaskStatus)
    if self.options.dryrun:
        self.printDryRunResults(*self.executeTestRun(filecacheurl))
    self.logger.debug("About to return")
    return {'requestname' : self.requestname , 'uniquerequestname' : uniquerequestname }
class purge(SubCommand):
    """
    clean user schedd and cache for a given task. User must specify the taskname to be purge
    """
    visible = True

    def __call__(self):
        """
        Remove the task sandbox from the CRAB server cache and (unless
        --cacheonly) the task directory from the schedd.

        Returns a dict with keys 'cacheresult', 'scheddresult' and 'gsiresult'.
        """
        self.logger.info('Getting the tarball hash key')
        tarballdir = glob.glob(self.requestarea + '/inputs/*.tgz')
        # Exactly one sandbox tarball must exist in the request area.
        if len(tarballdir) != 1:
            self.logger.info('%sError%s: Could not find tarball or there is more than one tarball' % (colors.RED, colors.NORMAL))
            raise ConfigurationException
        tarballdir = tarballdir[0]

        #checking task status
        self.logger.info('Checking task status')
        serverFactory = CRABClient.Emulator.getEmulator('rest')
        server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
        dictresult, status, _ = server.get(self.uri, data={'workflow': self.cachedinfo['RequestName'], 'verbose': 0})
        dictresult = dictresult['result'][0]  #take just the significant part
        if status != 200:
            # NOTE(review): 'reason' is undefined here (it was discarded as '_'
            # above), so this error path raises a NameError instead.
            msg = "Problem retrieving task status:\ninput: %s\noutput: %s\nreason: %s" % (str(self.cachedinfo['RequestName']), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)
        self.logger.info('Task status: %s' % dictresult['status'])
        # Purge is only allowed for tasks in a final state.
        accepstate = ['KILLED', 'FINISHED', 'FAILED', 'KILLFAILED', 'COMPLETED']
        if dictresult['status'] not in accepstate:
            msg = ('%sError%s: Only tasks with these status can be purged: {0}'.format(accepstate) % (colors.RED, colors.NORMAL))
            raise ConfigurationException(msg)

        #getting the cache url
        cacheresult = {}
        scheddresult = {}
        gsisshdict = {}
        if not self.options.scheddonly:
            baseurl = getUrl(self.instance, resource='info')
            cacheurl = server_info('backendurls', self.serverurl, self.proxyfilename, baseurl)
            cacheurl = cacheurl['cacheSSL']
            cacheurldict = {'endpoint': cacheurl, 'pycurl': True}
            ufc = UserFileCache(cacheurldict)
            hashkey = ufc.checksum(tarballdir)
            self.logger.info('Tarball hashkey: %s' % hashkey)
            self.logger.info('Attempting to remove task file from crab server cache')
            try:
                ufcresult = ufc.removeFile(hashkey)
            except HTTPException, re:  # NOTE(review): Python-2 syntax; 're' shadows the re module
                if re.headers.has_key('X-Error-Info') and 'Not such file' in re.headers['X-Error-Info']:
                    self.logger.info('%sError%s: Failed to find task file in crab server cache; the file might have been already purged' % (colors.RED, colors.NORMAL))
                raise HTTPException, re
            if ufcresult == '':
                self.logger.info('%sSuccess%s: Successfully removed task files from crab server cache' % (colors.GREEN, colors.NORMAL))
                cacheresult = 'SUCCESS'
            else:
                self.logger.info('%sError%s: Failed to remove task files from crab server cache' % (colors.RED, colors.NORMAL))
                cacheresult = 'FAILED'
        if not self.options.cacheonly:
            self.logger.info('Getting schedd address')
            baseurl = self.getUrl(self.instance, resource='info')
            try:
                scheddaddress = server_info('scheddaddress', self.serverurl, self.proxyfilename, baseurl, workflow=self.cachedinfo['RequestName'])
            except HTTPException, he:
                self.logger.info('%sError%s: Failed to get schedd address' % (colors.RED, colors.NORMAL))
                raise HTTPException, he
            self.logger.debug('%sSuccess%s: Successfully got schedd address' % (colors.GREEN, colors.NORMAL))
            self.logger.debug('Schedd address: %s' % scheddaddress)
            self.logger.info('Attempting to remove task from schedd')
            # Remove the task directory on the schedd host via gsissh.
            gssishrm = 'gsissh -o ConnectTimeout=60 -o PasswordAuthentication=no ' + scheddaddress + ' rm -rf ' + self.cachedinfo['RequestName']
            self.logger.debug('gsissh command: %s' % gssishrm)
            delprocess = subprocess.Popen(gssishrm, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            stdout, stderr = delprocess.communicate()
            exitcode = delprocess.returncode
            if exitcode == 0:
                self.logger.info('%sSuccess%s: Successfully removed task from scehdd' % (colors.GREEN, colors.NORMAL))
                scheddresult = 'SUCCESS'
                gsisshdict = {}
            else:
                self.logger.info('%sError%s: Failed to remove task from schedd' % (colors.RED, colors.NORMAL))
                # NOTE(review): assigns 'scheddaddress', not 'scheddresult' — the
                # returned 'scheddresult' stays {} on failure; likely a bug.
                scheddaddress = 'FAILED'
                self.logger.debug('gsissh stdout: %s\ngsissh stderr: %s\ngsissh exitcode: %s' % (stdout, stderr, exitcode))
                gsisshdict = {'stdout': stdout, 'stderr': stderr, 'exitcode': exitcode}
        return {'cacheresult': cacheresult, 'scheddresult': scheddresult, 'gsiresult': gsisshdict}
def __call__(self):
    """
    Remove the task sandbox tarball from the CRAB server cache.

    Raises ConfigurationException when the tarball is missing/ambiguous or
    the task is not in a final state, and RESTCommunicationException /
    HTTPException on server errors.
    """
    self.logger.info('Getting the tarball hash key')
    tarballdir = glob.glob(self.requestarea + '/inputs/*.tgz')
    # Exactly one sandbox tarball must exist in the request area.
    if len(tarballdir) != 1:
        self.logger.info('%sError%s: Could not find tarball or there is more than one tarball' % (colors.RED, colors.NORMAL))
        raise ConfigurationException
    tarballdir = tarballdir[0]

    #checking task status
    self.logger.info('Checking task status')
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    dictresult, status, _ = server.get(self.uri, data={'workflow': self.cachedinfo['RequestName'], 'verbose': 0})
    dictresult = dictresult['result'][0]  #take just the significant part
    if status != 200:
        # NOTE(review): 'reason' is undefined here (it was discarded as '_'
        # above), so this error path raises a NameError instead.
        msg = "Problem retrieving task status:\ninput: %s\noutput: %s\nreason: %s" % (str(self.cachedinfo['RequestName']), str(dictresult), str(reason))
        raise RESTCommunicationException(msg)
    self.logger.info('Task status: %s' % dictresult['status'])
    # Purge is only allowed for tasks in a final state.
    accepstate = ['KILLED', 'FINISHED', 'FAILED', 'KILLFAILED', 'COMPLETED']
    if dictresult['status'] not in accepstate:
        msg = ('%sError%s: Only tasks with these status can be purged: {0}'.format(accepstate) % (colors.RED, colors.NORMAL))
        raise ConfigurationException(msg)

    #getting the cache url
    cacheresult = {}
    scheddresult = {}
    gsisshdict = {}
    if not self.options.scheddonly:
        baseurl = getUrl(self.instance, resource='info')
        cacheurl = server_info('backendurls', self.serverurl, self.proxyfilename, baseurl)
        cacheurl = cacheurl['cacheSSL']
        cacheurldict = {'endpoint': cacheurl, 'pycurl': True}
        ufc = UserFileCache(cacheurldict)
        hashkey = ufc.checksum(tarballdir)
        self.logger.info('Tarball hashkey: %s' % hashkey)
        self.logger.info('Attempting to remove task file from crab server cache')
        try:
            ufcresult = ufc.removeFile(hashkey)
        except HTTPException, re:  # NOTE(review): Python-2 syntax; 're' shadows the re module
            if re.headers.has_key('X-Error-Info') and 'Not such file' in re.headers['X-Error-Info']:
                self.logger.info('%sError%s: Failed to find task file in crab server cache; the file might have been already purged' % (colors.RED, colors.NORMAL))
            raise HTTPException, re
        if ufcresult == '':
            self.logger.info('%sSuccess%s: Successfully removed task files from crab server cache' % (colors.GREEN, colors.NORMAL))
            cacheresult = 'SUCCESS'
        else:
            self.logger.info('%sError%s: Failed to remove task files from crab server cache' % (colors.RED, colors.NORMAL))
            cacheresult = 'FAILED'
def __call__(self):
    """
    Build the submission request from the user configuration, run the
    job-type plugin to create and upload the input sandbox, send the
    request to the CRAB server and cache the returned request name.

    Returns a dict with 'requestname' and 'uniquerequestname'.
    Raises RESTCommunicationException when the server rejects the request.
    """
    self.logger.debug("Started submission")
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    uniquerequestname = None
    self.logger.debug("Working on %s" % str(self.requestarea))
    self.configreq = {'dryrun': 1 if self.options.dryrun else 0}
    # Fill configreq: for every server-side parameter take the first config
    # attribute that is set, otherwise the declared default (if any).
    for param in parametersMapping['on-server']:
        mustbetype = getattr(types, parametersMapping['on-server'][param]['type'])
        default = parametersMapping['on-server'][param]['default']
        config_params = parametersMapping['on-server'][param]['config']
        for config_param in config_params:
            # Walk dotted attribute paths (e.g. 'General.requestName').
            attrs = config_param.split('.')
            temp = self.configuration
            for attr in attrs:
                temp = getattr(temp, attr, None)
                if temp is None:
                    break
            if temp is not None:
                self.configreq[param] = temp
                break
            elif default is not None:
                self.configreq[param] = default
                temp = default
            else:
                ## Parameter not strictly required.
                pass
        ## Check that the requestname is of the right type.
        ## This is not checked in SubCommand.validateConfig().
        if param == 'workflow':
            if isinstance(self.requestname, mustbetype):
                self.configreq['workflow'] = self.requestname
        ## Translate boolean flags into integers.
        elif param in ['savelogsflag', 'publication', 'publishgroupname', 'nonprodsw', 'useparent',\
                       'ignorelocality', 'saveoutput', 'oneEventMode', 'nonvaliddata', 'ignoreglobalblacklist']:
            self.configreq[param] = 1 if temp else 0
        ## Translate DBS URL aliases into DBS URLs.
        elif param in ['dbsurl', 'publishdbsurl']:
            if param == 'dbsurl':
                dbstype = 'reader'
            elif param == 'publishdbsurl':
                dbstype = 'writer'
            allowed_dbsurls = DBSURLS[dbstype].values()
            allowed_dbsurls_aliases = DBSURLS[dbstype].keys()
            if self.configreq[param] in allowed_dbsurls_aliases:
                self.configreq[param] = DBSURLS[dbstype][self.configreq[param]]
            elif self.configreq[param].rstrip('/') in allowed_dbsurls:
                self.configreq[param] = self.configreq[param].rstrip('/')
        elif param == 'scriptexe' and 'scriptexe' in self.configreq:
            # Only the basename is sent; the file itself travels in the sandbox.
            self.configreq[param] = os.path.basename(self.configreq[param])

    jobconfig = {}
    #get the backend URLs from the server external configuration
    serverBackendURLs = server_info('backendurls', self.serverurl, self.proxyfilename, getUrl(self.instance, resource='info'))
    #if cacheSSL is specified in the server external configuration we will use it to upload the sandbox
    filecacheurl = serverBackendURLs['cacheSSL'] if 'cacheSSL' in serverBackendURLs else None
    pluginParams = [self.configuration, self.proxyfilename, self.logger, os.path.join(self.requestarea, 'inputs')]
    crab_job_types = getJobTypes()
    # Known job types are looked up by their upper-cased name; anything else
    # is treated as the path of a user-supplied plugin module.
    if upper(self.configreq['jobtype']) in crab_job_types:
        plugjobtype = crab_job_types[upper(self.configreq['jobtype'])](*pluginParams)
        dummy_inputfiles, jobconfig = plugjobtype.run(filecacheurl)
    else:
        fullname = self.configreq['jobtype']
        basename = os.path.basename(fullname).split('.')[0]
        plugin = addPlugin(fullname)[basename]
        pluginInst = plugin(*pluginParams)
        dummy_inputfiles, jobconfig = pluginInst.run()
    if self.configreq['publication']:
        # Only EDM files get published; warn the user about the rest.
        non_edm_files = jobconfig['tfileoutfiles'] + jobconfig['addoutputfiles']
        if non_edm_files:
            msg = "%sWarning%s: The following output files will not be published, as they are not EDM files: %s" % (colors.RED, colors.NORMAL, non_edm_files)
            self.logger.warning(msg)
    self.configreq.update(jobconfig)
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    self.logger.info("Sending the request to the server at %s" % self.serverurl)
    self.logger.debug("Submitting %s " % str(self.configreq))
    ## TODO: this shouldn't be hard-coded.
    listParams = ['addoutputfiles', 'sitewhitelist', 'siteblacklist', 'blockwhitelist', 'blockblacklist', \
                  'tfileoutfiles', 'edmoutfiles', 'runs', 'lumis', 'userfiles', 'scriptargs', 'extrajdl']
    self.configreq_encoded = self._encodeRequest(self.configreq, listParams)
    self.logger.debug('Encoded submit request: %s' % (self.configreq_encoded))
    dictresult, status, reason = server.put(self.uri, data=self.configreq_encoded)
    self.logger.debug("Result: %s" % dictresult)
    if status != 200:
        msg = "Problem sending the request:\ninput:%s\noutput:%s\nreason:%s" % (str(self.configreq), str(dictresult), str(reason))
        raise RESTCommunicationException(msg)
    elif 'result' in dictresult:
        uniquerequestname = dictresult["result"][0]["RequestName"]
    else:
        msg = "Problem during submission, no request ID returned:\ninput:%s\noutput:%s\nreason:%s" \
              % (str(self.configreq), str(dictresult), str(reason))
        raise RESTCommunicationException(msg)
    # Remember the submitted task in the local request-area cache.
    tmpsplit = self.serverurl.split(':')
    createCache(self.requestarea, tmpsplit[0], tmpsplit[1] if len(tmpsplit) > 1 else '', uniquerequestname,
                voRole=self.voRole, voGroup=self.voGroup, instance=self.instance, originalConfig=self.configuration)
    self.logger.info("%sSuccess%s: Your task has been delivered to the %s CRAB3 server." % (colors.GREEN, colors.NORMAL, self.instance))
    if not (self.options.wait or self.options.dryrun):
        self.logger.info("Task name: %s" % uniquerequestname)
        projDir = os.path.join(getattr(self.configuration.General, 'workArea', '.'), self.requestname)
        self.logger.info("Project dir: %s" % projDir)
        self.logger.info("Please use 'crab status -d %s' to check how the submission process proceeds.", projDir)
    else:
        # Poll the server until the task reaches the expected status.
        targetTaskStatus = 'UPLOADED' if self.options.dryrun else 'SUBMITTED'
        checkStatusLoop(self.logger, server, self.uri, uniquerequestname, targetTaskStatus, self.name)
    if self.options.dryrun:
        self.printDryRunResults(*self.executeTestRun(filecacheurl))
    self.logger.debug("About to return")
    return {'requestname': self.requestname, 'uniquerequestname': uniquerequestname}
def __call__(self):
    """
    Purge a finished task: remove its sandbox tarball from the crab server
    cache and (optionally) its working directory from the schedd.

    Honors self.options.scheddonly / self.options.cacheonly to skip one of
    the two cleanup steps.

    Returns a dict with keys 'cacheresult', 'scheddresult' and 'gsiresult'
    ('SUCCESS'/'FAILED' strings, or {} for a skipped step; 'gsiresult' holds
    gsissh stdout/stderr/exitcode on schedd-removal failure).

    Raises ConfigurationException if the task is not in a terminal status.
    """
    self.logger.info('Getting the tarball hash key')
    inputlist = {'subresource': 'search', 'workflow': self.cachedinfo['RequestName']}
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    uri = getUrl(self.instance, resource='task')
    dictresult, _, _ = server.get(uri, data=inputlist)
    tm_user_sandbox = getColumn(dictresult, 'tm_user_sandbox')
    # The cache is keyed by the sandbox hash, i.e. the file name minus extension.
    hashkey = tm_user_sandbox.replace(".tar.gz", "")
    # Get the schedd address from the DB info and strip off the 'crab3@' prefix if it exists
    scheddaddress = getColumn(dictresult, 'tm_schedd')
    scheddaddress = scheddaddress.split('@')[1] if '@' in scheddaddress else scheddaddress
    self.logger.info('Checking task status')
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    dictresult, _, _ = server.get(self.uri, data={'workflow': self.cachedinfo['RequestName'], 'verbose': 0})
    dictresult = dictresult['result'][0]  # take just the significant part
    self.logger.info('Task status: %s' % dictresult['status'])
    # Only terminal states may be purged; purging a live task would break it.
    accepstate = ['SUBMITFAILED', 'KILLED', 'FINISHED', 'FAILED', 'KILLFAILED', 'COMPLETED']
    if dictresult['status'] not in accepstate:
        msg = ('%sError%s: Only tasks with these status can be purged: {0}'.format(accepstate)
               % (colors.RED, colors.NORMAL))
        raise ConfigurationException(msg)
    # getting the cache url
    cacheresult = {}
    scheddresult = {}
    gsisshdict = {}
    if not self.options.scheddonly:
        baseurl = getUrl(self.instance, resource='info')
        cacheurl = server_info('backendurls', self.serverurl, self.proxyfilename, baseurl)
        cacheurl = cacheurl['cacheSSL']
        cacheurldict = {'endpoint': cacheurl, 'pycurl': True}
        ufc = UserFileCache(cacheurldict)
        self.logger.info('Tarball hashkey: %s' % hashkey)
        self.logger.info('Attempting to remove task file from crab server cache')
        try:
            ufcresult = ufc.removeFile(hashkey)
        except HTTPException as he:  # renamed from 're' to avoid shadowing the re module
            # 'Not such file' is the server's own (sic) error string — do not "fix" the typo here.
            if 'X-Error-Info' in he.headers and 'Not such file' in he.headers['X-Error-Info']:
                self.logger.info('%sError%s: Failed to find task file in crab server cache; the file might have been already purged' % (colors.RED, colors.NORMAL))
            # Always re-raise: a cache failure we cannot classify must not be silently ignored,
            # and it also guarantees 'ufcresult' below is never read unbound.
            raise
        if ufcresult == '':
            self.logger.info('%sSuccess%s: Successfully removed task files from crab server cache' % (colors.GREEN, colors.NORMAL))
            cacheresult = 'SUCCESS'
        else:
            self.logger.info('%sError%s: Failed to remove task files from crab server cache' % (colors.RED, colors.NORMAL))
            cacheresult = 'FAILED'
    if not self.options.cacheonly:
        self.logger.debug('%sSuccess%s: Successfully got schedd address' % (colors.GREEN, colors.NORMAL))
        self.logger.debug('Schedd address: %s' % scheddaddress)
        self.logger.info('Attempting to remove task from schedd')
        # Remove the task working directory on the schedd over gsissh.
        gssishrm = 'gsissh -o ConnectTimeout=60 -o PasswordAuthentication=no ' + scheddaddress + ' rm -rf ' + self.cachedinfo['RequestName']
        self.logger.debug('gsissh command: %s' % gssishrm)
        delprocess = subprocess.Popen(gssishrm, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        stdout, stderr = delprocess.communicate()
        exitcode = delprocess.returncode
        if exitcode == 0:
            self.logger.info('%sSuccess%s: Successfully removed task from schedd' % (colors.GREEN, colors.NORMAL))
            scheddresult = 'SUCCESS'
            gsisshdict = {}
        else:
            self.logger.info('%sError%s: Failed to remove task from schedd' % (colors.RED, colors.NORMAL))
            # BUG FIX: original assigned 'FAILED' to scheddaddress, leaving the
            # returned scheddresult as {} on failure.
            scheddresult = 'FAILED'
            self.logger.debug('gsissh stdout: %s\ngsissh stderr: %s\ngsissh exitcode: %s' % (stdout, stderr, exitcode))
            gsisshdict = {'stdout': stdout, 'stderr': stderr, 'exitcode': exitcode}
    return {'cacheresult': cacheresult, 'scheddresult': scheddresult, 'gsiresult': gsisshdict}
def handleProxy(self, proxyOptsSetPlace):
    """
    Init the user proxy, and delegate it if necessary.

    If the user supplied --proxy, that file is used as-is and no creation or
    delegation happens. Otherwise a VOMS proxy is (re)created when the command
    config asks for it, and — for commands that contact the REST — the proxy
    is delegated to myproxy.cern.ch once per task-worker DN returned by the
    server. Raises ProxyCreationException only when BOTH delegation attempts
    (createNewMyProxy and createNewMyProxy2) fail for a DN.
    """
    if not self.options.proxy:
        if self.cmdconf['initializeProxy']:
            self.proxy.setVOGroupVORole(self.voGroup, self.voRole)
            # createNewVomsProxy renews the proxy if below the 720-minute threshold.
            self.proxy.proxyInfo = self.proxy.createNewVomsProxy(timeLeftThreshold=720, \
                doProxyGroupRoleCheck=self.cmdconf['doProxyGroupRoleCheck'], \
                proxyCreatedByCRAB=self.proxyCreated, \
                proxyOptsSetPlace=proxyOptsSetPlace)
            self.proxyfilename = self.proxy.proxyInfo['filename']
        if self.cmdconf['requiresREST']:  ## If the command doesn't contact the REST, we can't delegate the proxy.
            baseurl = getUrl(self.instance, resource='info')
            ## Get the DN of the task workers from the server.
            all_task_workers_dns = server_info(
                subresource='delegatedn',
                serverurl=self.serverurl,
                proxyfilename=self.proxyfilename,
                baseurl=baseurl,
                logger=self.logger)
            # Delegate once per authorized task-worker DN.
            for serverdn in all_task_workers_dns['services']:
                self.proxy.setServerDN(serverdn)
                self.proxy.setMyProxyServer('myproxy.cern.ch')
                self.logger.debug(
                    "Registering user credentials for server %s" % serverdn)
                # First delegation attempt (legacy credential naming).
                # RENEW_MYPROXY_THRESHOLD is in days; the argument is seconds.
                try:
                    (credentialName, myproxyTimeleft) = self.proxy.createNewMyProxy(
                        timeleftthreshold=60 * 60 * 24 * RENEW_MYPROXY_THRESHOLD, nokey=True)
                    p1 = True
                    msg1 = "Credential exists on myproxy: username: %s - validity: %s" %\
                        (credentialName, str(timedelta(seconds=myproxyTimeleft)))
                except Exception as ex:
                    p1 = False
                    msg1 = "Error trying to create credential:\n %s" % str(
                        ex)
                # Second, independent delegation attempt.
                # NOTE(review): presumably createNewMyProxy2 registers the credential
                # under an alternate naming scheme for a transition period — confirm
                # against the Proxy class.
                try:
                    (credentialName, myproxyTimeleft) = self.proxy.createNewMyProxy2(
                        timeleftthreshold=60 * 60 * 24 * RENEW_MYPROXY_THRESHOLD, nokey=True)
                    p2 = True
                    msg2 = "Credential exists on myproxy: username: %s - validity: %s" %\
                        (credentialName, str(timedelta(seconds=myproxyTimeleft)))
                except Exception as ex:
                    p2 = False
                    msg2 = "Error trying to create credential:\n %s" % str(
                        ex)
                # Only a double failure is fatal; one working credential is enough.
                if (not p1) and (not p2):
                    from CRABClient.ClientExceptions import ProxyCreationException
                    raise ProxyCreationException(
                        "Problems delegating My-proxy.\n%s\n%s" % (msg1, msg2))
                self.logger.debug(
                    "Result of myproxy credential check:\n %s\n %s", msg1, msg2)
    else:
        # User-supplied proxy: trust it and export it for any child tools.
        self.proxyfilename = self.options.proxy
        os.environ['X509_USER_PROXY'] = self.options.proxy
        self.logger.debug('Skipping proxy creation')
def __call__(self):
    """
    Purge a finished task: remove its sandbox tarball from the crab server
    cache and (optionally) its working directory from the schedd.

    Honors self.options.scheddonly / self.options.cacheonly to skip one of
    the two cleanup steps.

    Returns a dict with keys 'cacheresult', 'scheddresult' and 'gsiresult'
    ('SUCCESS'/'FAILED' strings, or {} for a skipped step; 'gsiresult' holds
    gsissh stdout/stderr/exitcode on schedd-removal failure).

    Raises ConfigurationException if the task is not in a terminal status.
    """
    self.logger.info('Getting the tarball hash key')
    inputlist = {'subresource': 'search', 'workflow': self.cachedinfo['RequestName']}
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    # BUG FIX: was 'self.getUrl(...)' — getUrl is the module-level helper used
    # everywhere else in this method (see baseurl below); 'self.getUrl' would
    # raise AttributeError.
    uri = getUrl(self.instance, resource='task')
    dictresult, _, _ = server.get(uri, data=inputlist)
    tm_user_sandbox = getColumn(dictresult, 'tm_user_sandbox')
    # The cache is keyed by the sandbox hash, i.e. the file name minus extension.
    hashkey = tm_user_sandbox.replace(".tar.gz", "")
    # Get the schedd address from the DB info and strip off the 'crab3@' prefix if it exists
    scheddaddress = getColumn(dictresult, 'tm_schedd')
    scheddaddress = scheddaddress.split('@')[1] if '@' in scheddaddress else scheddaddress
    self.logger.info('Checking task status')
    serverFactory = CRABClient.Emulator.getEmulator('rest')
    server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
    dictresult, _, _ = server.get(self.uri, data={'workflow': self.cachedinfo['RequestName'], 'verbose': 0})
    dictresult = dictresult['result'][0]  # take just the significant part
    self.logger.info('Task status: %s' % dictresult['status'])
    # Only terminal states may be purged; purging a live task would break it.
    accepstate = ['SUBMITFAILED', 'KILLED', 'FINISHED', 'FAILED', 'KILLFAILED', 'COMPLETED']
    if dictresult['status'] not in accepstate:
        msg = ('%sError%s: Only tasks with these status can be purged: {0}'.format(accepstate)
               % (colors.RED, colors.NORMAL))
        raise ConfigurationException(msg)
    # getting the cache url
    cacheresult = {}
    scheddresult = {}
    gsisshdict = {}
    if not self.options.scheddonly:
        baseurl = getUrl(self.instance, resource='info')
        cacheurl = server_info('backendurls', self.serverurl, self.proxyfilename, baseurl)
        cacheurl = cacheurl['cacheSSL']
        cacheurldict = {'endpoint': cacheurl, 'pycurl': True}
        ufc = UserFileCache(cacheurldict)
        self.logger.info('Tarball hashkey: %s' % hashkey)
        self.logger.info('Attempting to remove task file from crab server cache')
        try:
            ufcresult = ufc.removeFile(hashkey)
        except HTTPException as he:  # renamed from 're' to avoid shadowing the re module
            # 'Not such file' is the server's own (sic) error string — do not "fix" the typo here.
            if 'X-Error-Info' in he.headers and 'Not such file' in he.headers['X-Error-Info']:
                self.logger.info('%sError%s: Failed to find task file in crab server cache; the file might have been already purged' % (colors.RED, colors.NORMAL))
            # Always re-raise: a cache failure we cannot classify must not be silently ignored,
            # and it also guarantees 'ufcresult' below is never read unbound.
            raise
        if ufcresult == '':
            self.logger.info('%sSuccess%s: Successfully removed task files from crab server cache' % (colors.GREEN, colors.NORMAL))
            cacheresult = 'SUCCESS'
        else:
            self.logger.info('%sError%s: Failed to remove task files from crab server cache' % (colors.RED, colors.NORMAL))
            cacheresult = 'FAILED'
    if not self.options.cacheonly:
        self.logger.debug('%sSuccess%s: Successfully got schedd address' % (colors.GREEN, colors.NORMAL))
        self.logger.debug('Schedd address: %s' % scheddaddress)
        self.logger.info('Attempting to remove task from schedd')
        # Remove the task working directory on the schedd over gsissh.
        gssishrm = 'gsissh -o ConnectTimeout=60 -o PasswordAuthentication=no ' + scheddaddress + ' rm -rf ' + self.cachedinfo['RequestName']
        self.logger.debug('gsissh command: %s' % gssishrm)
        delprocess = subprocess.Popen(gssishrm, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        stdout, stderr = delprocess.communicate()
        exitcode = delprocess.returncode
        if exitcode == 0:
            self.logger.info('%sSuccess%s: Successfully removed task from schedd' % (colors.GREEN, colors.NORMAL))
            scheddresult = 'SUCCESS'
            gsisshdict = {}
        else:
            self.logger.info('%sError%s: Failed to remove task from schedd' % (colors.RED, colors.NORMAL))
            # BUG FIX: original assigned 'FAILED' to scheddaddress, leaving the
            # returned scheddresult as {} on failure.
            scheddresult = 'FAILED'
            self.logger.debug('gsissh stdout: %s\ngsissh stderr: %s\ngsissh exitcode: %s' % (stdout, stderr, exitcode))
            gsisshdict = {'stdout': stdout, 'stderr': stderr, 'exitcode': exitcode}
    return {'cacheresult': cacheresult, 'scheddresult': scheddresult, 'gsiresult': gsisshdict}