Example #1
 def handleProxy(self):
     """ 
     Init the user proxy, and delegate it if necessary.
     """
     if not self.options.proxy:
         if self.cmdconf['initializeProxy']:
             self.proxy.setVOGroupVORole(self.voGroup, self.voRole)
             self.proxy.setMyProxyAccount(self.serverurl)
             _, self.proxyfilename = self.proxy.createNewVomsProxy(
                 time_left_threshold=720,
                 proxy_created_by_crab=self.proxy_created)
             ## If the command doesn't contact the REST, we can't delegate the proxy.
             if self.cmdconf['requiresREST']:
                 self.proxy.myproxyAccount = self.serverurl
                 baseurl = self.getUrl(self.instance, resource='info')
                 ## Get the DN of the task workers from the server.
                 all_task_workers_dns = server_info('delegatedn',
                                                    self.serverurl,
                                                    self.proxyfilename,
                                                    baseurl)
                 for serverdn in all_task_workers_dns['services']:
                     self.proxy.setServerDN(serverdn)
                     self.proxy.setMyProxyServer('myproxy.cern.ch')
                     self.logger.debug(
                         "Registering user credentials for server %s" %
                         serverdn)
                     self.proxy.createNewMyProxy(
                         timeleftthreshold=60 * 60 * 24 * RENEW_MYPROXY_THRESHOLD,
                         nokey=True)
     else:
         self.proxyfilename = self.options.proxy
         os.environ['X509_USER_PROXY'] = self.options.proxy
         self.logger.debug('Skipping proxy creation')
Example #2
 def handleProxy(self):
     """ Init the user proxy, and delegate it if necessary.
     """
     if not self.options.skipProxy and self.initializeProxy:
         _, self.proxyfilename, proxyobj = initProxy(
             self.voRole, self.voGroup, self.logger)
         #get the dn of the agents from the server
         alldns = server_info('delegatedn', self.serverurl,
                              self.proxyfilename)
         #for each agentDN received from the server, delegate it!
         #XXX Temporary solution. Need to figure out how to delegate credential to the several WMAgent
         #without forcing the user to insert the password several times
         if 'rest' in alldns and alldns['rest']:
             delegateProxy(alldns['rest'],
                           'myproxy.cern.ch',
                           proxyobj,
                           self.logger,
                           nokey=True)
         if 'services' in alldns:
             for serverdn in alldns['services']:
                 delegateProxy(serverdn,
                               'myproxy.cern.ch',
                               proxyobj,
                               self.logger,
                               nokey=False)
     else:
         self.proxyfilename = self.options.skipProxy
         self.logger.debug('Skipping proxy creation')
Example #3
 def checkversion(self, baseurl = None):
     compatibleversion = server_info('version', self.serverurl, self.proxyfilename, baseurl)
     if __version__ in compatibleversion:
         self.logger.debug("CRABClient version: %s Compatible"  % __version__)
     else:
         self.logger.info("%sWARNING%s: Incompatible CRABClient version \"%s\" " % (colors.RED, colors.NORMAL , __version__ ))
         self.logger.info("Server is saying that compatible versions are: %s"  % compatibleversion)
Example #4
    def handleProxy(self):
        """ Init the user proxy, and delegate it if necessary.
        """

        if not self.options.skipProxy and self.initializeProxy:
            proxy = CredentialInteractions('', '', self.voRole, self.voGroup, self.logger, myproxyAccount=self.serverurl)

            self.proxy = proxy

            self.logger.debug("Checking credentials")
            _, self.proxyfilename = proxy.createNewVomsProxy( timeleftthreshold = 720 )

            if self.requiresREST: #if the command does not contact the REST we can't delegate the proxy
                proxy.myproxyAccount = self.serverurl
                baseurl = self.getUrl(self.instance, resource='info')
                #get the dn of the task workers from the server
                alldns = server_info('delegatedn', self.serverurl, self.proxyfilename, baseurl)

                for serverdn in alldns['services']:
                    proxy.defaultDelegation['serverDN'] = serverdn
                    proxy.defaultDelegation['myProxySvr'] = 'myproxy.cern.ch'

                    self.logger.debug("Registering user credentials for server %s" % serverdn)
                    proxy.createNewMyProxy( timeleftthreshold = 60 * 60 * 24 * RENEW_MYPROXY_THRESHOLD, nokey=True)
        else:
            self.proxyfilename = self.options.skipProxy
            os.environ['X509_USER_PROXY'] = self.options.skipProxy
            self.logger.debug('Skipping proxy creation')
Example #5
    def __call__(self):

        self.logger.info('Getting the tarball hash key')

        tarballdir = glob.glob(self.requestarea+'/inputs/*.tgz')
        if len(tarballdir) != 1:
            self.logger.info('%sError%s: Could not find tarball or there is more than one tarball'% (colors.RED, colors.NORMAL))
            raise ConfigurationException
        tarballdir = tarballdir[0]

        #checking task status

        self.logger.info('Checking task status')
        serverFactory = CRABClient.Emulator.getEmulator('rest')
        server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
        dictresult, status, reason = server.get(self.uri, data = {'workflow': self.cachedinfo['RequestName'], 'verbose': 0})

        dictresult = dictresult['result'][0] #take just the significant part

        if status != 200:
            msg = "Problem retrieving task status:\ninput: %s\noutput: %s\nreason: %s" % (str(self.cachedinfo['RequestName']), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)

        self.logger.info('Task status: %s' % dictresult['status'])
        accepstate = ['KILLED','FINISHED','FAILED','KILLFAILED', 'COMPLETED']
        if dictresult['status'] not in accepstate:
            msg = ('%sError%s: Only tasks with these statuses can be purged: {0}'.format(accepstate) % (colors.RED, colors.NORMAL))
            raise ConfigurationException(msg)

        #getting the cache url
        cacheresult = {}
        scheddresult = {}
        gsisshdict = {}
        if not self.options.scheddonly:
            baseurl = getUrl(self.instance, resource='info')
            cacheurl = server_info('backendurls', self.serverurl, self.proxyfilename, baseurl)
            cacheurl = cacheurl['cacheSSL']
            cacheurldict = {'endpoint': cacheurl, 'pycurl': True}

            ufc = UserFileCache(cacheurldict)
            hashkey = ufc.checksum(tarballdir)
            self.logger.info('Tarball hashkey: %s' %hashkey)
            self.logger.info('Attempting to remove task file from crab server cache')

            try:
                ufcresult = ufc.removeFile(hashkey)
            except HTTPException, re:
                if re.headers.has_key('X-Error-Info') and 'Not such file' in re.headers['X-Error-Info']:
                    self.logger.info('%sError%s: Failed to find task file in crab server cache; the file might have been already purged' % (colors.RED,colors.NORMAL))
                    raise HTTPException, re

            if ufcresult == '':
                self.logger.info('%sSuccess%s: Successfully removed task files from crab server cache' % (colors.GREEN, colors.NORMAL))
                cacheresult = 'SUCCESS'
            else:
                self.logger.info('%sError%s: Failed to remove task files from crab server cache' % (colors.RED, colors.NORMAL))
                cacheresult = 'FAILED'
Example #6
 def checkversion(self, baseurl=None):
     compatibleversion = server_info('version', self.serverurl,
                                     self.proxyfilename, baseurl)
     if __version__ in compatibleversion:
         self.logger.debug("CRABClient version: %s Compatible" %
                           __version__)
     else:
         self.logger.info(
             "%sWARNING%s: Incompatible CRABClient version \"%s\" " %
             (colors.RED, colors.NORMAL, __version__))
         self.logger.info(
             "Server is saying that compatible versions are: %s" %
             compatibleversion)
Example #7
 def handleProxy(self):
     """ Init the user proxy, and delegate it if necessary.
     """
     if not self.options.skipProxy and self.initializeProxy:
         _, self.proxyfilename, proxyobj = initProxy( self.voRole, self.voGroup, self.logger )
         #get the dn of the agents from the server
         alldns = server_info('delegatedn', self.serverurl, self.proxyfilename)
         #for each agentDN received from the server, delegate it!
         #XXX Temporary solution. Need to figure out how to delegate credential to the several WMAgent
         #without forcing the user to insert the password several times
         if 'rest' in alldns and alldns['rest']:
             delegateProxy(alldns['rest'], 'myproxy.cern.ch', proxyobj, self.logger, nokey=True)
         if 'services' in alldns:
             for serverdn in alldns['services']:
                 delegateProxy(serverdn, 'myproxy.cern.ch', proxyobj, self.logger, nokey=False)
     else:
         self.proxyfilename = self.options.skipProxy
         self.logger.debug('Skipping proxy creation')
Example #8
 def handleProxy(self):
     """ 
     Init the user proxy, and delegate it if necessary.
     """
     if not self.options.proxy:
         if self.cmdconf['initializeProxy']:
             self.proxy.setVOGroupVORole(self.voGroup, self.voRole)
             self.proxy.setMyProxyAccount(self.serverurl)
             _, self.proxyfilename = self.proxy.createNewVomsProxy(time_left_threshold = 720, proxy_created_by_crab = self.proxy_created)
             if self.cmdconf['requiresREST']: ## If the command doesn't contact the REST, we can't delegate the proxy.
                 self.proxy.myproxyAccount = self.serverurl
                 baseurl = self.getUrl(self.instance, resource = 'info')
                 ## Get the DN of the task workers from the server.
                 all_task_workers_dns = server_info('delegatedn', self.serverurl, self.proxyfilename, baseurl)
                 for serverdn in all_task_workers_dns['services']:
                     self.proxy.setServerDN(serverdn)
                     self.proxy.setMyProxyServer('myproxy.cern.ch')
                     self.logger.debug("Registering user credentials for server %s" % serverdn)
                     self.proxy.createNewMyProxy(timeleftthreshold = 60 * 60 * 24 * RENEW_MYPROXY_THRESHOLD, nokey = True)
     else:
         self.proxyfilename = self.options.proxy
         os.environ['X509_USER_PROXY'] = self.options.proxy
         self.logger.debug('Skipping proxy creation')
Example #9
    def __call__(self):

        self.logger.info('Getting the tarball hash key')

        tarballdir=glob.glob(self.requestarea+'/inputs/*.tgz')
        if len(tarballdir) != 1 :
            self.logger.info('%sError%s: Could not find tarball or there is more than one tarball'% (colors.RED, colors.NORMAL))
            raise ConfigurationException
        tarballdir=tarballdir[0]

        #checking task status

        self.logger.info('Checking task status')
        server = HTTPRequests(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)
        dictresult, status, reason = server.get(self.uri, data = { 'workflow' : self.cachedinfo['RequestName'], 'verbose': 0 })

        dictresult = dictresult['result'][0] #take just the significant part

        if status != 200:
            msg = "Problem retrieving status:\ninput:%s\noutput:%s\nreason:%s" % (str(self.cachedinfo['RequestName']), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)

        self.logger.info('Task status: %s' % dictresult['status'])
        accepstate = ['KILLED','FINISHED','FAILED','KILLFAILED', 'COMPLETED']
        if dictresult['status'] not in accepstate:
            msg = ('%sError%s: Only tasks with these statuses can be purged: {0}'.format(accepstate) % (colors.RED, colors.NORMAL))
            raise ConfigurationException(msg)

        #getting the cache url

        if not self.options.scheddonly:
            baseurl=self.getUrl(self.instance, resource='info')
            cacheurl=server_info('backendurls', self.serverurl, self.proxyfilename, baseurl)
            cacheurl=cacheurl['cacheSSL']
            cacheurldict={'endpoint' : cacheurl, 'pycurl': True}

            ufc = UserFileCache(cacheurldict)
            hashkey = ufc.checksum(tarballdir)
            self.logger.info('Tarball hashkey: %s' % hashkey)
            self.logger.info('Attempting to clean user file cache')
            ufcresult = ufc.removeFile(hashkey)
            if ufcresult == '' :
                self.logger.info('%sSuccess%s: Successfully removed file from cache' % (colors.GREEN, colors.NORMAL))
            else:
                self.logger.info('%sError%s: Failed to remove the file from cache' % (colors.RED, colors.NORMAL))

        if not self.options.cacheonly:
            self.logger.info('Getting the schedd address')
            baseurl=self.getUrl(self.instance, resource='info')
            try:
                scheddaddress = server_info('scheddaddress', self.serverurl, self.proxyfilename, baseurl, workflow = self.cachedinfo['RequestName'] )
            except HTTPException, he:
                self.logger.info('%sError%s: Failed to get the schedd address' % (colors.RED, colors.NORMAL))
                raise HTTPException, he
            self.logger.debug('%sSuccess%s: Successfully got schedd address' % (colors.GREEN, colors.NORMAL))
            self.logger.debug('Schedd address: %s' % scheddaddress)
            self.logger.info('Attempting to remove task from schedd')

            gssishrm = 'gsissh -o ConnectTimeout=60 -o PasswordAuthentication=no ' + scheddaddress + ' rm -rf ' + self.cachedinfo['RequestName']
            self.logger.debug('gsissh command: %s' % gssishrm)

            delprocess=subprocess.Popen(gssishrm, stdout= subprocess.PIPE, stderr= subprocess.PIPE, shell=True)
            stdout, stderr = delprocess.communicate()
            exitcode = delprocess.returncode

            if exitcode == 0:
                self.logger.info('%sSuccess%s: Successfully removed task from schedd' % (colors.GREEN, colors.NORMAL))
            else:
                self.logger.info('%sError%s: Failed to remove task from schedd' % (colors.RED, colors.NORMAL))
                self.logger.debug('gsissh stdout: %s\ngsissh stderr: %s\ngsissh exitcode: %s' % (stdout,stderr,exitcode))
Example #10
    def __call__(self):
        valid = False
        configmsg = 'Default'

        self.logger.debug("Started submission")
        serverFactory = CRABClient.Emulator.getEmulator('rest')
        # Get some debug parameters
        ######### Check if the user provided unexpected parameters ########
        #init the dictionary with all the known parameters
        all_config_params = [x for x in parameters_mapping['other-config-params']]
        for _, val in parameters_mapping['on-server'].iteritems():
            if val['config']:
                all_config_params.extend(val['config'])
        SpellChecker.DICTIONARY = SpellChecker.train(all_config_params)
        #iterate on the parameters provided by the user
        for section in self.configuration.listSections_():
            for attr in getattr(self.configuration, section).listSections_():
                par = (section + '.' + attr)
                #if the parameter is not know exit, but try to correct it before
                if not SpellChecker.is_correct( par ):
                    msg = 'The parameter %s is not known.\nPlease refer to <https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookCRAB3Tutorial#CRAB_configuration_parameters> for the list of valid parameters.\nSee the ./crab.log file for more details.' % par
                    msg += '' if SpellChecker.correct(par) == par else '\nOr did you mean %s?' % SpellChecker.correct(par)
                    raise ConfigurationException(msg)

        #usertarball and cmsswconfig use this parameter and we should set it up in a correct way
        self.configuration.General.serverUrl = self.serverurl

        uniquerequestname = None

        self.logger.debug("Working on %s" % str(self.requestarea))

        configreq = {}
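        # For each server-side parameter, look up its candidate config attributes (dotted section.attribute
        # paths) in the user configuration, falling back to the declared default when none is set.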
        for param in parameters_mapping['on-server']:
            mustbetype = getattr(types, parameters_mapping['on-server'][param]['type'])
            default = parameters_mapping['on-server'][param]['default']
            config_params = parameters_mapping['on-server'][param]['config']
            for config_param in config_params:
                attrs = config_param.split('.')
                temp = self.configuration
                for attr in attrs:
                    temp = getattr(temp, attr, None)
                    if temp is None:
                        break
                if temp is not None:
                    configreq[param] = temp
                    break
                elif default is not None:
                    configreq[param] = default
                    temp = default
                else:
                    ## Parameter not strictly required.
                    pass
            ## Check that the requestname is of the right type.
            ## This is not checked in SubCommand.validateConfig().
            if param == 'workflow':
                if mustbetype == type(self.requestname):
                    configreq['workflow'] = self.requestname
            ## Translate boolean flags into integers.
            elif param in ['savelogsflag', 'publication', 'nonprodsw', 'useparent', 'ignorelocality', 'saveoutput', 'oneEventMode']:
                configreq[param] = 1 if temp else 0
            ## Translate DBS URL aliases into DBS URLs.
            elif param in ['dbsurl', 'publishdbsurl']:
                if param == 'dbsurl':
                    dbstype = 'reader'
                elif param == 'publishdbsurl':
                    dbstype = 'writer'
                allowed_dbsurls = DBSURLS[dbstype].values()
                allowed_dbsurls_aliases = DBSURLS[dbstype].keys()
                if configreq[param] in allowed_dbsurls_aliases:
                    configreq[param] = DBSURLS[dbstype][configreq[param]]
                elif configreq[param].rstrip('/') in allowed_dbsurls:
                    configreq[param] = configreq[param].rstrip('/')
            elif param == 'scriptexe' and 'scriptexe' in configreq:
                configreq[param] = os.path.basename(configreq[param])

        jobconfig = {}
        self.configuration.JobType.proxyfilename = self.proxyfilename
        self.configuration.JobType.capath = serverFactory.getCACertPath()
        #get the backend URLs from the server external configuration
        serverBackendURLs = server_info('backendurls', self.serverurl, self.proxyfilename, getUrl(self.instance, resource='info'))
        #if cacheSSL is specified in the server external configuration we will use it to upload the sandbox (baseURL will be ignored)
        self.configuration.JobType.filecacheurl = serverBackendURLs['cacheSSL'] if 'cacheSSL' in serverBackendURLs else None
        pluginParams = [ self.configuration, self.logger, os.path.join(self.requestarea, 'inputs') ]
        crab_job_types = getJobTypes()
        if upper(configreq['jobtype']) in crab_job_types:
            plugjobtype = crab_job_types[upper(configreq['jobtype'])](*pluginParams)
            inputfiles, jobconfig, isbchecksum = plugjobtype.run(configreq)
        else:
            fullname = configreq['jobtype']
            basename = os.path.basename(fullname).split('.')[0]
            plugin = addPlugin(fullname)[basename]
            pluginInst = plugin(*pluginParams)
            inputfiles, jobconfig, isbchecksum = pluginInst.run(configreq)

        if configreq['publication']:
            non_edm_files = jobconfig['tfileoutfiles'] + jobconfig['addoutputfiles']
            if non_edm_files:
                msg = "%sWARNING%s: The following output files will not be published, as they are not EDM files: %s" % (colors.RED, colors.NORMAL, non_edm_files)
                self.logger.warning(msg)

        if not configreq['publishname']:
            configreq['publishname'] =  isbchecksum
        else:
            configreq['publishname'] = "%s-%s" %(configreq['publishname'], isbchecksum)
        configreq.update(jobconfig)
        server = serverFactory(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)

        self.logger.info("Sending the request to the server")
        self.logger.debug("Submitting %s " % str(configreq))
        ## TODO: this shouldn't be hard-coded.
        listParams = ['adduserfiles', 'addoutputfiles', 'sitewhitelist', 'siteblacklist', 'blockwhitelist', 'blockblacklist', \
                      'tfileoutfiles', 'edmoutfiles', 'runs', 'lumis', 'userfiles', 'scriptargs', 'extrajdl']
        configreq_encoded = self._encodeRequest(configreq, listParams)
        self.logger.debug('Encoded submit request: %s' % (configreq_encoded))

        dictresult, status, reason = server.put( self.uri, data = configreq_encoded)
        self.logger.debug("Result: %s" % dictresult)
        if status != 200:
            msg = "Problem sending the request:\ninput:%s\noutput:%s\nreason:%s" % (str(configreq), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)
        elif dictresult.has_key("result"):
            uniquerequestname = dictresult["result"][0]["RequestName"]
        else:
            msg = "Problem during submission, no request ID returned:\ninput:%s\noutput:%s\nreason:%s" \
                   % (str(configreq), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)

        tmpsplit = self.serverurl.split(':')
        createCache(self.requestarea, tmpsplit[0], tmpsplit[1] if len(tmpsplit)>1 else '', uniquerequestname,
                    voRole=self.voRole, voGroup=self.voGroup, instance=self.instance,
                    originalConfig = self.configuration)

        self.logger.info("%sSuccess%s: Your task has been delivered to the CRAB3 server." %(colors.GREEN, colors.NORMAL))
        if not self.options.wait:
            self.logger.info("Task name: %s" % uniquerequestname)
            self.logger.info("Please use 'crab status' to check how the submission process proceed")

        if self.options.wait:
            self.checkStatusLoop(server,uniquerequestname)

        self.logger.debug("About to return")

        return {'requestname' : self.requestname , 'uniquerequestname' : uniquerequestname }
Example #11
    def __call__(self):

        self.logger.info('Getting the tarball hash key')

        tarballdir = glob.glob(self.requestarea + '/inputs/*.tgz')
        if len(tarballdir) != 1:
            self.logger.info(
                '%sError%s: Could not find tarball or there is more than one tarball'
                % (colors.RED, colors.NORMAL))
            raise ConfigurationException
        tarballdir = tarballdir[0]

        #checking task status

        self.logger.info('Checking task status')
        serverFactory = CRABClient.Emulator.getEmulator('rest')
        server = serverFactory(self.serverurl,
                               self.proxyfilename,
                               self.proxyfilename,
                               version=__version__)
        dictresult, status, reason = server.get(
            self.uri, data={'workflow': self.cachedinfo['RequestName'], 'verbose': 0})

        dictresult = dictresult['result'][0]  #take just the significant part

        if status != 200:
            msg = "Problem retrieving task status:\ninput: %s\noutput: %s\nreason: %s" % (
                str(self.cachedinfo['RequestName']), str(dictresult),
                str(reason))
            raise RESTCommunicationException(msg)

        self.logger.info('Task status: %s' % dictresult['status'])
        accepstate = [
            'KILLED', 'FINISHED', 'FAILED', 'KILLFAILED', 'COMPLETED'
        ]
        if dictresult['status'] not in accepstate:
            msg = ('%sError%s: Only tasks with these statuses can be purged: {0}'
                   .format(accepstate) % (colors.RED, colors.NORMAL))
            raise ConfigurationException(msg)

        #getting the cache url
        cacheresult = {}
        scheddresult = {}
        gsisshdict = {}
        if not self.options.scheddonly:
            baseurl = getUrl(self.instance, resource='info')
            cacheurl = server_info('backendurls', self.serverurl,
                                   self.proxyfilename, baseurl)
            cacheurl = cacheurl['cacheSSL']
            cacheurldict = {'endpoint': cacheurl, 'pycurl': True}

            ufc = UserFileCache(cacheurldict)
            hashkey = ufc.checksum(tarballdir)
            self.logger.info('Tarball hashkey: %s' % hashkey)
            self.logger.info(
                'Attempting to remove task file from crab server cache')

            try:
                ufcresult = ufc.removeFile(hashkey)
            except HTTPException, re:
                if re.headers.has_key('X-Error-Info') and 'Not such file' in re.headers['X-Error-Info']:
                    self.logger.info(
                        '%sError%s: Failed to find task file in crab server cache; the file might have been already purged'
                        % (colors.RED, colors.NORMAL))
                    raise HTTPException, re

            if ufcresult == '':
                self.logger.info(
                    '%sSuccess%s: Successfully removed task files from crab server cache'
                    % (colors.GREEN, colors.NORMAL))
                cacheresult = 'SUCCESS'
            else:
                self.logger.info(
                    '%sError%s: Failed to remove task files from crab server cache'
                    % (colors.RED, colors.NORMAL))
                cacheresult = 'FAILED'
Example #12
class purge(SubCommand):
    """
    Clean the user schedd and cache for a given task. The user must specify the task name to be purged.
    """

    visible = True

    def __call__(self):

        self.logger.info('Getting the tarball hash key')

        tarballdir = glob.glob(self.requestarea + '/inputs/*.tgz')
        if len(tarballdir) != 1:
            self.logger.info(
                '%sError%s: Could not find tarball or there is more than one tarball'
                % (colors.RED, colors.NORMAL))
            raise ConfigurationException
        tarballdir = tarballdir[0]

        #checking task status

        self.logger.info('Checking task status')
        serverFactory = CRABClient.Emulator.getEmulator('rest')
        server = serverFactory(self.serverurl,
                               self.proxyfilename,
                               self.proxyfilename,
                               version=__version__)
        dictresult, status, reason = server.get(
            self.uri, data={'workflow': self.cachedinfo['RequestName'], 'verbose': 0})

        dictresult = dictresult['result'][0]  #take just the significant part

        if status != 200:
            msg = "Problem retrieving task status:\ninput: %s\noutput: %s\nreason: %s" % (
                str(self.cachedinfo['RequestName']), str(dictresult),
                str(reason))
            raise RESTCommunicationException(msg)

        self.logger.info('Task status: %s' % dictresult['status'])
        accepstate = [
            'KILLED', 'FINISHED', 'FAILED', 'KILLFAILED', 'COMPLETED'
        ]
        if dictresult['status'] not in accepstate:
            msg = ('%sError%s: Only tasks with these statuses can be purged: {0}'
                   .format(accepstate) % (colors.RED, colors.NORMAL))
            raise ConfigurationException(msg)

        #getting the cache url
        cacheresult = {}
        scheddresult = {}
        gsisshdict = {}
        if not self.options.scheddonly:
            baseurl = getUrl(self.instance, resource='info')
            cacheurl = server_info('backendurls', self.serverurl,
                                   self.proxyfilename, baseurl)
            cacheurl = cacheurl['cacheSSL']
            cacheurldict = {'endpoint': cacheurl, 'pycurl': True}

            ufc = UserFileCache(cacheurldict)
            hashkey = ufc.checksum(tarballdir)
            self.logger.info('Tarball hashkey: %s' % hashkey)
            self.logger.info(
                'Attempting to remove task file from crab server cache')

            try:
                ufcresult = ufc.removeFile(hashkey)
            except HTTPException, re:
                if re.headers.has_key('X-Error-Info') and 'Not such file' in re.headers['X-Error-Info']:
                    self.logger.info(
                        '%sError%s: Failed to find task file in crab server cache; the file might have been already purged'
                        % (colors.RED, colors.NORMAL))
                    raise HTTPException, re

            if ufcresult == '':
                self.logger.info(
                    '%sSuccess%s: Successfully removed task files from crab server cache'
                    % (colors.GREEN, colors.NORMAL))
                cacheresult = 'SUCCESS'
            else:
                self.logger.info(
                    '%sError%s: Failed to remove task files from crab server cache'
                    % (colors.RED, colors.NORMAL))
                cacheresult = 'FAILED'

        if not self.options.cacheonly:
            self.logger.info('Getting schedd address')
            baseurl = self.getUrl(self.instance, resource='info')
            try:
                scheddaddress = server_info(
                    'scheddaddress',
                    self.serverurl,
                    self.proxyfilename,
                    baseurl,
                    workflow=self.cachedinfo['RequestName'])
            except HTTPException, he:
                self.logger.info('%sError%s: Failed to get schedd address' %
                                 (colors.RED, colors.NORMAL))
                raise HTTPException, he
            self.logger.debug('%sSuccess%s: Successfully got schedd address' %
                              (colors.GREEN, colors.NORMAL))
            self.logger.debug('Schedd address: %s' % scheddaddress)
            self.logger.info('Attempting to remove task from schedd')

            gssishrm = 'gsissh -o ConnectTimeout=60 -o PasswordAuthentication=no ' + scheddaddress + ' rm -rf ' + self.cachedinfo['RequestName']
            self.logger.debug('gsissh command: %s' % gssishrm)

            delprocess = subprocess.Popen(gssishrm,
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE,
                                          shell=True)
            stdout, stderr = delprocess.communicate()
            exitcode = delprocess.returncode

            if exitcode == 0:
                self.logger.info(
                    '%sSuccess%s: Successfully removed task from schedd' %
                    (colors.GREEN, colors.NORMAL))
                scheddresult = 'SUCCESS'
                gsisshdict = {}
            else:
                self.logger.info(
                    '%sError%s: Failed to remove task from schedd' %
                    (colors.RED, colors.NORMAL))
                scheddresult = 'FAILED'
                self.logger.debug(
                    'gsissh stdout: %s\ngsissh stderr: %s\ngsissh exitcode: %s'
                    % (stdout, stderr, exitcode))
                gsisshdict = {
                    'stdout': stdout,
                    'stderr': stderr,
                    'exitcode': exitcode
                }

        return {
            'cacheresult': cacheresult,
            'scheddresult': scheddresult,
            'gsiresult': gsisshdict
        }
Example #13
    def __call__(self):
        valid = False
        configmsg = 'Default'

        self.logger.debug("Started submission")
        # Get some debug parameters
        oneEventMode = hasattr(self.configuration, 'Debug') and \
                                getattr(self.configuration.Debug, 'oneEventMode', False)
        ######### Check if the user provided unexpected parameters ########
        #init the dictionary with all the known parameters
        SpellChecker.DICTIONARY = SpellChecker.train( [ val['config'] for _, val in self.requestmapper.iteritems() if val['config'] ] + \
                                                      [ x for x in self.otherConfigParams ] )
        #iterate on the parameters provided by the user
        for section in self.configuration.listSections_():
            for attr in getattr(self.configuration, section).listSections_():
                par = (section + '.' + attr)
                #if the parameter is not know exit, but try to correct it before
                if not SpellChecker.is_correct( par ):
                    msg = 'The parameter %s is not known.' % par
                    msg += '' if SpellChecker.correct(par) == par else ' Did you mean %s?' % SpellChecker.correct(par)
                    raise ConfigurationException(msg)

        #usertarball and cmsswconfig use this parameter and we should set it up in a correct way
        self.configuration.General.serverUrl = self.serverurl

        uniquerequestname = None

        self.logger.debug("Working on %s" % str(self.requestarea))

        configreq = {}
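        # For each request parameter, walk its dotted config path down the configuration object,
        # type-check the value, and fall back to the default or fail if the parameter is required.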
        for param in self.requestmapper:
            mustbetype = getattr(types, self.requestmapper[param]['type'])
            if self.requestmapper[param]['config']:
                attrs = self.requestmapper[param]['config'].split('.')
                temp = self.configuration
                for attr in attrs:
                    temp = getattr(temp, attr, None)
                    if temp is None:
                        break
                if temp is not None:
                    if mustbetype == type(temp):
                        configreq[param] = temp
                    else:
                        raise ConfigurationException("Invalid type " + str(type(temp)) + " for parameter " + self.requestmapper[param]['config'] \
                                   + ". It is needed a " + str(mustbetype) + ".")
                elif self.requestmapper[param]['default'] is not None:
                    configreq[param] = self.requestmapper[param]['default']
                    temp = self.requestmapper[param]['default']
                elif self.requestmapper[param]['required']:
                    raise ConfigurationException("Missing parameter " + self.requestmapper[param]['config'] + " from the configuration.")
                else:
                    ## parameter not strictly required
                    pass
            if param == "workflow":
                if mustbetype == type(self.requestname):
                    configreq["workflow"] = self.requestname
            elif param in ['savelogsflag','publication','nonprodsw','ignorelocality','saveoutput']:#TODO use clientmapping to do this
                configreq[param] = 1 if temp else 0
            elif param in ['dbsurl','publishdbsurl']:
                if param == 'dbsurl':
                    dbstype = 'reader'
                elif param == 'publishdbsurl':
                    dbstype = 'writer'
                alloweddbsurls = DBSURLS[dbstype].values()
                alloweddbsurlsaliases = DBSURLS[dbstype].keys()
                if configreq[param] in alloweddbsurlsaliases:
                    configreq[param] = DBSURLS[dbstype][configreq[param]]
                else:
                    if configreq[param].rstrip('/') in alloweddbsurls:
                        configreq[param] = configreq[param].rstrip('/')
                    else:
                        raise ConfigurationException("Invalid argument " + configreq[param] + " for parameter " + self.requestmapper[param]['config'] + " in the configuration.")
        if (configreq['saveoutput'] or configreq['savelogsflag']) and 'asyncdest' not in configreq:
            raise ConfigurationException("Missing parameter " + self.requestmapper['asyncdest']['config'] + " from the configuration.")

        # Add debug parameters to the configreq dict
        configreq['oneEventMode'] = int(oneEventMode)

        jobconfig = {}
        self.configuration.JobType.proxyfilename = self.proxyfilename
        self.configuration.JobType.capath = HTTPRequests.getCACertPath()
        #get the backend URLs from the server external configuration
        serverBackendURLs = server_info('backendurls', self.serverurl, self.proxyfilename, self.getUrl(self.instance, resource='info'))
        #if cacheSSL is specified in the server external configuration we will use it to upload the sandbox (baseURL will be ignored)
        self.configuration.JobType.filecacheurl = serverBackendURLs['cacheSSL'] if 'cacheSSL' in serverBackendURLs else None
        pluginParams = [ self.configuration, self.logger, os.path.join(self.requestarea, 'inputs') ]
        if getattr(self.configuration.JobType, 'pluginName', None) is not None:
            jobtypes    = getJobTypes()
            plugjobtype = jobtypes[upper(self.configuration.JobType.pluginName)](*pluginParams)
            inputfiles, jobconfig, isbchecksum = plugjobtype.run(configreq)
        else:
            fullname = self.configuration.JobType.externalPluginFile
            basename = os.path.basename(fullname).split('.')[0]
            plugin = addPlugin(fullname)[basename]
            pluginInst = plugin(*pluginParams)
            inputfiles, jobconfig, isbchecksum = pluginInst.run(configreq)

        if not configreq['publishname']:
            configreq['publishname'] =  isbchecksum
        else:
            configreq['publishname'] = "%s-%s" %(configreq['publishname'], isbchecksum)
        configreq.update(jobconfig)

        server = HTTPRequests(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)

        self.logger.info("Sending the request to the server")
        self.logger.debug("Submitting %s " % str( configreq ) )

        dictresult, status, reason = server.put( self.uri, data = self._encodeRequest(configreq) )
        self.logger.debug("Result: %s" % dictresult)
        if status != 200:
            msg = "Problem sending the request:\ninput:%s\noutput:%s\nreason:%s" % (str(configreq), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)
        elif dictresult.has_key("result"):
            uniquerequestname = dictresult["result"][0]["RequestName"]
        else:
            msg = "Problem during submission, no request ID returned:\ninput:%s\noutput:%s\nreason:%s" \
                   % (str(configreq), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)

        tmpsplit = self.serverurl.split(':')
        createCache(self.requestarea, tmpsplit[0], tmpsplit[1] if len(tmpsplit)>1 else '', uniquerequestname,
                    voRole=self.voRole, voGroup=self.voGroup, instance=self.instance,
                    originalConfig = self.configuration)

        self.logger.info("%sSuccess%s: Your task has been delivered to the CRAB3 server." %(colors.GREEN, colors.NORMAL))
        if not self.options.wait:
            self.logger.info("Please use 'crab status' to check how the submission process proceed")
            self.logger.debug("Request ID: %s " % uniquerequestname)

        if self.options.wait:
            self.checkStatusLoop(server,uniquerequestname)

        return uniquerequestname
Example #14
    def __call__(self):
        valid = False
        configmsg = 'Default'

        self.logger.debug("Started submission")
        serverFactory = CRABClient.Emulator.getEmulator('rest')
        # Get some debug parameters
        ######### Check if the user provided unexpected parameters ########
        #init the dictionary with all the known parameters
        all_config_params = [
            x for x in parameters_mapping['other-config-params']
        ]
        for _, val in parameters_mapping['on-server'].iteritems():
            if val['config']:
                all_config_params.extend(val['config'])
        SpellChecker.DICTIONARY = SpellChecker.train(all_config_params)
        #iterate on the parameters provided by the user
        for section in self.configuration.listSections_():
            for attr in getattr(self.configuration, section).listSections_():
                par = (section + '.' + attr)
                #if the parameter is not know exit, but try to correct it before
                if not SpellChecker.is_correct(par):
                    msg = 'The parameter %s is not known.\nPlease refer to <https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookCRAB3Tutorial#CRAB_configuration_parameters> for the list of valid parameters.\nSee the ./crab.log file for more details.' % par
                    msg += '' if SpellChecker.correct(par) == par else '\nOr did you mean %s?' % SpellChecker.correct(par)
                    raise ConfigurationException(msg)

        #usertarball and cmsswconfig use this parameter and we should set it up in a correct way
        self.configuration.General.serverUrl = self.serverurl

        uniquerequestname = None

        self.logger.debug("Working on %s" % str(self.requestarea))

        configreq = {}
        for param in parameters_mapping['on-server']:
            mustbetype = getattr(
                types, parameters_mapping['on-server'][param]['type'])
            default = parameters_mapping['on-server'][param]['default']
            config_params = parameters_mapping['on-server'][param]['config']
            for config_param in config_params:
                attrs = config_param.split('.')
                temp = self.configuration
                for attr in attrs:
                    temp = getattr(temp, attr, None)
                    if temp is None:
                        break
                if temp is not None:
                    configreq[param] = temp
                    break
                elif default is not None:
                    configreq[param] = default
                    temp = default
                else:
                    ## Parameter not strictly required.
                    pass
            ## Check that the requestname is of the right type.
            ## This is not checked in SubCommand.validateConfig().
            if param == 'workflow':
                if mustbetype == type(self.requestname):
                    configreq['workflow'] = self.requestname
            ## Translate boolean flags into integers.
            elif param in [
                    'savelogsflag', 'publication', 'nonprodsw', 'useparent',
                    'ignorelocality', 'saveoutput', 'oneEventMode'
            ]:
                configreq[param] = 1 if temp else 0
            ## Translate DBS URL aliases into DBS URLs.
            elif param in ['dbsurl', 'publishdbsurl']:
                if param == 'dbsurl':
                    dbstype = 'reader'
                elif param == 'publishdbsurl':
                    dbstype = 'writer'
                allowed_dbsurls = DBSURLS[dbstype].values()
                allowed_dbsurls_aliases = DBSURLS[dbstype].keys()
                if configreq[param] in allowed_dbsurls_aliases:
                    configreq[param] = DBSURLS[dbstype][configreq[param]]
                elif configreq[param].rstrip('/') in allowed_dbsurls:
                    configreq[param] = configreq[param].rstrip('/')
            elif param == 'scriptexe' and 'scriptexe' in configreq:
                configreq[param] = os.path.basename(configreq[param])

        jobconfig = {}
        self.configuration.JobType.proxyfilename = self.proxyfilename
        self.configuration.JobType.capath = serverFactory.getCACertPath()
        #get the backend URLs from the server external configuration
        serverBackendURLs = server_info('backendurls', self.serverurl,
                                        self.proxyfilename,
                                        getUrl(self.instance, resource='info'))
        #if cacheSSL is specified in the server external configuration we will use it to upload the sandbox (baseURL will be ignored)
        self.configuration.JobType.filecacheurl = serverBackendURLs[
            'cacheSSL'] if 'cacheSSL' in serverBackendURLs else None
        pluginParams = [
            self.configuration, self.logger,
            os.path.join(self.requestarea, 'inputs')
        ]
        crab_job_types = getJobTypes()
        if upper(configreq['jobtype']) in crab_job_types:
            plugjobtype = crab_job_types[upper(
                configreq['jobtype'])](*pluginParams)
            inputfiles, jobconfig, isbchecksum = plugjobtype.run(configreq)
        else:
            fullname = configreq['jobtype']
            basename = os.path.basename(fullname).split('.')[0]
            plugin = addPlugin(fullname)[basename]
            pluginInst = plugin(*pluginParams)
            inputfiles, jobconfig, isbchecksum = pluginInst.run(configreq)

        if configreq['publication']:
            non_edm_files = jobconfig['tfileoutfiles'] + jobconfig[
                'addoutputfiles']
            if non_edm_files:
                msg = "%sWARNING%s: The following output files will not be published, as they are not EDM files: %s" % (
                    colors.RED, colors.NORMAL, non_edm_files)
                self.logger.warning(msg)

        if not configreq['publishname']:
            configreq['publishname'] = isbchecksum
        else:
            configreq['publishname'] = "%s-%s" % (configreq['publishname'],
                                                  isbchecksum)
        configreq.update(jobconfig)
        server = serverFactory(self.serverurl,
                               self.proxyfilename,
                               self.proxyfilename,
                               version=__version__)

        self.logger.info("Sending the request to the server")
        self.logger.debug("Submitting %s " % str(configreq))
        ## TODO: this shouldn't be hard-coded.
        listParams = ['adduserfiles', 'addoutputfiles', 'sitewhitelist', 'siteblacklist', 'blockwhitelist', 'blockblacklist', \
                      'tfileoutfiles', 'edmoutfiles', 'runs', 'lumis', 'userfiles', 'scriptargs', 'extrajdl']
        configreq_encoded = self._encodeRequest(configreq, listParams)
        self.logger.debug('Encoded submit request: %s' % (configreq_encoded))

        dictresult, status, reason = server.put(self.uri,
                                                data=configreq_encoded)
        self.logger.debug("Result: %s" % dictresult)
        if status != 200:
            msg = "Problem sending the request:\ninput:%s\noutput:%s\nreason:%s" % (
                str(configreq), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)
        elif dictresult.has_key("result"):
            uniquerequestname = dictresult["result"][0]["RequestName"]
        else:
            msg = "Problem during submission, no request ID returned:\ninput:%s\noutput:%s\nreason:%s" \
                   % (str(configreq), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)

        tmpsplit = self.serverurl.split(':')
        createCache(self.requestarea,
                    tmpsplit[0],
                    tmpsplit[1] if len(tmpsplit) > 1 else '',
                    uniquerequestname,
                    voRole=self.voRole,
                    voGroup=self.voGroup,
                    instance=self.instance,
                    originalConfig=self.configuration)

        self.logger.info(
            "%sSuccess%s: Your task has been delivered to the CRAB3 server." %
            (colors.GREEN, colors.NORMAL))
        if not self.options.wait:
            self.logger.info("Task name: %s" % uniquerequestname)
            self.logger.info(
                "Please use 'crab status' to check how the submission process proceed"
            )

        if self.options.wait:
            self.checkStatusLoop(server, uniquerequestname)

        self.logger.debug("About to return")

        return {
            'requestname': self.requestname,
            'uniquerequestname': uniquerequestname
        }
Example #15
    def __call__(self):
        valid = False
        configmsg = 'Default'

        if not os.path.isfile(self.options.config):
            raise MissingOptionException("Configuration file '%s' not found" % self.options.config)

        self.logger.debug("Started submission")

        ######### Check if the user provided unexpected parameters ########
        #init the dictionary with all the known parameters
        SpellChecker.DICTIONARY = SpellChecker.train( [ val['config'] for _, val in self.requestmapper.iteritems() if val['config'] ] + \
                                                      [ x for x in self.otherConfigParams ] )
        #iterate on the parameters provided by the user
        for section in self.configuration.listSections_():
            for attr in getattr(self.configuration, section).listSections_():
                par = (section + '.' + attr)
                #if the parameter is not know exit, but try to correct it before
                if not SpellChecker.is_correct( par ):
                    msg = 'The parameter %s is not known.' % par
                    msg += '' if SpellChecker.correct(par) == par else ' Did you mean %s?' % SpellChecker.correct(par)
                    raise ConfigurationException(msg)

        #usertarball and cmsswconfig use this parameter and we should set it up in a correct way
        self.configuration.General.serverUrl = self.serverurl

        uniquerequestname = None

        self.logger.debug("Working on %s" % str(self.requestarea))

        configreq = {}
        for param in self.requestmapper:
            mustbetype = getattr(types, self.requestmapper[param]['type'])
            if self.requestmapper[param]['config']:
                attrs = self.requestmapper[param]['config'].split('.')
                temp = self.configuration
                for attr in attrs:
                    temp = getattr(temp, attr, None)
                    if temp is None:
                        break
                if temp is not None:
                    if mustbetype == type(temp):
                        configreq[param] = temp
                    else:
                        raise ConfigurationException("Invalid type " + str(type(temp)) + " for parameter " + self.requestmapper[param]['config'] \
                                   + ". It is needed a " + str(mustbetype) + ".")
                elif self.requestmapper[param]['default'] is not None:
                    configreq[param] = self.requestmapper[param]['default']
                elif self.requestmapper[param]['required']:
                    raise ConfigurationException("Missing parameter " + self.requestmapper[param]['config'] + " from the configuration.")
                else:
                    ## parameter not strictly required
                    pass
            if param == "workflow":
                if mustbetype == type(self.requestname):
                    configreq["workflow"] = self.requestname
            elif param == "savelogsflag":
                configreq["savelogsflag"] = 1 if temp else 0
            elif param == "publication":
                configreq["publication"] = 1 if temp else 0
            elif param == "blacklistT1":
                blacklistT1 = self.voRole != 't1access'
                #if the user chooses to remove the automatic T1 blacklisting and does not have the t1access role
                if getattr (self.configuration.Site, 'removeT1Blacklisting', False) and blacklistT1:
                    self.logger.info("WARNING: You disabled the T1 automatic blacklisting without having the t1access role")
                    blacklistT1 = False
                configreq["blacklistT1"] = 1 if blacklistT1 else 0

        jobconfig = {}
        self.configuration.JobType.proxyfilename = self.proxyfilename
        self.configuration.JobType.capath = HTTPRequests.getCACertPath()
        #get the backend URLs from the server external configuration
        serverBackendURLs = server_info('backendurls', self.serverurl, self.proxyfilename, self.getUrl(self.instance, resource='info'))
        #if cacheSSL is specified in the server external configuration we will use it to upload the sandbox (baseURL will be ignored)
        self.configuration.JobType.filecacheurl = serverBackendURLs['cacheSSL'] if 'cacheSSL' in serverBackendURLs else None
        #otherwise we will contact the baseurl to get the cache hostname
        self.configuration.JobType.baseurl = serverBackendURLs['baseURL']
        pluginParams = [ self.configuration, self.logger, os.path.join(self.requestarea, 'inputs') ]
        if getattr(self.configuration.JobType, 'pluginName', None) is not None:
            jobtypes    = getJobTypes()
            plugjobtype = jobtypes[upper(self.configuration.JobType.pluginName)](*pluginParams)
            inputfiles, jobconfig, isbchecksum = plugjobtype.run(configreq)
        else:
            fullname = self.configuration.JobType.externalPluginFile
            basename = os.path.basename(fullname).split('.')[0]
            plugin = addPlugin(fullname)[basename]
            pluginInst = plugin(*pluginParams)
            inputfiles, jobconfig, isbchecksum = pluginInst.run(configreq)

        if not configreq['publishname']:
            configreq['publishname'] =  isbchecksum
        else:
            configreq['publishname'] = "%s-%s" %(configreq['publishname'], isbchecksum)
        configreq.update(jobconfig)

        server = HTTPRequests(self.serverurl, self.proxyfilename, self.proxyfilename, version=__version__)

        self.logger.info("Sending the request to the server")
        self.logger.debug("Submitting %s " % str( configreq ) )

        dictresult, status, reason = server.put( self.uri, data = self._encodeRequest(configreq) )
        self.logger.debug("Result: %s" % dictresult)
        if status != 200:
            msg = "Problem sending the request:\ninput:%s\noutput:%s\nreason:%s" % (str(configreq), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)
        elif dictresult.has_key("result"):
            uniquerequestname = dictresult["result"][0]["RequestName"]
        else:
            msg = "Problem during submission, no request ID returned:\ninput:%s\noutput:%s\nreason:%s" \
                   % (str(configreq), str(dictresult), str(reason))
            raise RESTCommunicationException(msg)

        tmpsplit = self.serverurl.split(':')
        createCache(self.requestarea, tmpsplit[0], tmpsplit[1] if len(tmpsplit)>1 else '', uniquerequestname,
                    voRole=self.voRole, voGroup=self.voGroup, instance=self.instance)

        self.logger.info("Submission completed")
        self.logger.debug("Request ID: %s " % uniquerequestname)

        self.logger.debug("Ended submission")

        return uniquerequestname
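
All of the snippets above drive the same helper with the pattern server_info(resource, serverurl, proxyfilename, baseurl, ...), where resource is one of 'delegatedn', 'version', 'backendurls' or 'scheddaddress' and baseurl points to the REST 'info' resource of the chosen instance. The lines below are a minimal sketch of that pattern for the 'backendurls' lookup; the import location, instance name, server host and proxy path are illustrative assumptions, not values taken from the examples.

    # Minimal sketch of the server_info call pattern used throughout the examples above.
    # The import path, instance name, server host and proxy path are assumptions for illustration.
    from CRABClient.ClientUtilities import getUrl, server_info  # assumed module location

    serverurl = 'cmsweb.cern.ch'               # CRAB server host (illustrative)
    proxyfilename = '/tmp/x509up_u12345'       # VOMS proxy file (illustrative)
    baseurl = getUrl('prod', resource='info')  # REST 'info' resource for the chosen instance

    backendurls = server_info('backendurls', serverurl, proxyfilename, baseurl)
    cacheurl = backendurls.get('cacheSSL')     # SSL cache endpoint, if the server provides one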