コード例 #1
0
    def __init__(self, cfg_params, nj_list):
        """Set up server-side post-mortem handling.

        Runs the base PostMortem initialization, loads the client/server
        configuration onto this instance, selects a copy timeout and
        normalizes the remote storage path to be absolute.
        """
        PostMortem.__init__(self, cfg_params, nj_list)

        # Populate client/server configuration attributes on this instance.
        CliServerParams(self)

        # Default copy timeout comes from the LCG helper; LSF/CAF schedulers
        # get a blank timeout instead.
        timeout = setLcgTimeout()
        scheduler_name = common.scheduler.name().upper()
        if scheduler_name in ['LSF', 'CAF']:
            timeout = ' '
        self.copyTout = timeout

        # Make sure the storage path starts with a slash.
        path = self.storage_path
        if path[0] != '/':
            self.storage_path = '/' + path

        return
コード例 #2
0
    def __init__(self, cfg_params, nj_list):
        """Set up server-side post-mortem handling.

        Runs the base PostMortem initialization, loads the client/server
        configuration onto this instance, selects a copy timeout and
        normalizes the remote storage path to be absolute.
        """
        PostMortem.__init__(self, cfg_params, nj_list)

        # init client server params...
        CliServerParams(self)

        # PEP 8: spaces around '=', '!=' and '+' (matches the other copy of
        # this constructor in the file).
        # Default copy timeout comes from the LCG helper; LSF/CAF schedulers
        # get a blank timeout instead.
        self.copyTout = setLcgTimeout()
        if common.scheduler.name().upper() in ['LSF', 'CAF']:
            self.copyTout = ' '

        # Make sure the storage path starts with a slash.
        if self.storage_path[0] != '/':
            self.storage_path = '/' + self.storage_path

        return
コード例 #3
0
        ## get the list of jobs to get logging.info skimmed by failed status
        # NOTE(review): skimDeadList() appears to return the subset of job
        # ids whose status is killed/aborted -- confirm against its definition.
        logginable = self.skimDeadList()

        if self.storage_proto in ['globus']:
            # Warn about unknown job ids; for jobs that are not in the dead
            # list, fall back to fetching loggingInfo one job at a time.
            for id in self.nj_list:
                if id not in self.all_jobs:
                    common.logger.info(
                        'Warning: job # ' + str(id) +
                        ' does not exist! Not possible to ask for postMortem ')
                elif id not in logginable:
                    common.logger.info(
                        'Warning: job # ' + str(id) +
                        ' not killed or aborted! Will get loggingInfo manually '
                    )
                    PostMortem.collectOneLogging(self, id)
            # construct a list of absolute paths of input files
            # and the destinations to copy them to
            sourcesList = []
            destsList = []
            # The task uuid names the remote directory that holds the logs.
            self.taskuuid = str(common._db.queryTask('name'))
            common.logger.debug("Starting globus retrieval for task name: " +
                                self.taskuuid)
            remotedir = os.path.join(self.storage_path, self.taskuuid)
            for i in logginable:
                # Remote source: <storage_path>/<taskuuid>/loggingInfo_<i>.log
                remotelog = remotedir + '/loggingInfo_' + str(i) + '.log'
                sourcesList.append(remotelog)
                # Local destination: <fname_base><i>.LoggingInfo
                fname = self.fname_base + str(i) + '.LoggingInfo'
                destsList.append(fname)

            # try to do the copy
コード例 #4
0
            # Destination interface could not be built; abort loudly.
            msg = "ERROR: Unable to create destination interface \n"
            raise CrabException(msg)

        ## coupling se interfaces
        # Storage-element <-> local interface used for the actual transfers.
        sbi = SBinterface(seEl, loc, logger = common.logger.logger)

        ## get the list of jobs to get logging.info skimmed by failed status
        # NOTE(review): skimDeadList() appears to return the subset of job
        # ids whose status is killed/aborted -- confirm against its definition.
        logginable = self.skimDeadList()

        if self.storage_proto in ['globus']:
            # Warn about unknown job ids; for jobs that are not in the dead
            # list, fall back to fetching loggingInfo one job at a time.
            for id in self.nj_list:
                if id not in self.all_jobs:
                    common.logger.info('Warning: job # ' + str(id) + ' does not exist! Not possible to ask for postMortem ')
                elif id not in logginable:
                    common.logger.info('Warning: job # ' + str(id) + ' not killed or aborted! Will get loggingInfo manually ')
                    PostMortem.collectOneLogging(self,id)
            # construct a list of absolute paths of input files
            # and the destinations to copy them to
            sourcesList = []
            destsList = []
            # The task uuid names the remote directory that holds the logs.
            self.taskuuid = str(common._db.queryTask('name'))
            common.logger.debug( "Starting globus retrieval for task name: " + self.taskuuid)
            remotedir = os.path.join(self.storage_path, self.taskuuid)
            for i in logginable:
                # Remote source: <storage_path>/<taskuuid>/loggingInfo_<i>.log
                remotelog = remotedir + '/loggingInfo_'+str(i)+'.log'
                sourcesList.append(remotelog)
                # Local destination: <fname_base><i>.LoggingInfo
                fname = self.fname_base + str(i) + '.LoggingInfo'
                destsList.append(fname)

            # try to do the copy
            copy_res = None
コード例 #5
0
ファイル: crab.py プロジェクト: ericvaandering/CRAB2
    def initializeActions_(self, opts):
        """
        For each user action instantiate a corresponding
        object and put it in the action dictionary.

        opts maps command-line option strings (e.g. '-submit') to their
        values (a range string, 'all', or None); self.actions is filled
        with one action object per recognized option.
        """

        for opt in opts.keys():

            val = opts[opt]


            if (  opt == '-create' ):
                if self.flag_continue:
                    msg =  'Cannot create an existing project. \n'
                    raise CrabException(msg)
                if val and val != 'all':
                    msg  = 'Per default, CRAB will create all jobs as specified in the crab.cfg file, not the command line!'
                    common.logger.info(msg)
                    msg  = 'Submission will still take into account the number of jobs specified on the command line!\n'
                    common.logger.info(msg)
                ncjobs = 'all'
            #wmbs 
                if int(self.automation) == 1:
                    msg  = 'Your jobs will be created on the server. Jobs informations will be known later.'
                    common.logger.info(msg)
             # wmbs 
                    from Requestor import Requestor
                    # Instantiate Creator object
                    self.creator = Requestor(self.job_type_name,
                                           self.cfg_params,
                                           ncjobs)
                    self.actions[opt] = self.creator
                
                    # Create and initialize JobList
                    common.job_list = JobList(1,self.creator.jobType())

                else:
                    from Creator import Creator
                    # Instantiate Creator object
                    self.creator = Creator(self.job_type_name,
                                           self.cfg_params,
                                           ncjobs)
                    self.actions[opt] = self.creator
                
                    # create jobs in the DB
                    common._db.createJobs_(self.creator.nJobsL())
                    # Create and initialize JobList
                    common.job_list = JobList(common._db.nJobs(),
                                                    self.creator.jobType())
                    ## jobs specs not needed if using WMBS      
                    self.creator.writeJobsSpecsToDB()

                taskinfo={}
                taskinfo['cfgName'] = common.work_space.jobDir()+"/"+self.creator.jobType().configFilename()
                taskinfo['dataset'] = self.cfg_params['CMSSW.datasetpath']

                common.job_list.setScriptNames(self.job_type_name+'.sh')
                common.job_list.setCfgNames(self.creator.jobType().configFilename())
                common._db.updateTask_(taskinfo)
                pass

            elif ( opt == '-submit' ):
                ## Dealt with val == int so that -submit N means submit N jobs and not job # N
                if (self.UseServer== 1):
                    from SubmitterServer import SubmitterServer
                    self.actions[opt] = SubmitterServer(self.cfg_params, self.parseRange_(val), val)
                else:
                    from Submitter import Submitter
                    # Instantiate Submitter object
                    self.actions[opt] = Submitter(self.cfg_params, self.parseRange_(val), val)
                    # Create and initialize JobList
                    if len(common.job_list) == 0 :
                        common.job_list = JobList(common._db.nJobs(),
                                                  None)
                        pass
                    pass

            elif ( opt == '-list' ):
                '''
                Print the relevant infos of a range-all jobs/task
                '''
                jobs = self.parseRange_(val)

                common._db.dump(jobs)
                pass

            elif ( opt == '-printId' ):
                '''
                Print the unique name of the task if crab is used as client
                Print the SID list of all the jobs
                '''
                #jid=False
                #if val == 'full': jid=True
                jid=True
                common._db.queryID(self.UseServer,jid)

            elif ( opt == '-status' ):
                if (self.UseServer== 1):
                    from StatusServer import StatusServer
                    self.actions[opt] = StatusServer(self.cfg_params, val)
                else:
                    from Status import Status
                    self.actions[opt] = Status(self.cfg_params, val)

            elif ( opt == '-kill' ):

                if val:
                    if val =='all':
                        jobs = common._db.nJobs("list")
                    else:
                        jobs = self.parseRange_(val)
                    pass
                else:
                    raise CrabException("Warning: with '-kill' you _MUST_ specify a job range or 'all'")
                    pass

                if (self.UseServer== 1):
                    from KillerServer import KillerServer
                    self.actions[opt] = KillerServer(self.cfg_params,jobs)
                else:
                    from Killer import Killer
                    self.actions[opt] = Killer(self.cfg_params,jobs)

            elif ( opt == '-stopWorkflow' ):

                if self.UseServer==1 and int(self.automation)==1:
                    from WorkflowHandler import WorkflowHandler
                    self.actions[opt] = WorkflowHandler(self.cfg_params)

                else:

                    raise CrabException("ERROR: you can use this command only if you are running an automated workflow using CRABSERVER")
                    pass

            elif ( opt == '-getoutput' or opt == '-get'):

                if val=='all' or val==None or val=='':
                    jobs = 'all'
                else:
                    jobs = self.parseRange_(val)

                if (self.UseServer== 1):
                    from GetOutputServer import GetOutputServer
                    self.actions[opt] = GetOutputServer(self.cfg_params,jobs)
                else:
                    from GetOutput import GetOutput
                    self.actions[opt] = GetOutput(self.cfg_params,jobs)

            elif ( opt == '-resubmit' ):
                if val:
                    if val=='all':
                        jobs = common._db.nJobs('list')
                    # BUGFIX: this must be 'elif' -- with a plain 'if',
                    # val=='all' set jobs above and then fell into the
                    # 'else' below, overwriting the full job list with
                    # parseRange_('all').
                    elif val=='bad':
                        jobs = 'bad'
                    else:
                        jobs = self.parseRange_(val)

                    if (self.UseServer== 1):
                        from ResubmitterServer import ResubmitterServer
                        self.actions[opt] = ResubmitterServer(self.cfg_params, jobs)
                    else:
                        from Resubmitter import Resubmitter
                        self.actions[opt] = Resubmitter(self.cfg_params, jobs)
                else:
                    common.logger.info("Warning: with '-resubmit' you _MUST_ specify a job range or 'all'")
                    common.logger.info("WARNING: _all_ job specified in the range will be resubmitted!!!")
                    pass
                pass

            elif ( opt == '-forceResubmit' ):
                if val:
                    if val=='all':
                        jobs = common._db.nJobs('list')
                    else:
                        jobs = self.parseRange_(val)

                    if (self.UseServer== 1):
                        from ResubmitterServerForced import ResubmitterServerForced
                        self.actions[opt] = ResubmitterServerForced(self.cfg_params, jobs)
                    else:
                        from ResubmitterForced import ResubmitterForced
                        self.actions[opt] = ResubmitterForced(self.cfg_params, jobs)
                else:
                    # BUGFIX: the message used to name '-resubmit' (copy/paste
                    # from the branch above); this is the -forceResubmit branch.
                    common.logger.info("Warning: with '-forceResubmit' you _MUST_ specify a job range or 'all'")
                    common.logger.info("WARNING: _all_ job specified in the range will be resubmitted!!!")
                    pass
                pass

            elif ( opt in ['-testJdl','-listMatch', '-match']):
                jobs = self.parseRange_(val)

                if len(jobs) != 0:
                    # Instantiate Checker object
                    from Checker import Checker
                    self.actions[opt] = Checker(self.cfg_params, jobs)

            elif ( opt == '-postMortem' ):

                if val:
                    jobs = self.parseRange_(val)
                    if len( jobs ) > 1:
                        raise CrabException("Only single job id allowed for %s command!" % opt )
                else:
                    raise CrabException("Warning: please specify a job id")
                    pass

                if (self.UseServer== 1):
                    from PostMortemServer import PostMortemServer
                    self.actions[opt] = PostMortemServer(self.cfg_params,jobs)
                else:
                    from PostMortem import PostMortem
                    self.actions[opt] = PostMortem(self.cfg_params, jobs)

            elif ( opt == '-clean' ):
                if val != None:
                    raise CrabException("No range allowed for '-clean'")
                if (self.UseServer== 1):
                    from CleanerServer import CleanerServer
                    self.actions[opt] = CleanerServer(self.cfg_params)
                else:
                    from Cleaner import Cleaner
                    self.actions[opt] = Cleaner(self.cfg_params)

            elif ( opt in ['-printJdl','-createJdl']):
                """
                Materialize JDL
                """
                ## Temporary:
                if opt == '-printJdl':
                    common.logger.info("WARNING: -printJdl option is deprecated : please use -createJdl \n")
                if val =='all' or val == None or val == '':
                    jobs = common._db.nJobs("list")
                else:
                    jobs = self.parseRange_(val)
                pass
                from JdlWriter import JdlWriter
                self.actions[opt] = JdlWriter(self.cfg_params, jobs)

            elif ( opt == '-publish'):
                from Publisher import Publisher
                self.actions[opt] = Publisher(self.cfg_params)
            ### FEDE FOR PUBLICATION WITH NO INPUT FILES### 
            elif ( opt == '-publishNoInp'):
                from Publisher import Publisher
                self.cfg_params['USER.no_inp'] = 1
                self.actions[opt] = Publisher(self.cfg_params)

            elif ( opt == '-checkPublication' ):
                from InspectDBS import InspectDBS
                self.actions[opt] = InspectDBS(self.cfg_params)

            elif ( opt == '-copyData' ):
                if val =='all' or val == None or val == '':
                    jobs = common._db.nJobs("list")
                else:
                    jobs = self.parseRange_(val)

                if (self.UseServer== 1):
                    from StatusServer import StatusServer
                    status = StatusServer(self.cfg_params)
                else:
                 #   from Status import Status
                 #   status = Status(self.cfg_params)
                    status=None
                from CopyData import CopyData
                self.actions[opt] = CopyData(self.cfg_params, jobs,status)

            elif ( opt == '-validateCfg' ):
                from ValidateCfg import ValidateCfg
                config= {'pset' : self.cfg_params.get('CMSSW.pset','None')}
                if val :
                    config['pset']=val
                self.actions[opt] = ValidateCfg(config)

            elif ( opt == '-renewCredential' ):
                if (self.UseServer== 1):
                    from CredentialRenew import CredentialRenew
                    self.actions[opt] = CredentialRenew(self.cfg_params)
                else:
                    msg = "The option [-renewProxy] can be used only with the server modality!"
                    raise CrabException(msg)
            elif ( opt == '-report' ):
                if (self.UseServer== 1):
                    from StatusServer import StatusServer
                    StatusServer(self.cfg_params).query(display=False)
                else:
                    # cause a core dump....
                    #from Status import Status
                    #Status(self.cfg_params).query(display=False)
                    pass
                from Reporter import Reporter
                self.actions[opt] = Reporter(self.cfg_params)
            elif ( opt == '-cleanCache' ):
                from CacheCleaner import CacheCleaner
                self.actions[opt] = CacheCleaner()

            elif ( opt == '-uploadLog' ):
                jobid = -1
                jobs = self.parseRange_(val)
                if len( jobs ) > 1:
                    common.logger.info("Only single job id allowed for %s command!" % opt )
                elif len (jobs) == 1:
                    jobid = jobs[0]
                from ReportUploader import ReportUploader
                self.actions[opt] = ReportUploader( self.cfg_params, jobid )

            pass
        return