def Render(self):
    """Render the table of entries.

    Each entry's widget is rendered (optionally wrapped in a Submitter
    form posting back to self._url), spliced into HTML_ENTRY, and the
    accumulated HTML is finally wrapped in HTML_TABLE.

    Returns the render object from Widget.Render() with its
    html / js / headers / helps fields extended.
    """
    render = Widget.Render(self)
    for e in self.entries:
        title, widget, comment, use_submitter = e
        # `id`, `props`, `title`, `comment` and `widget_html` are consumed
        # by HTML_ENTRY via the locals() substitution below, so these exact
        # names matter; `id` intentionally shadows the builtin here.
        id = self.id
        props = ''
        if use_submitter:
            # Wrap the widget in a submit form targeting our URL.
            submit = Submitter(self._url)
            submit += widget
        else:
            submit = widget
        widget_r = submit.Render()
        widget_html = widget_r.html
        html = HTML_ENTRY % (locals())
        render.html += html
        render.js += widget_r.js
        render.headers += widget_r.headers
        render.helps += widget_r.helps
    # Wrap all accumulated rows in the table skeleton.
    render.html = HTML_TABLE % (render.html)
    render.headers += HEADERS
    return render
def __init__(self, cfg_params, parsed_range, val):
    """Set up server-mode submission state, then delegate to Submitter.

    Parameters:
        cfg_params   -- crab.cfg configuration dictionary
        parsed_range -- job range already parsed by the caller
        val          -- raw command-line value of the submit option
    """
    self.srvCfg = {}
    self.cfg_params = cfg_params
    self.submitRange = []
    # Default credential type; switched to 'Token' for LSF/CAF below.
    self.credentialType = 'Proxy'
    self.copyTout = setLcgTimeout()
    self.extended = int(cfg_params.get('CMSSW.extend', 0))
    #wmbs
    # automation == 1 means the task is only partially specified here and
    # is completed server-side.
    self.type = int(cfg_params.get('WMBS.automation', 0))
    self.taskType = 'fullySpecified'
    if self.type == 1:
        self.taskType = 'partiallySpecified'
    if common.scheduler.name().upper() in ['LSF', 'CAF']:
        self.credentialType = 'Token'
        self.copyTout = ' '
    Submitter.__init__(self, cfg_params, parsed_range, val)
    # init client server params...
    CliServerParams(self)
    # path fix: storage_path must be absolute
    if self.storage_path[0] != '/':
        self.storage_path = '/' + self.storage_path
    self.taskuuid = str(common._db.queryTask('name'))
    self.limitJobs = False
    return
def __init__(self, cfg_params, parsed_range, val):
    """Set up server-mode submission state, then delegate to Submitter.

    Rejects the remoteGlidein scheduler outright, since it is only
    supported in client mode (use_server=0).

    Parameters:
        cfg_params   -- crab.cfg configuration dictionary
        parsed_range -- job range already parsed by the caller
        val          -- raw command-line value of the submit option

    Raises:
        CrabException -- if the scheduler is remoteGlidein.
    """
    self.srvCfg = {}
    self.cfg_params = cfg_params
    self.submitRange = []
    # Default credential type; switched to 'Token' for LSF/CAF below.
    self.credentialType = 'Proxy'
    self.copyTout = setLcgTimeout()
    self.extended = int(cfg_params.get('CMSSW.extend', 0))
    #wmbs
    # automation == 1 means the task is only partially specified here and
    # is completed server-side.
    self.type = int(cfg_params.get('WMBS.automation', 0))
    self.taskType = 'fullySpecified'
    if self.type == 1:
        self.taskType = 'partiallySpecified'
    if common.scheduler.name().upper() in ['LSF', 'CAF']:
        self.credentialType = 'Token'
        self.copyTout = ' '
    if common.scheduler.name().upper() == 'REMOTEGLIDEIN':
        msg = "FATAL ERROR: remoteGlidein scheduler requires use_server=0"
        raise CrabException(msg)
    Submitter.__init__(self, cfg_params, parsed_range, val)
    # init client server params...
    CliServerParams(self)
    # path fix: storage_path must be absolute
    if self.storage_path[0] != '/':
        self.storage_path = '/' + self.storage_path
    self.taskuuid = str(common._db.queryTask('name'))
    self.limitJobs = False
    return
def __init__(self, fields='', formats=None, eSurl='http://localhost:9200', chunk_size=900):
    """Initialise the Elasticsearch-backed submitter.

    Parameters:
        fields     -- field specification forwarded to Submitter.__init__
        formats    -- optional format mapping forwarded to Submitter.__init__
        eSurl      -- Elasticsearch endpoint URL
        chunk_size -- number of buffered actions per bulk flush
    """
    # Bug fix: the original default `formats={}` was a shared mutable
    # default, so all instances constructed without `formats` shared (and
    # could cross-contaminate) one dict. Use None as the sentinel instead.
    if formats is None:
        formats = {}
    Submitter.__init__(self, fields, formats)
    self.eS = Elasticsearch(eSurl)
    # here we need to check if elasticsearch connection is alive, how?
    self.actions = []
    self.chunk_size = chunk_size
    return
def __init__(self, cfg_params, jobs):
    """Build the list of resubmittable jobs and delegate to Submitter."""
    self.cfg_params = cfg_params
    allowed = self.checkAllowedJob(jobs, [])
    common.logger.info('Jobs ' + str(allowed) + ' will be resubmitted')
    Submitter.__init__(self, cfg_params, allowed, 'range')
def __init__(self, cfg_params, jobs):
    """Filter `jobs` down to the resubmittable ones and hand off to Submitter."""
    self.cfg_params = cfg_params
    nj_list = self.checkAllowedJob(jobs, [])
    msg = 'Jobs ' + str(nj_list) + ' will be resubmitted'
    common.logger.info(msg)
    Submitter.__init__(self, cfg_params, nj_list, 'range')
def __init__(self, cfg_params, jobs):
    """Record copy/check flags, pick the resubmittable jobs, delegate to Submitter."""
    self.cfg_params = cfg_params
    self.copy_data = int(cfg_params.get('USER.copy_data', 0))
    self.check_RemoteDir = int(cfg_params.get('USER.check_user_remote_dir', 0))
    resub_jobs = self.checkAllowedJob(jobs, [])
    common.logger.info('Jobs ' + str(resub_jobs) + ' will be resubmitted')
    Submitter.__init__(self, cfg_params, resub_jobs, 'range')
def __init__(self, cfg_params, jobs):
    """Select jobs to resubmit ('bad' means the failed-job set) and delegate.

    Records the USER copy/check flags, then builds the job list either via
    checkBadJob (when jobs == 'bad') or checkAllowedJob.
    """
    self.cfg_params = cfg_params
    self.copy_data = int(cfg_params.get('USER.copy_data', 0))
    self.check_RemoteDir = int(cfg_params.get('USER.check_user_remote_dir', 0))
    if jobs == 'bad':
        nj_list = self.checkBadJob([])
    else:
        nj_list = self.checkAllowedJob(jobs, [])
    common.logger.info('Jobs ' + str(nj_list) + ' will be resubmitted')
    Submitter.__init__(self, cfg_params, nj_list, 'range')
def Add(self, title, widget, comment):
    """Add a row: the widget wrapped in a Submitter form, plus any
    configured constants as hidden form fields."""
    submit = Submitter(self._url)
    if not self.constants:
        submit += widget
    else:
        # Bundle the widget and one hidden field per constant together.
        wrapper = Container()
        wrapper += widget
        for name in self.constants:
            wrapper += HiddenField({'name': name, 'value': self.constants[name]})
        submit += wrapper
    return PropsTable.Add(self, title, submit, comment)
def Add(self, title, widget, comment, use_submitter=True):
    """Add a row holding `widget` (plus hidden constant fields).

    Parameters:
        title         -- row title
        widget        -- the widget to add
        comment       -- row comment
        use_submitter -- wrap the widget in a Submitter form (default);
                         otherwise use a plain Container.

    Returns whatever PropsTable.Add returns.
    """
    if use_submitter:
        submit = Submitter(self.url)
    else:
        submit = Container()
    submit += widget
    # Add constants as hidden form fields
    for key in self.constants:
        submit += HiddenField({'name': key, 'value': self.constants[key]})
    # Append the widget. Bug fix: propagate the parent's return value,
    # consistent with the sibling Add() implementation that returns it
    # (the original silently returned None).
    return PropsTable.Add(self, title, submit, comment)
def initializeActions_(self, opts):
    """
    For each user action instantiate a corresponding
    object and put it in the action dictionary.
    """
    for opt, val in opts.items():
        if opt == '-create':
            # Resolve the creation bunch size: missing value means 'all',
            # an integer string is converted, 'all' is kept verbatim,
            # anything else is rejected.
            if not val:
                ncjobs = 'all'
            elif isInt(val):
                ncjobs = int(val)
            elif val == 'all':
                ncjobs = val
            else:
                msg = 'Bad creation bunch size <' + str(val) + '>\n'
                msg += ' Must be an integer or "all"'
                msg += ' Generic range is not allowed"'
                raise SkimException(msg)
            # A literal 0 (e.g. "-create 0") creates nothing.
            if ncjobs != 0:
                # Instantiate Creator object
                self.creator = Creator(self.cfg_params, ncjobs)
                self.actions[opt] = self.creator
        elif opt == '-submit':
            self.actions[opt] = Submitter(self.cfg_params)
def initializeActions_(self, opts):
    """
    For each user action instantiate a corresponding
    object and put it in the action dictionary.

    `opts` maps option strings (e.g. '-create', '-submit') to their raw
    command-line values. Most branches choose between a client-mode and a
    server-mode implementation based on self.UseServer, importing the
    action class lazily so only the needed modules are loaded.
    """
    for opt in opts.keys():
        val = opts[opt]
        if ( opt == '-create' ):
            if self.flag_continue:
                msg = 'Cannot create an existing project. \n'
                raise CrabException(msg)
            if val and val != 'all':
                msg = 'Per default, CRAB will create all jobs as specified in the crab.cfg file, not the command line!'
                common.logger.info(msg)
                msg = 'Submission will still take into account the number of jobs specified on the command line!\n'
                common.logger.info(msg)
            # Creation always covers all jobs from the config.
            ncjobs = 'all'
            #wmbs
            if int(self.automation) == 1:
                msg = 'Your jobs will be created on the server. Jobs informations will be known later.'
                common.logger.info(msg)
                # wmbs
                from Requestor import Requestor
                # Instantiate Creator object
                self.creator = Requestor(self.job_type_name, self.cfg_params, ncjobs)
                self.actions[opt] = self.creator
                # Create and initialize JobList
                common.job_list = JobList(1, self.creator.jobType())
            else:
                from Creator import Creator
                # Instantiate Creator object
                self.creator = Creator(self.job_type_name, self.cfg_params, ncjobs)
                self.actions[opt] = self.creator
                # create jobs in the DB
                common._db.createJobs_(self.creator.nJobsL())
                # Create and initialize JobList
                common.job_list = JobList(common._db.nJobs(), self.creator.jobType())
                ## jobs specs not needed if using WMBS
                self.creator.writeJobsSpecsToDB()
                taskinfo = {}
                taskinfo['cfgName'] = common.work_space.jobDir() + "/" + self.creator.jobType().configFilename()
                taskinfo['dataset'] = self.cfg_params['CMSSW.datasetpath']
                common.job_list.setScriptNames(self.job_type_name + '.sh')
                common.job_list.setCfgNames(self.creator.jobType().configFilename())
                common._db.updateTask_(taskinfo)
            pass
        elif ( opt == '-submit' ):
            ## Dealt with val == int so that -submit N means submit N jobs and not job # N
            if (self.UseServer == 1):
                from SubmitterServer import SubmitterServer
                self.actions[opt] = SubmitterServer(self.cfg_params, self.parseRange_(val), val)
            else:
                from Submitter import Submitter
                # Instantiate Submitter object
                self.actions[opt] = Submitter(self.cfg_params, self.parseRange_(val), val)
            # Create and initialize JobList
            if len(common.job_list) == 0:
                common.job_list = JobList(common._db.nJobs(), None)
                pass
            pass
        elif ( opt == '-list' ):
            # Print the relevant infos of a range-all jobs/task
            jobs = self.parseRange_(val)
            common._db.dump(jobs)
            pass
        elif ( opt == '-printId' ):
            # Print the unique name of the task if crab is used as client
            # Print the SID list of all the jobs
            jid = True
            common._db.queryID(self.UseServer, jid)
        elif ( opt == '-status' ):
            if (self.UseServer == 1):
                from StatusServer import StatusServer
                self.actions[opt] = StatusServer(self.cfg_params, val)
            else:
                from Status import Status
                self.actions[opt] = Status(self.cfg_params, val)
        elif ( opt == '-kill' ):
            if val:
                if val == 'all':
                    jobs = common._db.nJobs("list")
                else:
                    jobs = self.parseRange_(val)
                    pass
            else:
                raise CrabException("Warning: with '-kill' you _MUST_ specify a job range or 'all'")
                pass
            if (self.UseServer == 1):
                from KillerServer import KillerServer
                self.actions[opt] = KillerServer(self.cfg_params, jobs)
            else:
                from Killer import Killer
                self.actions[opt] = Killer(self.cfg_params, jobs)
        elif ( opt == '-stopWorkflow' ):
            if self.UseServer == 1 and int(self.automation) == 1:
                from WorkflowHandler import WorkflowHandler
                self.actions[opt] = WorkflowHandler(self.cfg_params)
            else:
                raise CrabException("ERROR: you can use this command only if you are running an automated workflow using CRABSERVER")
            pass
        elif ( opt == '-getoutput' or opt == '-get'):
            if val == 'all' or val == None or val == '':
                jobs = 'all'
            else:
                jobs = self.parseRange_(val)
            if (self.UseServer == 1):
                from GetOutputServer import GetOutputServer
                self.actions[opt] = GetOutputServer(self.cfg_params, jobs)
            else:
                from GetOutput import GetOutput
                self.actions[opt] = GetOutput(self.cfg_params, jobs)
        elif ( opt == '-resubmit' ):
            if val:
                if val == 'all':
                    jobs = common._db.nJobs('list')
                # Bug fix: this was a second independent `if`, so for
                # val == 'all' the `else` below clobbered `jobs` with
                # parseRange_('all'). `elif` matches the -forceResubmit
                # branch and preserves the 'all' job list.
                elif val == 'bad':
                    jobs = 'bad'
                else:
                    jobs = self.parseRange_(val)
                if (self.UseServer == 1):
                    from ResubmitterServer import ResubmitterServer
                    self.actions[opt] = ResubmitterServer(self.cfg_params, jobs)
                else:
                    from Resubmitter import Resubmitter
                    self.actions[opt] = Resubmitter(self.cfg_params, jobs)
            else:
                common.logger.info("Warning: with '-resubmit' you _MUST_ specify a job range or 'all'")
                common.logger.info("WARNING: _all_ job specified in the range will be resubmitted!!!")
                pass
            pass
        elif ( opt == '-forceResubmit' ):
            if val:
                if val == 'all':
                    jobs = common._db.nJobs('list')
                else:
                    jobs = self.parseRange_(val)
                if (self.UseServer == 1):
                    from ResubmitterServerForced import ResubmitterServerForced
                    self.actions[opt] = ResubmitterServerForced(self.cfg_params, jobs)
                else:
                    from ResubmitterForced import ResubmitterForced
                    self.actions[opt] = ResubmitterForced(self.cfg_params, jobs)
            else:
                common.logger.info("Warning: with '-resubmit' you _MUST_ specify a job range or 'all'")
                common.logger.info("WARNING: _all_ job specified in the range will be resubmitted!!!")
                pass
            pass
        elif ( opt in ['-testJdl', '-listMatch', '-match']):
            jobs = self.parseRange_(val)
            if len(jobs) != 0:
                # Instantiate Checker object
                from Checker import Checker
                self.actions[opt] = Checker(self.cfg_params, jobs)
        elif ( opt == '-postMortem' ):
            if val:
                jobs = self.parseRange_(val)
                if len(jobs) > 1:
                    raise CrabException("Only single job id allowed for %s command!" % opt )
            else:
                raise CrabException("Warning: please specify a job id")
            pass
            if (self.UseServer == 1):
                from PostMortemServer import PostMortemServer
                self.actions[opt] = PostMortemServer(self.cfg_params, jobs)
            else:
                from PostMortem import PostMortem
                self.actions[opt] = PostMortem(self.cfg_params, jobs)
        elif ( opt == '-clean' ):
            if val != None:
                raise CrabException("No range allowed for '-clean'")
            if (self.UseServer == 1):
                from CleanerServer import CleanerServer
                self.actions[opt] = CleanerServer(self.cfg_params)
            else:
                from Cleaner import Cleaner
                self.actions[opt] = Cleaner(self.cfg_params)
        elif ( opt in ['-printJdl', '-createJdl']):
            # Materialize JDL
            ## Temporary:
            if opt == '-printJdl':
                common.logger.info("WARNING: -printJdl option is deprecated : please use -createJdl \n")
            if val == 'all' or val == None or val == '':
                jobs = common._db.nJobs("list")
            else:
                jobs = self.parseRange_(val)
                pass
            from JdlWriter import JdlWriter
            self.actions[opt] = JdlWriter(self.cfg_params, jobs)
        elif ( opt == '-publish'):
            from Publisher import Publisher
            self.actions[opt] = Publisher(self.cfg_params)
        ### FEDE FOR PUBLICATION WITH NO INPUT FILES###
        elif ( opt == '-publishNoInp'):
            from Publisher import Publisher
            self.cfg_params['USER.no_inp'] = 1
            self.actions[opt] = Publisher(self.cfg_params)
        elif ( opt == '-checkPublication' ):
            from InspectDBS import InspectDBS
            self.actions[opt] = InspectDBS(self.cfg_params)
        elif ( opt == '-copyData' ):
            if val == 'all' or val == None or val == '':
                jobs = common._db.nJobs("list")
            else:
                jobs = self.parseRange_(val)
            if (self.UseServer == 1):
                from StatusServer import StatusServer
                status = StatusServer(self.cfg_params)
            else:
                # Client-mode Status is deliberately skipped here
                # (the commented-out call was known to be problematic).
                status = None
            from CopyData import CopyData
            self.actions[opt] = CopyData(self.cfg_params, jobs, status)
        elif ( opt == '-validateCfg' ):
            from ValidateCfg import ValidateCfg
            config = {'pset': self.cfg_params.get('CMSSW.pset', 'None')}
            if val:
                config['pset'] = val
            self.actions[opt] = ValidateCfg(config)
        elif ( opt == '-renewCredential' ):
            if (self.UseServer == 1):
                from CredentialRenew import CredentialRenew
                self.actions[opt] = CredentialRenew(self.cfg_params)
            else:
                msg = "The option [-renewProxy] can be used only with the server modality!"
                raise CrabException(msg)
        elif ( opt == '-report' ):
            if (self.UseServer == 1):
                from StatusServer import StatusServer
                StatusServer(self.cfg_params).query(display=False)
            else:
                # Client-mode Status.query used to cause a core dump,
                # so no status refresh is done here.
                pass
            from Reporter import Reporter
            self.actions[opt] = Reporter(self.cfg_params)
        elif ( opt == '-cleanCache' ):
            from CacheCleaner import CacheCleaner
            self.actions[opt] = CacheCleaner()
        elif ( opt == '-uploadLog' ):
            jobid = -1
            jobs = self.parseRange_(val)
            if len(jobs) > 1:
                common.logger.info("Only single job id allowed for %s command!" % opt )
            elif len(jobs) == 1:
                jobid = jobs[0]
            from ReportUploader import ReportUploader
            self.actions[opt] = ReportUploader( self.cfg_params, jobid )
            pass
    return