def main(): """ Main """ options = getOptions() # If you want crabCommand to be quiet: #from CRABClient.UserUtilities import setConsoleLogLevel, LOGLEVEL_MUTE #setConsoleLogLevel(LOGLEVEL_MUTE) # With this function you can change the console log level at any time. # To retrieve the current crabCommand console log level: #from CRABClient.UserUtilities import getConsoleLogLevel #crabConsoleLogLevel = getConsoleLogLevel() # If you want to retrieve the CRAB loggers: #from CRABClient.UserUtilities import getLoggers #crabLoggers = getLoggers() # Execute the command with its arguments for each directory inside the work area. for dir in os.listdir(options.workArea): projDir = os.path.join(options.workArea, dir) if not os.path.isdir(projDir): continue # Execute the crab command. msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts) print "-"*len(msg) print msg print "-"*len(msg) try: crabCommand(options.crabCmd, dir = projDir, *options.crabCmdOpts.split()) except HTTPException as hte: print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers) except ClientException as cle: print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
def submit(dataset, opt):
    c = copy.deepcopy(config)
    c.JobType.scriptArgs = ['configuration=%s' % os.path.basename(options.configuration)]
    c.JobType.inputFiles += [options.configuration]
    c.General.requestName = opt['name']
    c.Data.publishDataName = opt['name']
    c.Data.inputDataset = dataset
    c.Data.unitsPerJob = opt['units_per_job']

    print("Submitting new task %r" % opt['name'])
    print("\tDataset: %s" % dataset)

    if options.data:
        c.Data.runRange = '%d-%d' % (opt['run_range'][0], opt['run_range'][1])
        c.Data.lumiMask = opt['certified_lumi_file'] if 'certified_lumi_file' in opt else \
            'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions15/13TeV/Cert_246908-247381_13TeV_PromptReco_Collisions15_ZeroTesla_JSON.txt'

    # Create an output file in case something goes wrong with the submission.
    with open('crab_' + opt['name'] + '.py', 'w') as f:
        f.write(str(c))

    if options.submit:
        crabCommand('submit', config=c)
    else:
        print('Configuration file saved as %r' % ('crab_' + opt['name'] + '.py'))
def submit(config): try: crabCommand("submit", config=config) except HTTPException as hte: log.error("Failed submitting task: %s" % (hte.headers)) except ClientException as cle: log.error("Failed submitting task: %s" % (cle))
def submit(self):
    ## Submit the jobs (the command-line equivalent would be: "crab submit config_name.py").
    try:
        crabCommand('submit', config=self.config, proxy=os.environ['X509_USER_PROXY'])
    except HTTPException as hte:
        print "Failed submitting task: %s" % (hte.headers)
    except ClientException as cle:
        print "Failed submitting task: %s" % (cle)
def MonitoringJobs(tasklist):
    while True:
        sumFailed = 0
        sumComp = 0
        for request, name in tasklist.items():
            dirname = './%s/crab_%s' % (workArea, name)
            fulldir = os.path.abspath(dirname)
            try:
                results = crabCommand('status', dir=fulldir)
                if 'FAILED' in results['status']:
                    sumFailed += 1
                if 'COMPLETED' in results['status']:
                    sumComp += 1
                print "For task", request, "the job states are", results['jobsPerStatus']
                status = results['jobsPerStatus']
                if 'failed' in status:
                    print "failed : ", status['failed']
                    crabCommand('resubmit', dir=fulldir)
            except:
                # Ignore transient server errors; the task is re-checked on the next pass.
                pass
            time.sleep(2)
        print "\n\n", colors.RED, "sumFailed : ", sumFailed, " sumComp : ", sumComp, " RE-CHECKING EVERY TASK...\n\n", colors.NORMAL
        if sumFailed == 0 and sumComp == len(tasklist):
            break
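A hedged usage sketch for MonitoringJobs() above; the module-level workArea, the colors helper, and the example task entry are assumptions, not part of the original snippet:

# Hypothetical driver for MonitoringJobs(); all names below are assumed.
workArea = 'crabWorkArea'

class colors:
    RED = '\033[91m'
    NORMAL = '\033[0m'

tasklist = {
    '/SingleMuon/Run2016H-17Jul2018-v1/MINIAOD': 'SingleMuon_Run2016H',
}
MonitoringJobs(tasklist)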
def submit(config):
    try:
        crabCommand('submit', config=config)
    except HTTPException as hte:
        print "Failed submitting task: %s" % (hte.headers)
    except ClientException as cle:
        print "Failed submitting task: %s" % (cle)
def crab_command(command):
    #for dir in glob('/net/scratch_cms/institut_3b/%s/kappa/crab_kappa_skim-%s/*' % (getUsernameFromSiteDB(), date)):
    for dir in glob('/nfs/dust/cms/user/%s/kappa/crab_kappa_skim76X-%s/*' % (getUsernameFromSiteDB(), date)):
        try:
            crabCommand(command, dir=dir)
        except HTTPException as hte:
            print hte
def submit(config):
    try:
        crabCommand('submit', config=config)
    except HTTPException as hte:
        print 'Cannot execute command'
        print hte.headers
def submit(inconfig):
    try:
        crabCommand("submit", config=inconfig)
        # crabCommand('status')
    except HTTPException as hte:
        print "Failed submitting task: %s" % (hte.headers)
    except ClientException as cle:
        print "Failed submitting task: %s" % (cle)
def submit(config, dryrunBool):
    try:
        crabCommand("submit", config=config, dryrun=dryrunBool)
    except HTTPException as hte:
        print "\n{}\nERROR: failed to submit task due to HTTPException.\n{}".format(hte, hte.headers)
    except ClientException as cle:
        print "ERROR: failed to submit task due to ClientException.\n{}".format(cle)
def crabSubmit(config):
    try:
        crabCommand('submit', config=config)
    except HTTPException as hte:
        print '-----> there was a problem. see below.'
        print hte.headers
        print 'quit here'
        q.put(-1)
def submit(config):
    try:
        crabCommand('submit', config=config)
        #crabCommand('submit', config=config, dryrun=True)  # Debug purposes
    except HTTPException as hte:
        print "Failed submitting task: %s" % (hte.headers)
    except ClientException as cle:
        print "Failed submitting task: %s" % (cle)
def flush_queue(self):
    if self.job_mode == 'interactive':
        pool = Pool(processes=self.parallel)
        result = pool.map(partial(run_command, self.dry_run), self.job_queue)
    script_list = []
    if self.job_mode in ['script', 'lxbatch']:
        for i, j in enumerate(range(0, len(self.job_queue), self.merge)):
            script_name = 'job_%s_%i.sh' % (self.task_name, i)
            # Each job is given a slice from the list of combine commands of length 'merge'.
            # We also keep track of the files that were created in case submission to a
            # batch system was also requested.
            self.create_job_script(self.job_queue[j:j + self.merge], script_name, self.job_mode == 'script')
            script_list.append(script_name)
    if self.job_mode == 'lxbatch':
        for script in script_list:
            full_script = os.path.abspath(script)
            logname = full_script.replace('.sh', '_%J.log')
            run_command(self.dry_run, 'bsub -o %s %s %s' % (logname, self.bopts, full_script))
    if self.job_mode == 'crab3':
        # Import the stuff we need.
        from CRABAPI.RawCommand import crabCommand
        from httplib import HTTPException
        print '>> crab3 requestName will be %s' % self.task_name
        outscriptname = 'crab_%s.sh' % self.task_name
        print '>> crab3 script will be %s' % outscriptname
        outscript = open(outscriptname, "w")
        outscript.write(CRAB_PREFIX)
        jobs = 0
        wsp_files = set()
        for line in self.job_queue:
            jobs += 1
            newline = line
            if line.startswith('combine'):
                newline = line.replace('combine', './combine', 1)
            wsp = self.extract_workspace_arg(newline.split())
            wsp_files.add(wsp)
            outscript.write('\nif [ $1 -eq %i ]; then\n' % jobs)
            outscript.write('  ' + newline.replace(wsp, os.path.basename(wsp)) + '\n')
            outscript.write('fi')
        outscript.write(CRAB_POSTFIX)
        outscript.close()
        from HiggsAnalysis.HiggsToTauTau.combine.crab import config
        config.General.requestName = self.task_name
        config.JobType.scriptExe = outscriptname
        config.JobType.inputFiles.extend(wsp_files)
        config.Data.totalUnits = jobs
        config.Data.publishDataName = config.General.requestName
        if self.custom_crab is not None:
            d = {}
            execfile(self.custom_crab, d)
            d['custom_crab'](config)
        print config
        if not self.dry_run:
            try:
                crabCommand('submit', config=config)
            except HTTPException as hte:
                print hte.headers
def master_kill(self):
    """Kills a job & subjobs."""
    job = self.getJobObject()
    from CRABAPI.RawCommand import crabCommand
    from CRABClient.ClientExceptions import ConfigurationException
    import httplib

    if not job.backend.requestname:
        logger.warning("Couldn't find request name for job %s. Skipping" % job.id)
        return False

    crab_work_dir = os.path.join(job.outputdir, job.backend.requestname)
    try:
        crabCommand('kill', dir=crab_work_dir, proxy='/data/hc/apps/cms/config/x509up_production2')
        if len(job.subjobs):
            for s in job.subjobs:
                if not s.status in ['completed', 'failed']:
                    s.updateStatus('killed')
        else:
            if not job.status in ['completed', 'failed']:
                job.updateStatus('killed')
        job.updateMasterJobStatus()
    except httplib.HTTPException as e:
        logger.error("Error while killing job %s" % job.id)
        logger.error(e.headers)
        logger.error(e.result)
        return False
    except ConfigurationException as ce:
        # From the CRAB3 error message: Error loading CRAB cache file.
        # Try to do 'rm -rf /root/.crab3' and run the crab command again.
        import subprocess
        import uuid
        randomstring = str(uuid.uuid4().get_hex().upper()[0:6])
        subprocess.call(["mv", "/root/.crab3", "/tmp/.crab3." + randomstring])
        try:
            statusresult = crabCommand('kill', dir=crab_work_dir, proxy='/data/hc/apps/cms/config/x509up_production2')
            if len(job.subjobs):
                for s in job.subjobs:
                    if not s.status in ['completed', 'failed']:
                        s.updateStatus('killed')
            else:
                if not job.status in ['completed', 'failed']:
                    job.updateStatus('killed')
            job.updateMasterJobStatus()
        except httplib.HTTPException as e:
            logger.error(e.headers)
            logger.error(e.result)
    return True
def GetTaskReports(datasetPath, status, dashboardURL, verbose=False):
    '''Check the CRAB status of one task, retrieve its logs/output and resubmit failed jobs.'''
    # Variable declaration
    reports = []

    # Get all files under <dataset_dir>/results/
    files = execute("ls %s" % os.path.join(datasetPath, "results"))
    try:
        if verbose:
            print "\t Executing \"crab status\" command"
        # Execute "crab status --dir=d"
        result = crabCommand('status', dir=datasetPath)

        # Assess job success/failure for the task
        finished, failed, retrievedLog, retrievedOut = retrievedFiles(datasetPath, result, False)

        # Proceed according to the job status
        if retrievedLog < finished:
            touch(datasetPath)
            dummy = crabCommand('getlog', dir=datasetPath)
        if retrievedOut < finished:
            dummy = crabCommand('getoutput', dir=datasetPath)
            touch(datasetPath)
        if failed > 0:
            print "\t Found \"Failed\" jobs for task \"%s\". Executing command \"crab resubmit --dir=\"%s\"" % (os.path.basename(datasetPath), datasetPath)
            dummy = crabCommand('resubmit', dir=datasetPath)

        # Assess job success/failure for the task (again)
        finished, failed, retrievedLog, retrievedOut = retrievedFiles(datasetPath, result, True)
        retrieved = min(finished, retrievedLog, retrievedOut)
        alljobs = len(result['jobList'])

        # Append the report
        reports.append(Report(datasetPath, alljobs, retrieved, status, dashboardURL))

        # Determine whether the task is DONE or not
        if retrieved == alljobs and retrieved > 0:
            absolutePath = os.path.join(datasetPath, "crab.log")
            os.system("sed -i -e '$a\DONE! (Written by multicrabGet.py)' %s" % absolutePath)
    # Catch exceptions (errors detected during execution which may not be "fatal")
    except:
        msg = sys.exc_info()[1]
        reports.append(Report(datasetPath, "?", "?", "?", dashboardURL))
        print "\t The \"crab status\" command failed with exception \"%s\"" % (msg)
        if verbose:
            print "\t Re-executing \"crab status\" command, this time with full verbosity"
            setConsoleLogLevel(1)
            res = crabCommand('status', dir=datasetPath)
    return reports
def submit(args):
    config, jobfile = args[0], args[1]
    try:
        crabCommand("submit", config=config)
    except HTTPException as hte:
        log.error("Failed submitting task: %s" % (hte.headers))
    except ClientException as cle:
        log.error("Failed submitting task: %s" % (cle))
    os.remove(jobfile)
def master_submit(self, rjobs, subjobconfigs, masterjobconfig):
    """Perform the submission of the master job (the CRAB task)."""
    job = self.getJobObject()

    # DEBUG
    logger.info("Same? %s %s" % (rjobs[0].id, job.id))

    if rjobs[0]:
        job_config = self.prepare_job_config(job)
        # DEBUG
        for section in job_config._internal_sections:
            section = getattr(job_config, section)
            print section.dictionary_()

        from CRABAPI.RawCommand import crabCommand
        from CRABClient.ClientExceptions import ConfigurationException
        import httplib
        try:
            res = crabCommand('submit', config=job_config, proxy='/data/hc/apps/cms/config/x509up_production2')
            job.backend.requestname = res['requestname']
            job.backend.taskname = res['uniquerequestname']
            job.updateStatus('submitted')
        except httplib.HTTPException as e:
            logger.error(e.headers)
            logger.error(e.result)
            return False
        except ConfigurationException as ce:
            # From the CRAB3 error message: Error loading CRAB cache file.
            # Try to do 'rm -rf /root/.crab3' and run the crab command again.
            import subprocess
            import uuid
            randomstring = str(uuid.uuid4().get_hex().upper()[0:6])
            subprocess.call(["mv", "/root/.crab3", "/tmp/.crab3." + randomstring])
            try:
                res = crabCommand('submit', config=job_config, proxy='/data/hc/apps/cms/config/x509up_production2')
                logger.info("CRAB3 submit result: %s" % res)
                job.backend.requestname = res['requestname']
                job.backend.taskname = res['uniquerequestname']
                job.updateStatus('submitted')
            except httplib.HTTPException as e:
                logger.error(e.headers)
                logger.error(e.result)
    else:
        logger.warning('No rjobs found')
    return True
def crabSubmit(config, dryRun=False):
    try:
        if dryRun:
            print 'crabSubmit(): doing crab3 dryrun'
        crabCommand('submit', dryrun=dryRun, config=config)
    except HTTPException as hte:
        print '-----> there was a problem. see below.'
        print hte.headers
        print 'quit here'
        exit(-1)
def crab_kill(self):
    try:
        out = crabCommand('kill', dir=self.sample["crab"]["taskdir"], proxy=u.get_proxy_file())
    except Exception as e:
        self.do_log("ERROR killing: " + str(e))
        return 0
    return out["status"] == "SUCCESS"
def crab_status(self):
    if self.sample["nevents_DAS"] == 0:
        self.sample["nevents_DAS"] = u.dataset_event_count(self.sample["dataset"])["nevents"]
    try:
        if self.fake_status:
            # Canned "COMPLETED" response used for testing without talking to the CRAB
            # server. (A "SUBMITTED" response with running/idle jobs has the same shape.)
            out = {'ASOURL': 'https://cmsweb.cern.ch/couchdb2',
                   'collector': 'cmssrv221.fnal.gov,vocms099.cern.ch',
                   'failedJobdefs': 0,
                   'jobList': [['finished', 1], ['finished', 3], ['finished', 2], ['finished', 5],
                               ['finished', 4], ['finished', 7], ['finished', 6], ['finished', 8]],
                   'jobdefErrors': [],
                   'jobs': {'1': {'State': 'finished'}, '2': {'State': 'finished'},
                            '3': {'State': 'finished'}, '4': {'State': 'finished'},
                            '5': {'State': 'finished'}, '6': {'State': 'finished'},
                            '7': {'State': 'finished'}, '8': {'State': 'finished'}},
                   'jobsPerStatus': {'finished': 8},
                   'outdatasets': None, 'publication': {}, 'publicationFailures': {},
                   'schedd': '*****@*****.**',
                   'status': 'COMPLETED', 'statusFailureMsg': '', 'taskFailureMsg': '',
                   'taskWarningMsg': [], 'totalJobdefs': 0}
        else:
            out = crabCommand('status', dir=self.sample["crab"]["taskdir"], proxy=u.get_proxy_file(), json=True)
        self.crab_status_res = out
        return 1  # succeeded
    except Exception as e:
        self.do_log("ERROR getting status: " + str(e))
        return 0  # failed
def crab_resubmit(self):
    try:
        out = crabCommand('resubmit', dir=self.sample["crab"]["taskdir"], proxy=u.get_proxy_file())
        return out["status"] == "SUCCESS"
    except Exception as e:
        self.do_log("ERROR resubmitting " + str(e))
        return 0  # failed
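The three methods above (crab_kill, crab_status, crab_resubmit) appear to belong to one sample-manager class; a hypothetical sketch of how they might be chained, where the babysit name and the failure heuristics are assumptions:

# Hypothetical babysitting pass over one sample; not from the original code.
def babysit(sample_manager):
    if not sample_manager.crab_status():
        return  # status query failed; retry on the next cycle
    res = sample_manager.crab_status_res
    if res.get('status') == 'FAILED' or res.get('jobsPerStatus', {}).get('failed', 0) > 0:
        sample_manager.crab_resubmit()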
def crab_command(*args, **kwargs):
    # Only call this once per process -- if you want to do tasks in
    # parallel, use multiprocessing, not threads. See
    # crab_multiprocess below.
    def from_kwargs(key, default):
        if key in kwargs:
            val = kwargs[key]
            del kwargs[key]
            return val
        else:
            return default

    # Point CRAB3 at a per-call cache file so parallel processes don't clash.
    cache_file = from_kwargs("cache_file", "/tmp/crab3.%i.%s" % (os.getpid(), str(int(time.time() * 1e6))))
    old_cache_file = os.environ.get("CRAB3_CACHE_FILE", "")
    open(cache_file, "wt").write('{"crab_project_directory": ""}')
    os.environ["CRAB3_CACHE_FILE"] = cache_file

    suppress_stdout = from_kwargs("suppress_stdout", True)
    if suppress_stdout:
        old_stdout = sys.stdout
        sys.stdout = buf = StringIO()

    try:
        result = crabCommand(*args, **kwargs)
    except httplib.HTTPException as e:
        result = {}
        result["jobList"] = []
        result["HTTPException"] = e
        result["status"] = "HTTPException"

    # The original snippet is truncated here; the clean-up below is an assumed
    # completion implied by the setup above (restore stdout and the cache file).
    if suppress_stdout:
        sys.stdout = old_stdout
    os.environ["CRAB3_CACHE_FILE"] = old_cache_file
    os.remove(cache_file)
    return result
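The comment in crab_command() refers to a crab_multiprocess helper that is not included here; a minimal sketch under the assumption that it fans one crab_command call out per process (the CRAB client caches state per process, so threads are unsafe):

# Hypothetical sketch of crab_multiprocess; not part of the original snippet.
import multiprocessing

def _crab_worker(task_args):
    command, task_dir = task_args
    return crab_command(command, dir=task_dir)

def crab_multiprocess(command, task_dirs, nproc=4):
    pool = multiprocessing.Pool(processes=nproc)
    try:
        return pool.map(_crab_worker, [(command, d) for d in task_dirs])
    finally:
        pool.close()
        pool.join()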
def crabCommandProcess(q, crabCommandArgs):
    # give crab3 the chance for one server glitch
    i = 0
    while True:
        i += 1
        try:
            res = crabCommand(*crabCommandArgs)
            break
        except HTTPException as e:
            print("crab error ---------------")
            print(e)
            print("end error ---------------")
            print("will try again!")
            import time
            time.sleep(5)
        except CachefileNotFoundException as e:
            print("crab error ---------------")
            print(e)
            print("end error ---------------")
            print(crabCommandArgs)
            res = {'status': "CachefileNotFound", 'jobs': {}}
            break
        if i > 5:
            res = {'status': "UnexpectedError", 'jobs': {}}
            break
    q.put(res)
def crabCommandProcess(q, crabCommandArgs):
    # give crab3 the chance for one server glitch
    i = 0
    while True:
        i += 1
        try:
            res = crabCommand(*crabCommandArgs)
            break
        except HTTPException as e:
            print "crab error ---------------"
            print e
            print "end error ---------------"
            print "will try again!"
            import time
            time.sleep(5)
        except CachefileNotFoundException as e:
            print "crab error ---------------"
            print e
            print "end error ---------------"
            print crabCommandArgs
            res = {"status": "CachefileNotFound", "jobs": {}}
            break
        if i > 5:
            res = {"status": "UnexpectedError", "jobs": {}}
            break
    q.put(res)
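A hedged usage example for crabCommandProcess() above, showing the Process/Queue pattern its signature implies; the task directory is a made-up example:

# Hypothetical usage; the directory name is an example only.
from multiprocessing import Process, Queue

q = Queue()
args = ('status', '--dir', 'crab_projects/crab_myTask')
p = Process(target=crabCommandProcess, args=(q, args))
p.start()
res = q.get()
p.join()
print res['status']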
def checkwrite(self, site='T2_DE_RWTH', path='noPath'):
    if self.username is None:
        self.checkusername()
    try:
        self.logger.info("Checking if user can write to /store/user/%s on site %s with voGroup %s" % (self.username, site, self.voGroup))
        if not 'noPath' in path:
            res = crabCommand('checkwrite', '--site', site, '--voGroup', self.voGroup, '--lfn', path)
        else:
            res = crabCommand('checkwrite', '--site', site, '--voGroup', self.voGroup)
        if res['status'] == 'SUCCESS':
            self.logger.info("Checkwrite was successfully called.")
            return True
        else:
            self.logger.error("The crab checkwrite command failed for site: %s" % site)
            return False
    except:
        self.logger.error('Unable to perform crab checkwrite')
        return False
def submit(config, q):
    try:
        res = crabCommand('submit', '--proxy=/tmp/x509up_u%d' % os.getuid(), config=config)
        q.put(res)
    except HTTPException as hte:
        print "Failed submitting task: %s" % (hte.headers)
    except ClientException as cle:
        print "Failed submitting task: %s" % (cle)
def main(): """ Main """ options = getOptions() # Execute the command with its arguments for each task. for task in os.listdir(options.projDir): task = os.path.join(options.projDir, task) if not os.path.isdir(task): continue try : print ("Executing (the equivalent of): crab %s %s %s" % (options.crabCmd, task, options.crabCmdOptions)) crabCommand(options.crabCmd, task, *options.crabCmdOptions.split()) except HTTPException, hte : print 'Command not executed'
def submit(config):
    ### For some reason only the first dataset is submitted correctly; work around
    ### this by re-invoking the script once per request name.
    if len(sys.argv) == 1:
        # Book the command and run python
        cmd = "python " + sys.argv[0] + " '" + config.General.requestName + "'"
        print "calling: " + cmd
        call(cmd, shell=True)
        return
    if len(sys.argv) > 1:
        # If it is not in the request, try the next one
        if sys.argv[1] != config.General.requestName:
            return

    print "--- Submitting " + "\033[01;32m" + config.Data.inputDataset.split('/')[1] + "\033[00m" + " ---"
    try:
        crabCommand('submit', config=config)
    except HTTPException as hte:
        print "Failed submitting task: %s" % (hte.headers)
    except ClientException as cle:
        print "Failed submitting task: %s" % (cle)
def crab_cmd(self, configuration, process_queue=None):
    try:
        output = crabCommand(configuration["cmd"], **configuration["args"])
        if process_queue:
            process_queue.put(output)
        return output
    except HTTPException as hte:
        print "Failed", configuration["cmd"], "of the task: %s" % (hte.headers)
    except ClientException as cle:
        print "Failed", configuration["cmd"], "of the task: %s" % (cle)
def submit(dataset, opt):
    c = copy.deepcopy(config)
    c.JobType.psetName = options.psetName

    # Get the name of the output file out of the pset: import the configuration
    # module and read process.framework.output.
    filename = options.psetName
    directory, module_name = os.path.split(filename)
    module_name = os.path.splitext(module_name)[0]
    path = list(sys.path)
    sys.path.insert(0, directory)
    try:
        module = __import__(module_name)
    finally:
        sys.path[:] = path  # restore
    c.JobType.outputFiles.append(module.process.framework.output.value())

    c.General.requestName = opt["name"]
    c.Data.publishDataName = opt["name"]
    c.Data.inputDataset = dataset
    c.Data.unitsPerJob = opt["units_per_job"]

    print("Submitting new task %r" % opt["name"])
    print("\tDataset: %s" % dataset)

    if options.data:
        c.Data.runRange = "%d-%d" % (opt["run_range"][0], opt["run_range"][1])
        c.Data.lumiMask = (
            opt["certified_lumi_file"]
            if "certified_lumi_file" in opt
            else "https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions15/13TeV/Cert_246908-247381_13TeV_PromptReco_Collisions15_ZeroTesla_JSON.txt"
        )

    # Create an output file in case something goes wrong with the submission.
    with open("crab_" + opt["name"] + ".py", "w") as f:
        f.write(str(c))

    if options.submit:
        crabCommand("submit", config=c)
    else:
        print("Configuration file saved as %r" % ("crab_" + opt["name"] + ".py"))
def runCrabCommand(command, *args, **kwargs):
    try:
        return crabCommand(command, *args, **kwargs)
    except Exception as e:
        logger.error(getattr(e, 'message', repr(e)))
def main():
    if len(sys.argv) == 1:
        usage()
    dirs = sys.argv[1:]

    datasetdirs = []
    for d in dirs:
        if os.path.exists(d) and os.path.isdir(d):
            datasetdirs.append(os.path.abspath(d))
    if len(dirs) == 0:
        datasetdirs.append(os.path.abspath("."))

    datasets = []
    for d in datasetdirs:
        if os.path.exists(os.path.join(d, "results")):
            datasets.append(d)
        cands = execute("ls -tr %s" % d)
        for c in cands:
            path = os.path.join(d, c)
            if os.path.exists(os.path.join(path, "results")):
                datasets.append(path)

    class Report:
        def __init__(self, name, all, retrieved):
            self.name = name
            self.all = str(all)
            self.retrieved = str(retrieved)

        def Print(self):
            # Pad the task name so the columns line up.
            name = os.path.basename(self.name)
            while len(name) < 30:
                name += " "
            print name, "retrieved =", self.retrieved, ", all jobs =", self.all

    reports = []
    for d in datasets:
        print
        print os.path.basename(d)
        if os.system("grep Done %s" % os.path.join(d, "crab.log")) == 0:
            # If "Done" is already in the crab.log, skip this task.
            continue
        files = execute("ls %s" % os.path.join(d, "results"))
        try:
            res = crabCommand('status', dir=d)
            finished, failed, retrievedLog, retrievedOut = retrievedFiles(d, res)
            if retrievedLog < finished:
                touch(d)
                dummy = crabCommand('getlog', dir=d)
            if retrievedOut < finished:
                dummy = crabCommand('getoutput', dir=d)
                touch(d)
            if failed > 0:
                dummy = crabCommand('resubmit', dir=d)
            finished, failed, retrievedLog, retrievedOut = retrievedFiles(d, res)
            retrieved = min(finished, retrievedLog, retrievedOut)
            alljobs = len(res['jobList'])
            reports.append(Report(d, alljobs, retrieved))
            if retrieved == alljobs and retrieved > 0:
                # Write 'Done.' into crab.log when all output has been retrieved.
                os.system("sed -i -e '$a\Done. (Written by multicrabget.py)' %s" % os.path.join(d, "crab.log"))
        except:
            reports.append(Report(d, "?", "?"))
            print "crab status command failed, skipping.."

    for r in reports:
        r.Print()
primaryName = primaryName.replace("_13TeV", "") # # TO DO: Fix This # secondaryName = dataset.split('/')[2] secondaryName = secondaryName.replace( "RunIIFall17NanoAODv6-", "MC17NanoAODv6") #RENAME CAMPAIGN. CHECK ITS UPDATED secondaryName = secondaryName.replace( "_Nano25Oct2019", "") #RENAME CAMPAIGN. CHECK ITS UPDATED secondaryName = secondaryName.replace( "_102X_mc2017_realistic_v7", "") #REMOVE GT. CHECK ITS UPDATED secondaryName = secondaryName.replace("PU2017_12Apr2018", "") #CHECK secondaryName = secondaryName.replace("PU2017RECOSIMstep_12Apr2018", "") #CHECK secondaryName = secondaryName.replace("-v1", "") # secondaryName = secondaryName.replace( "-v2", "" ) # Remove any version indication.There should only be one valid version for MC samples # requestName = primaryName + "_" + secondaryName requestName = crab_common.reqNamePrefix + "_" + requestName + "_" + crab_common.version crab_common.config.General.requestName = requestName # outputDatasetTag = crab_common.reqNamePrefix + "_" + secondaryName + "_" + crab_common.version crab_common.config.Data.outputDatasetTag = outputDatasetTag # print "requestName: ", requestName print "outputDatasetTag: ", outputDatasetTag crabCommand('submit', config=crab_2016_common.config) print ""
config.Data.outputDatasetTag = out_dataset_tag
config.Data.outLFNDirBase = storage_location
config.Site.storageSite = 'T3_US_FNALLPC'

from CRABAPI.RawCommand import crabCommand

inputDatasets = open(samples_list, "r")
for dataset in inputDatasets:
    if dataset[0] == "#":
        continue
    dataset = dataset.strip()
    print(dataset)
    # CRAB request names are limited to 100 characters.
    config.General.requestName = (dataset.split("/")[1] + "_" + dataset.split("/")[2].split("-")[0])[:100]
    print(config.General.requestName)
    config.Data.inputDataset = dataset
    if print_config:
        print(config)
    if just_print_config:
        continue
    if dryrun:
        crabCommand('submit', '--dryrun', config=config)
    else:
        crabCommand('submit', config=config)
    if just_first_sample:
        break
def main():
    args = parser()
    crabConf = crabConfig(args.MHc, args.Mh, args.year)
    crabCommand("submit", config=crabConf)
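parser() and crabConfig() are defined elsewhere; a minimal argparse sketch of parser(), assuming MHc/Mh are signal mass points and year selects the data-taking period:

# Hypothetical sketch of parser(); the real flags and types may differ.
import argparse

def parser():
    p = argparse.ArgumentParser(description='Submit one CRAB task per mass point')
    p.add_argument('--MHc', type=int, required=True, help='charged Higgs mass hypothesis')
    p.add_argument('--Mh', type=int, required=True, help='light Higgs mass hypothesis')
    p.add_argument('--year', type=str, required=True, help='data-taking year, e.g. 2016')
    return p.parse_args()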
def flush_queue(self):
    if self.job_mode == 'interactive':
        pool = Pool(processes=self.parallel)
        result = pool.map(partial(run_command, self.dry_run, pre_cmd=self.pre_cmd), self.job_queue)
    script_list = []
    if self.job_mode in ['script', 'lxbatch', 'SGE']:
        if self.prefix_file != '':
            if self.prefix_file.endswith('.txt'):
                job_prefix_file = open(self.prefix_file, 'r')
            else:
                job_prefix_file = open(os.environ['CMSSW_BASE'] + "/src/CombineHarvester/CombineTools/input/job_prefixes/job_prefix_" + self.prefix_file + ".txt", 'r')
            global JOB_PREFIX
            JOB_PREFIX = job_prefix_file.read() % ({
                'CMSSW_BASE': os.environ['CMSSW_BASE'],
                'SCRAM_ARCH': os.environ['SCRAM_ARCH'],
                'PWD': os.environ['PWD']
            })
            job_prefix_file.close()
        for i, j in enumerate(range(0, len(self.job_queue), self.merge)):
            script_name = 'job_%s_%i.sh' % (self.task_name, i)
            # Each job is given a slice from the list of combine commands of length 'merge'.
            # We also keep track of the files that were created in case submission to a
            # batch system was also requested.
            self.create_job_script(self.job_queue[j:j + self.merge], script_name, self.job_mode == 'script')
            script_list.append(script_name)
    if self.job_mode == 'lxbatch':
        for script in script_list:
            full_script = os.path.abspath(script)
            logname = full_script.replace('.sh', '_%J.log')
            run_command(self.dry_run, 'bsub -o %s %s %s' % (logname, self.bopts, full_script))
    if self.job_mode == 'SGE':
        for script in script_list:
            full_script = os.path.abspath(script)
            logname = full_script.replace('.sh', '_%J.log')
            run_command(self.dry_run, 'qsub -o %s %s %s' % (logname, self.bopts, full_script))
    if self.job_mode == 'condor':
        outscriptname = 'condor_%s.sh' % self.task_name
        subfilename = 'condor_%s.sub' % self.task_name
        print '>> condor job script will be %s' % outscriptname
        outscript = open(outscriptname, "w")
        outscript.write(JOB_PREFIX)
        jobs = 0
        wsp_files = set()
        for i, j in enumerate(range(0, len(self.job_queue), self.merge)):
            outscript.write('\nif [ $1 -eq %i ]; then\n' % jobs)
            jobs += 1
            for line in self.job_queue[j:j + self.merge]:
                newline = line
                outscript.write('  ' + newline + '\n')
            outscript.write('fi')
        outscript.close()
        st = os.stat(outscriptname)
        os.chmod(outscriptname, st.st_mode | stat.S_IEXEC)
        subfile = open(subfilename, "w")
        condor_settings = CONDOR_TEMPLATE % {
            'EXE': outscriptname,
            'TASK': self.task_name,
            'EXTRA': self.bopts.decode('string_escape'),
            'NUMBER': jobs
        }
        subfile.write(condor_settings)
        subfile.close()
        run_command(self.dry_run, 'condor_submit %s' % (subfilename))
    if self.job_mode == 'crab3':
        # Import the stuff we need.
        from CRABAPI.RawCommand import crabCommand
        from httplib import HTTPException
        print '>> crab3 requestName will be %s' % self.task_name
        outscriptname = 'crab_%s.sh' % self.task_name
        print '>> crab3 script will be %s' % outscriptname
        outscript = open(outscriptname, "w")
        outscript.write(CRAB_PREFIX)
        jobs = 0
        wsp_files = set()
        for extra in self.crab_files:
            wsp_files.add(extra)
        for i, j in enumerate(range(0, len(self.job_queue), self.merge)):
            jobs += 1
            outscript.write('\nif [ $1 -eq %i ]; then\n' % jobs)
            for line in self.job_queue[j:j + self.merge]:
                newline = line
                if line.startswith('combine'):
                    newline = self.pre_cmd + line.replace('combine', './combine', 1)
                wsp = str(self.extract_workspace_arg(newline.split()))
                newline = newline.replace(wsp, os.path.basename(wsp))
                if wsp.startswith('root://'):
                    newline = ('./copyRemoteWorkspace.sh %s ./%s; ' % (wsp, os.path.basename(wsp))) + newline
                else:
                    wsp_files.add(wsp)
                outscript.write('  ' + newline + '\n')
            outscript.write('fi')
        if self.custom_crab_post is not None:
            with open(self.custom_crab_post, 'r') as postfile:
                outscript.write(postfile.read())
        else:
            outscript.write(CRAB_POSTFIX)
        outscript.close()
        from CombineHarvester.CombineTools.combine.crab import config
        config.General.requestName = self.task_name
        config.JobType.scriptExe = outscriptname
        config.JobType.inputFiles.extend(wsp_files)
        config.Data.totalUnits = jobs
        config.Data.outputDatasetTag = config.General.requestName
        if self.memory is not None:
            config.JobType.maxMemoryMB = self.memory
        if self.crab_area is not None:
            config.General.workArea = self.crab_area
        if self.custom_crab is not None:
            d = {}
            execfile(self.custom_crab, d)
            d['custom_crab'](config)
        print config
        if not self.dry_run:
            try:
                crabCommand('submit', config=config)
            except HTTPException as hte:
                print hte.headers
def main():
    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        # External files needed by CRAB
        inputDir     = '/afs/cern.ch/user/k/kmcdermo/public/input/'
        inputPaths   = 'HLTpathsWExtras.txt'
        inputFilters = 'HLTfilters.txt'
        inputFlags   = 'METflags.txt'

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.workArea = options.workArea
        config.General.requestName = None

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'dispho.py'
        config.JobType.numCores = 8
        config.JobType.pyCfgParams = None
        config.JobType.inputFiles = [inputDir + inputPaths, inputDir + inputFilters, inputDir + inputFlags]

        config.Data.inputDataset = None
        config.Data.splitting = 'EventAwareLumiBased'
        config.Data.unitsPerJob = 500000
        config.Data.allowNonValidInputDataset = True
        config.Data.outputDatasetTag = None
        config.Data.publication = False
        config.Site.storageSite = 'T2_CH_CERN'
        config.Data.outLFNDirBase = '/store/group/phys_exotica/displacedPhotons/nTuples/2017/analysis/unskimmed'
        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        # Each entry is [dataset, cross section in pb].
        inputDataAndOpts = [
            #### GJets HT Binned ####
            ['/GJets_HT-40To100_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '18620'],
            ['/GJets_HT-100To200_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '8625'],
            ['/GJets_HT-200To400_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '2196'],
            ['/GJets_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '258'],
            ['/GJets_HT-600ToInf_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '85.18'],
            #### QCD HT Binned ####
            ['/QCD_HT100to200_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14_ext1-v1/MINIAODSIM', '23670000'],
            ['/QCD_HT200to300_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '1554000'],
            ['/QCD_HT300to500_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '323800'],
            ['/QCD_HT500to700_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '30010'],
            ['/QCD_HT700to1000_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '6352'],
            ['/QCD_HT1000to1500_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '1096'],
            ['/QCD_HT1500to2000_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '99.12'],
            ['/QCD_HT2000toInf_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '20.20'],
            #### Drell-Yan ####
            ['/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017RECOSIMstep_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '5349'],
            #### Diphoton NLO ####
            ['/DiPhotonJetsBox_M40_80-Sherpa/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '308.3'],
            ['/DiPhotonJetsBox_MGG-80toInf_13TeV-Sherpa/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '87.54'],
            #### Top+X ####
            ['/TTJets_TuneCP5_13TeV-amcatnloFXFX-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '726.5'],
            ['/TGJets_TuneCP5_13TeV_amcatnlo_madspin_pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '3.055'],
            ['/TTGJets_TuneCP5_13TeV-amcatnloFXFX-madspin-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '4.106'],
            ['/ttZJets_TuneCP5_13TeV_madgraphMLM_pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.5426'],
            ['/ttWJets_TuneCP5_13TeV_madgraphMLM_pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.4607'],
            ['/TGGJets_TuneCP5_PSweights_13TeV-MadGraph-madspin-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.007726'],
            #### W+X ####
            ['/WJetsToLNu_HT-100To200_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '1395'],
            ['/WJetsToLNu_HT-200To400_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '409.3'],
            ['/WJetsToLNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '57.91'],
            ['/WJetsToLNu_HT-600To800_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '12.93'],
            ['/WJetsToLNu_HT-800To1200_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '5.935'],
            ['/WJetsToLNu_HT-1200To2500_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '1.08'],
            ['/WJetsToLNu_HT-2500ToInf_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v3/MINIAODSIM', '0.008053'],
            ['/WW_TuneCP5_13TeV-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '75.89'],
            ['/WZ_TuneCP5_13TeV-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '36.27'],
            # WG #
            ['/WGGJets_TuneCP5_13TeV_madgraphMLM_pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '1.693'],
            ['/WWG_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.2147'],
            ['/WWW_4F_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '0.2086'],
            ['/WWZ_4F_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v2/MINIAODSIM', '0.1651'],
            ['/WZZ_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.05565'],
            ['/WZG_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.04345'],
            #### Z+X ####
            ['/ZJetsToNuNu_HT-100To200_13TeV-madgraph/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '304.1'],
            ['/ZJetsToNuNu_HT-200To400_13TeV-madgraph/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '91.79'],
            ['/ZJetsToNuNu_HT-400To600_13TeV-madgraph/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '13.12'],
            ['/ZJetsToNuNu_HT-600To800_13TeV-madgraph/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '3.259'],
            ['/ZJetsToNuNu_HT-800To1200_13TeV-madgraph/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '1.499'],
            ['/ZJetsToNuNu_HT-1200To2500_13TeV-madgraph/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.343'],
            ['/ZJetsToNuNu_HT-2500ToInf_13TeV-madgraph/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.005146'],
            ['/ZZ_TuneCP5_13TeV-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '12.14'],
            # ZG #
            ['/ZGGJets_ZToHadOrNu_5f_LO_madgraph_pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.3570'],
            ['/ZZZ_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17MiniAODv2-PU2017_12Apr2018_94X_mc2017_realistic_v14-v1/MINIAODSIM', '0.01398'],
            # ZZG #
            # GG #
            # GGG #
        ]

        for inDO in inputDataAndOpts:
            # inDO[0] is of the form /A/B/C. Since A is (mostly) unique for each inDO
            # in Monte Carlo, use this in the CRAB request name.
            datasetName = inDO[0].split('/')[1]
            # if "_ext" in inDO[0]: datasetName += "_ext"
            config.General.requestName = datasetName
            config.JobType.pyCfgParams = ['globalTag=94X_mc2017_realistic_v14',
                                          'nThreads=' + str(config.JobType.numCores),
                                          'xsec=' + inDO[1], 'filterEff=1', 'BR=1', 'isBkgd=True',
                                          'inputPaths=' + inputPaths,
                                          'inputFilters=' + inputFilters,
                                          'inputFlags=' + inputFlags]
            config.Data.inputDataset = inDO[0]
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDO[0])
                crabCommand(options.crabCmd, config=config, *options.crabCmdOpts.split())
                os.system("rm -rf %s/crab_%s/inputs" % (config.General.workArea, config.General.requestName))
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDO[0], hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDO[0], cle)

    # All other commands can be simply executed.
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd, dir=projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
def main():
    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = '2016DT_psi_mm_v7_4CB'
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/Zll/z_sl7/ZtoJpsill/CMSSW_10_6_3/src/AnalizeZll/ZtoJpsileplep/test/runV7Muon.py'  # 2016 DT config file
        config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        #config.Data.splitting = 'Automatic'
        #config.Data.splitting = 'LumiBased'
        config.Data.splitting = 'FileBased'
        #config.Data.unitsPerJob = 30
        config.Data.unitsPerJob = 1
        #config.Data.totalUnits = 30
        config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions16/13TeV/Legacy_2016/Cert_271036-284044_13TeV_Legacy2016_Collisions16_JSON_MuonPhys.txt'  # makes no sense for MC
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        #config.Data.outLFNDirBase = '/store/user/%s/Zpsi_mm16_v7_4/' % ("gayalasa")
        #config.Site.storageSite = 'T3_US_FNALLPC'
        config.Site.storageSite = 'T3_CH_CERNBOX'
        config.Data.outLFNDirBase = 'gsiftp://eosuserftp.cern.ch/eos/user/g/%s/Zpsi_mm16_v7_4' % ("gayalasa")
        #config.Site.storageSite = None  # Choose your site.
        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            '/DoubleEG/Run2016B-17Jul2018_ver1-v1/MINIAOD',  # DoubleEG 8
            '/DoubleEG/Run2016B-17Jul2018_ver2-v1/MINIAOD',
            '/DoubleEG/Run2016C-17Jul2018-v1/MINIAOD',
            '/DoubleEG/Run2016D-17Jul2018-v1/MINIAOD',
            '/DoubleEG/Run2016E-17Jul2018-v1/MINIAOD',
            '/DoubleEG/Run2016F-17Jul2018-v1/MINIAOD',
            '/DoubleEG/Run2016G-17Jul2018-v1/MINIAOD',
            '/DoubleEG/Run2016H-17Jul2018-v1/MINIAOD',

            '/MuonEG/Run2016B-17Jul2018_ver1-v1/MINIAOD',  # MuonEG 9
            '/MuonEG/Run2016B-17Jul2018_ver2-v1/MINIAOD',
            '/MuonEG/Run2016C-17Jul2018-v1/MINIAOD',
            '/MuonEG/Run2016D-17Jul2018-v1/MINIAOD',
            '/MuonEG/Run2016E-17Jul2018-v1/MINIAOD',
            '/MuonEG/Run2016E-17Jul2018-v2/MINIAOD',  # just Run E
            '/MuonEG/Run2016F-17Jul2018-v1/MINIAOD',
            '/MuonEG/Run2016G-17Jul2018-v1/MINIAOD',
            '/MuonEG/Run2016H-17Jul2018-v1/MINIAOD',

            '/SingleElectron/Run2016B-17Jul2018_ver1-v1/MINIAOD',  # SingleElectron 8
            '/SingleElectron/Run2016B-17Jul2018_ver2-v1/MINIAOD',
            '/SingleElectron/Run2016C-17Jul2018-v1/MINIAOD',
            '/SingleElectron/Run2016D-17Jul2018-v1/MINIAOD',
            '/SingleElectron/Run2016E-17Jul2018-v1/MINIAOD',
            '/SingleElectron/Run2016F-17Jul2018-v1/MINIAOD',
            '/SingleElectron/Run2016G-17Jul2018-v1/MINIAOD',
            '/SingleElectron/Run2016H-17Jul2018-v1/MINIAOD',

            '/SingleMuon/Run2016B-17Jul2018_ver1-v1/MINIAOD',  # SingleMuon 8
            '/SingleMuon/Run2016B-17Jul2018_ver2-v1/MINIAOD',
            '/SingleMuon/Run2016C-17Jul2018-v1/MINIAOD',
            '/SingleMuon/Run2016D-17Jul2018-v1/MINIAOD',
            '/SingleMuon/Run2016E-17Jul2018-v1/MINIAOD',
            '/SingleMuon/Run2016F-17Jul2018-v1/MINIAOD',
            '/SingleMuon/Run2016G-17Jul2018-v1/MINIAOD',
            '/SingleMuon/Run2016H-17Jul2018-v1/MINIAOD',

            '/DoubleMuon/Run2016B-17Jul2018_ver1-v1/MINIAOD',  # DoubleMuon 8
            '/DoubleMuon/Run2016B-17Jul2018_ver2-v1/MINIAOD',
            '/DoubleMuon/Run2016C-17Jul2018-v1/MINIAOD',
            '/DoubleMuon/Run2016D-17Jul2018-v1/MINIAOD',
            '/DoubleMuon/Run2016E-17Jul2018-v1/MINIAOD',
            '/DoubleMuon/Run2016F-17Jul2018-v1/MINIAOD',
            '/DoubleMuon/Run2016G-17Jul2018-v1/MINIAOD',
            '/DoubleMuon/Run2016H-17Jul2018-v1/MINIAOD'
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS, use this in the CRAB request name.
            config.General.requestName = inDS.split('/')[1] + inDS.split('/')[2]
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config=config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)

    # All other commands can be simply executed.
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd, dir=projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)


if __name__ == '__main__':
    main()
def main():
    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = '2016MC_psi_ee_v7_3'
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/Zll/z_sl7/ZtoJpsill/CMSSW_10_6_3/src/AnalizeZll/ZtoJpsileplep/test/runV7Elec16_MC.py'  # 2016 MC config file
        config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        #config.Data.splitting = 'Automatic'
        #config.Data.splitting = 'LumiBased'
        config.Data.splitting = 'FileBased'
        #config.Data.unitsPerJob = 30
        config.Data.unitsPerJob = 1
        #config.Data.totalUnits = 30
        #config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions16/13TeV/Legacy_2016/Cert_271036-284044_13TeV_Legacy2016_Collisions16_JSON.txt'
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        config.Data.outLFNDirBase = '/store/user/%s/Zpsi_ee16mc_v7_3/' % ("gayalasa")
        config.Site.storageSite = 'T3_US_FNALLPC'
        #config.Site.storageSite = None  # Choose your site.
        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            '/ZToJPsiEE_TuneCUEP8M1_13TeV-pythia8/RunIISummer16MiniAODv3-PUMoriond17_94X_mcRun2_asymptotic_v3-v2/MINIAODSIM'
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS, use this in the CRAB request name.
            config.General.requestName = (inDS.split('/')[2]).split('-')[0] + (inDS.split('/')[2]).split('-')[-1]
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config=config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)

    # All other commands can be simply executed.
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd, dir=projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
def main():
    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = 'Z4l_onlygen_v2'
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/Zll/z_sl7/ZtoJpsill/CMSSW_10_6_3/src/AnalizeZll/ZtoJpsileplep/test/runOnlyGenZ4l.py'  # 2018 MC Z -> 2mu2mu
        config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        #config.Data.splitting = 'Automatic'
        #config.Data.splitting = 'LumiBased'
        config.Data.splitting = 'FileBased'
        config.Data.unitsPerJob = 1
        #config.Data.totalUnits = 30
        #config.Data.lumiMask = ''  # makes no sense for MC
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        config.Data.outLFNDirBase = '/store/user/%s/Z4l_onlygen_v2/' % ("gayalasa")
        config.Site.storageSite = 'T3_US_FNALLPC'
        #config.Site.storageSite = None  # Choose your site.
        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            '/ZZTo4L_TuneCP5_13TeV_powheg_pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15_ext1-v2/MINIAODSIM',  # MC 4l 2mu 2mu
            '/ZZTo4L_TuneCP5_13TeV_powheg_pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15_ext2-v2/MINIAODSIM',
            '/ZZTo4L_13TeV_powheg_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM'  # 2016 stefanos
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. The obvious inDS.split('/')[1]+inDS.split('/')[2]
            # is too long for a request name, so use pieces of B instead.
            config.General.requestName = (inDS.split('/')[2]).split('-')[0] + (inDS.split('/')[2]).split('-')[-1]
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config=config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)

    # All other commands can be simply executed.
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd, dir=projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
def flush_queue(self):
    if self.job_mode == 'interactive':
        pool = Pool(processes=self.parallel)
        result = pool.map(partial(run_command, self.dry_run), self.job_queue)
    script_list = []
    if self.job_mode in ['script', 'lxbatch', 'SGE', 'psi']:
        if self.prefix_file != '':
            if self.prefix_file.endswith('.txt'):
                job_prefix_file = open(self.prefix_file, 'r')
            else:
                job_prefix_file = open(os.environ['CMSSW_BASE'] + "/src/CombineHarvester/CombineTools/input/job_prefixes/job_prefix_" + self.prefix_file + ".txt", 'r')
            global JOB_PREFIX
            JOB_PREFIX = job_prefix_file.read() % ({
                'CMSSW_BASE': os.environ['CMSSW_BASE'],
                'SCRAM_ARCH': os.environ['SCRAM_ARCH'],
                'PWD': os.environ['PWD']
            })
            job_prefix_file.close()
        for i, j in enumerate(range(0, len(self.job_queue), self.merge)):
            script_name = 'job_%s_%i.sh' % (self.task_name, i)
            # Each job is given a slice from the list of combine commands of length 'merge'.
            # We also keep track of the files that were created in case submission to a
            # batch system was also requested.
            self.create_job_script(self.job_queue[j:j + self.merge], script_name, self.job_mode == 'script')
            script_list.append(script_name)
    if self.job_mode == 'psi':
        for script in script_list:
            full_script = os.path.abspath(script)
            # PSI needs some extra lines (probably this can be done with a prefix file)
            with open(full_script, 'r') as shFp:
                shText = shFp.read()
            shText = ('#$ -o {0}/\n'.format(os.path.dirname(full_script)) +
                      '#$ -e {0}/\n'.format(os.path.dirname(full_script)) +
                      'export VO_CMS_SW_DIR=/cvmfs/cms.cern.ch/\n' +
                      'source /cvmfs/cms.cern.ch/cmsset_default.sh\n' +
                      'source /swshare/psit3/etc/profile.d/cms_ui_env.sh\n' +
                      shText)
            with open(full_script, 'w') as shFp:
                shFp.write(shText)
            run_command(self.dry_run, 'qsub %s %s' % (self.bopts, full_script))
    if self.job_mode == 'lxbatch':
        for script in script_list:
            full_script = os.path.abspath(script)
            logname = full_script.replace('.sh', '_%J.log')
            run_command(self.dry_run, 'bsub -o %s %s %s' % (logname, self.bopts, full_script))
    if self.job_mode == 'SGE':
        for script in script_list:
            full_script = os.path.abspath(script)
            logname = full_script.replace('.sh', '_%J.log')
            run_command(self.dry_run, 'qsub -o %s %s %s' % (logname, self.bopts, full_script))
    if self.job_mode == 'crab3':
        # Import the stuff we need.
        from CRABAPI.RawCommand import crabCommand
        from httplib import HTTPException
        print '>> crab3 requestName will be %s' % self.task_name
        outscriptname = 'crab_%s.sh' % self.task_name
        print '>> crab3 script will be %s' % outscriptname
        outscript = open(outscriptname, "w")
        outscript.write(CRAB_PREFIX)
        jobs = 0
        wsp_files = set()
        for i, j in enumerate(range(0, len(self.job_queue), self.merge)):
            jobs += 1
            outscript.write('\nif [ $1 -eq %i ]; then\n' % jobs)
            for line in self.job_queue[j:j + self.merge]:
                newline = line
                if line.startswith('combine'):
                    newline = line.replace('combine', './combine', 1)
                wsp = str(self.extract_workspace_arg(newline.split()))
                wsp_files.add(wsp)
                outscript.write('  ' + newline.replace(wsp, os.path.basename(wsp)) + '\n')
            outscript.write('fi')
        outscript.write(CRAB_POSTFIX)
        outscript.close()
        from CombineHarvester.CombineTools.combine.crab import config
        config.General.requestName = self.task_name
        config.JobType.scriptExe = outscriptname
        config.JobType.inputFiles.extend(wsp_files)
        config.Data.totalUnits = jobs
        config.Data.outputDatasetTag = config.General.requestName
        if self.memory is not None:
            config.JobType.maxMemoryMB = self.memory
        if self.crab_area is not None:
            config.General.workArea = self.crab_area
        if self.custom_crab is not None:
            d = {}
            execfile(self.custom_crab, d)
            d['custom_crab'](config)
        print config
        if not self.dry_run:
            try:
                crabCommand('submit', config=config)
            except HTTPException as hte:
                print hte.headers
def main():

    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        # --------------------------------------------------------
        # This is the base config:
        # --------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = '2018DT_psi_mm_v6'
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/Zll/z_sl7/jpsielec18mc/CMSSW_10_2_10/src/AnalizeZee/ZelecTupler/test/runDatMuon18_v6.py'  # 2018 DT config file
        config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        # config.Data.splitting = 'Automatic'
        config.Data.splitting = 'LumiBased'
        config.Data.unitsPerJob = 30
        # config.Data.totalUnits = 30
        #config.Data.lumiMask = ''  # makes no sense for MC
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        config.Data.outLFNDirBase = '/store/user/%s/Zpsi_mm18DT/' % ("gayalasa")

        config.Site.storageSite = 'T3_US_FNALLPC'
        #config.Site.storageSite = None  # Choose your site.
        # --------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            '/MuonEG/Run2018A-17Sep2018-v1/MINIAOD',  # MuonEG
            '/MuonEG/Run2018B-17Sep2018-v1/MINIAOD',
            '/MuonEG/Run2018C-17Sep2018-v1/MINIAOD',
            '/MuonEG/Run2018D-PromptReco-v2/MINIAOD',  # MuonEG
            '/EGamma/Run2018A-17Sep2018-v2/MINIAOD',  # SingleElectron -> EGamma
            '/EGamma/Run2018B-17Sep2018-v1/MINIAOD',
            '/EGamma/Run2018C-17Sep2018-v1/MINIAOD',
            '/EGamma/Run2018D-PromptReco-v2/MINIAOD',  # SingleElectron -> EGamma
            '/SingleMuon/Run2018A-17Sep2018-v2/MINIAOD',  # SingleMuon
            '/SingleMuon/Run2018B-17Sep2018-v1/MINIAOD',
            '/SingleMuon/Run2018C-17Sep2018-v1/MINIAOD',
            '/SingleMuon/Run2018D-PromptReco-v2/MINIAOD',  # SingleMuon
            '/DoubleMuon/Run2018A-17Sep2018-v2/MINIAOD',  # DoubleMuon
            '/DoubleMuon/Run2018B-17Sep2018-v1/MINIAOD',
            '/DoubleMuon/Run2018C-17Sep2018-v1/MINIAOD',
            '/DoubleMuon/Run2018D-PromptReco-v2/MINIAOD'  # DoubleMuon
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS,
            # use this in the CRAB request name.
            config.General.requestName = inDS.split('/')[1] + inDS.split('/')[2]
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config=config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)

    # All other commands can be simply executed.
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd, dir=projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
def submit(config):
    print " to do: ", config
    res = crabCommand('submit', config=config)
def main():

    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        # --------------------------------------------------------
        # This is the base config:
        # --------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = 'onlyGenMC_jpsi'
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/B0Analysis/CMSSW_10_6_12/src/myAnalyzers/BtoKsMuMu/test/MC_onlyGen.py'  # MC Parked config file
        config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        # config.Data.splitting = 'Automatic'
        config.Data.splitting = 'FileBased'
        config.Data.unitsPerJob = 10
        # config.Data.totalUnits = 30
        #config.Data.lumiMask = ''  # no idea
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        config.Data.outLFNDirBase = '/store/user/gayalasa/Sync/onlyGenMC/'

        #config.Site.storageSite = 'T3_US_FNALLPC'
        config.Site.storageSite = 'T3_CH_CERNBOX'
        #config.Site.whitelist = ['T2_US*']
        #config.Data.ignoreLocality = True
        #config.Site.storageSite = None  # Choose your site.
        # --------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            #'/BdToK0sMuMu_Mufilter_SoftQCDnonD_TuneCP5_13TeV-pythia8-evtgen/RunIIAutumn18MiniAOD-PUPoissonAve20_BParking_102X_upgrade2018_realistic_v15-v2/MINIAODSIM'
            '/BdToK0sJPsi_ToMuMu_Mufilter_SoftQCDnonD_TuneCP5_13TeV-pythia8-evtgen/RunIIAutumn18MiniAOD-PUPoissonAve20_BParking_102X_upgrade2018_realistic_v15-v2/MINIAODSIM',  # Official probe filter res
            '/BdToK0sJPsi_JPsiToMuMu_SoftQCDnonD_TuneCP5_13TeV-pythia8-evtgen/RunIIAutumn18MiniAOD-PUPoissonAve20_BParking_102X_upgrade2018_realistic_v15-v2/MINIAODSIM'  # Official no probe filter res
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS,
            # use this in the CRAB request name.
            #config.General.requestName = inDS.split('/')[1]+'-'+inDS.split('/')[2]
            config.General.requestName = inDS.split('/')[1]
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config=config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)

    # All other commands can be simply executed.
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd, dir=projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
def submit(config):
    try:
        crabCommand('submit', config=config)
    except HTTPException as hte:
        print('Cannot execute command')
        print(hte.headers)
def do_submit(q, config, proxy=None):
    if not proxy:
        out = crabCommand('submit', config=config)
    else:
        out = crabCommand('submit', config=config, proxy=proxy)
    q.put(out)
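# do_submit() takes a Queue because each submission is typically run in its
# own process: the CRAB client keeps module-level state, so forking a fresh
# process per task keeps one submission from polluting the next. A minimal
# usage sketch, assuming `config` is an already prepared CRAB configuration:
from multiprocessing import Process, Queue

q = Queue()
p = Process(target=do_submit, args=(q, config))
p.start()
out = q.get()   # blocks until do_submit() has put the submission result
p.join()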
def submit(config):
    res = crabCommand('submit', config=config)
def send_crab_command(*args, **kwargs):
    """ Send a crab command but try again (max 5 times) if server doesn't answer. """
    # Assumes `time` and httplib's HTTPException are imported at module scope.
    for attempt in range(5):
        try:
            return crabCommand(*args, **kwargs)
        except HTTPException as hte:
            print "Server did not answer (attempt %i/5): %s" % (attempt + 1, hte.headers)
            time.sleep(60)
    raise RuntimeError("crab command failed 5 times, giving up")
def submit(config):
    res = crabCommand('submit', config=config)
    # Save the crab config for the future.
    with open(config.General.workArea + "/crab_" + config.General.requestName + "/crab_config.py", "w") as fi:
        fi.write(config.pythonise_())
def main():

    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        # --------------------------------------------------------
        # This is the base config:
        # --------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = 'runHits_analysis'
        config.General.workArea = options.workArea
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'runHits_cfg.py'
        config.JobType.pyCfgParams = [
            'useMTDTrack=False', 'crysLayout=barzflat', 'output=DumpHits_noMTD.root'
        ]

        config.Data.inputDataset = None
        config.Data.inputDBS = 'phys03'
        config.Data.splitting = 'FileBased'
        config.Data.unitsPerJob = 1
        config.Data.outLFNDirBase = '/store/user/meridian/MTD'
        config.Data.publication = False
        config.Data.outputDatasetTag = '10_4_0_mtd3_runHits_analysis_v7'
        config.Data.allowNonValidInputDataset = True
        config.Data.useParent = True

        config.Site.storageSite = 'T2_CH_CERN'
        config.User.voRole = 'priorityuser'
        # --------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            # '/RelValDYToLL_M_50_14TeV/meridian-CMSSW_10_4_0_mtd2_patch1-103X_upgrade2023_realistic_v2_2023D35noPU-v1-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1-479d09f3e9ff3659dd49d9006e28a0a3/USER'
            #### V2 ### chi2 cut @ 50
            # '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V2-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #### V3 ### chi2 cut @ 1000
            # '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            # '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER'
            ### V4 chi2=50
            '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V4-479d09f3e9ff3659dd49d9006e28a0a3/USER'
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS,
            # use this in the CRAB request name.
            config.General.requestName = 'runHits_%s' % (inDS.split('/')[1])
            config.General.requestName = config.General.requestName.translate(None, '_')
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_v4_noMTDTrack' % (config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config=config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)

    # All other commands can be simply executed.
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd, dir=projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
#!/bin/env python
import commands, re, subprocess
from CRABAPI.RawCommand import crabCommand


def get_tasks():
    report = commands.getoutput("find ./ -maxdepth 1 -type d -name 'crab_*'")
    lines = report.split("\n")
    tasks = []
    for line in lines:
        if re.search('crab_configs', line):
            continue
        tasks.append(line)
    return tasks


tasks = get_tasks()

for task in tasks:
    print "\n", task
    report = crabCommand('status', dir=task)
    if 'jobsPerStatus' in report:
        if 'failed' in report['jobsPerStatus']:
            print "Found failed jobs. Resubmit"
            crabCommand('resubmit', dir=task)
            # process = subprocess.Popen("crab resubmit -d %s" % task, shell=True)
            # process.wait()
            # print process.returncode
        config_tmp.JobType.pyCfgParams = ['isMC=0', 'datasetTag=' + dt]
        config_tmp.Data.lumiMask = json
    elif 'WRto' in dt or dt == 'WW' or dt == 'WZ' or dt == 'ZZ' or 'QCD' in dt:
        config_tmp.JobType.pyCfgParams = ['isMC=1', 'datasetTag=' + dt, 'lheAvailable=False']
        config_tmp.Data.lumiMask = ''
    #else:
    #    config_tmp.JobType.pyCfgParams = ['isMC=1', 'datasetTag=' + dt, 'runHLT=0']
    #    config_tmp.Data.lumiMask = ''
    else:
        config_tmp.JobType.pyCfgParams = ['isMC=1', 'datasetTag=' + dt]
        config_tmp.Data.lumiMask = ''
    if '/USER' in d:
        config_tmp.Data.inputDBS = 'phys03'
    crabCommand('submit', config=config_tmp)
    config_tmp = 0

for d, dt in zip(datasets, datasetTags):
    if 'DoubleEG' not in dt:
        continue
    for j in ['data/ElectronTriggerGsfTrkIdVL.json', 'data/ElectronTriggerMW.json']:
        # str.replace, not str.strip: strip() treats its argument as a set of
        # characters to trim from the ends, not as a prefix/suffix to remove.
        jname = dt + '_' + j.replace("data/", "").replace(".json", "")
        print jname
        if os.path.isdir("crab/crab_runAnalysis_80X_WRv07_" + jname):
            print "Directory already exists.\nContinue."
            continue
        config_tmp = config
        config_tmp.General.requestName = 'runAnalysis_80X_WRv07_' + jname
def SubmitJob(key, value):
    doAll = False
    doTest = False
    allSelKeys = selSubmitKey.split()

    if selSubmitKey.find('NONE') != -1:
        print "Nothing to be done!"
        sys.exit()
    if selSubmitKey.find('ALL') != -1:
        doAll = True
    if selSubmitKey.find('TEST') != -1:
        doTest = True

    doThis = doAll
    if not doAll:
        for selKey in allSelKeys:
            if key.find(selKey) != -1:
                doThis = True
                break
    if not doThis:
        return

    tempconfig = copy.deepcopy(config)
    tempconfig.General.requestName = key
    tempconfig.General.workArea = workArea
    tempconfig.Data.outputDatasetTag = Pubname + "_" + key
    tempconfig.Data.outLFNDirBase = outDir

    if len(value) < 3:
        print "Not enough arguments for %s" % key
        raise AssertionError()

    if value[0]:  # Data
        if key.find('Run2015C') != -1:
            tempconfig.JobType.pyCfgParams = [
                'mcInfo=0', 'GlobalTag=74X_dataRun2_v4', 'specialFix=JEC',
                'jecDBname=Summer15_25nsV6_DATA',
                'externalFilterList=csc2015_Dec01.txt.tar.gz,ecalscn1043093_Dec01.txt.tar.gz,badResolutionTrack_Jan13.txt.tar.gz,muonBadTrack_Jan13.txt.tar.gz'
            ]
            tempconfig.JobType.inputFiles = [
                json_25ns, 'Summer15_25nsV6_DATA.db', 'csc2015_Dec01.txt.tar.gz',
                'ecalscn1043093_Dec01.txt.tar.gz', 'csc2015_Dec01.txt',
                'ecalscn1043093_Dec01.txt', 'badResolutionTrack_Jan13.txt',
                'muonBadTrack_Jan13.txt'
            ]
            tempconfig.Data.splitting = 'LumiBased'
            tempconfig.Data.lumiMask = json_25ns
        elif key.find('Run2015D-05Oct2015') != -1:
            tempconfig.JobType.pyCfgParams = [
                'mcInfo=0', 'GlobalTag=74X_dataRun2_reMiniAOD_v0', 'specialFix=JEC',
                'jecDBname=Summer15_25nsV6_DATA',
                'externalFilterList=csc2015_Dec01.txt.tar.gz,ecalscn1043093_Dec01.txt.tar.gz,badResolutionTrack_Jan13.txt.tar.gz,muonBadTrack_Jan13.txt.tar.gz'
            ]
            tempconfig.JobType.inputFiles = [
                json_25ns, 'Summer15_25nsV6_DATA.db', 'csc2015_Dec01.txt.tar.gz',
                'ecalscn1043093_Dec01.txt.tar.gz', 'csc2015_Dec01.txt',
                'ecalscn1043093_Dec01.txt', 'badResolutionTrack_Jan13.txt',
                'muonBadTrack_Jan13.txt'
            ]
            tempconfig.Data.splitting = 'LumiBased'
            tempconfig.Data.lumiMask = json_25ns
        elif key.find('Run2015D-PromptReco') != -1:
            tempconfig.JobType.pyCfgParams = [
                'mcInfo=0', 'GlobalTag=74X_dataRun2_Prompt_v4', 'specialFix=JEC',
                'jecDBname=Summer15_25nsV6_DATA',
                'externalFilterList=csc2015_Dec01.txt.tar.gz,ecalscn1043093_Dec01.txt.tar.gz,badResolutionTrack_Jan13.txt.tar.gz,muonBadTrack_Jan13.txt.tar.gz'
            ]
            tempconfig.JobType.inputFiles = [
                json_25ns, 'Summer15_25nsV6_DATA.db', 'csc2015_Dec01.txt.tar.gz',
                'ecalscn1043093_Dec01.txt.tar.gz', 'csc2015_Dec01.txt',
                'ecalscn1043093_Dec01.txt', 'badResolutionTrack_Jan13.txt',
                'muonBadTrack_Jan13.txt'
            ]
            tempconfig.Data.splitting = 'LumiBased'
            tempconfig.Data.lumiMask = json_25ns
        else:
            pass
    else:
        if key.find('FastSim') != -1:
            tempconfig.JobType.pyCfgParams = [
                'mcInfo=1', 'GlobalTag=74X_mcRun2_asymptotic_v2', 'specialFix=JEC',
                'jecDBname=MCRUN2_74_V9', 'fastsim=1'
            ]
            tempconfig.JobType.inputFiles = ['MCRUN2_74_V9.db']
            tempconfig.Data.splitting = 'FileBased'
        else:
            tempconfig.JobType.pyCfgParams = [
                'mcInfo=1', 'GlobalTag=74X_mcRun2_asymptotic_v2', 'specialFix=JEC',
                'jecDBname=Summer15_25nsV6_MC'
            ]
            tempconfig.JobType.inputFiles = ['Summer15_25nsV6_MC.db']
            tempconfig.Data.splitting = 'FileBased'

    tempconfig.Data.inputDataset = value[1].strip()
    tempconfig.Data.unitsPerJob = value[2]
    if value[0] and len(value) > 3:
        tempconfig.Data.lumiMask = value[3]

    # Submitting jobs
    if doTest:
        saveConfigurationFile(tempconfig, workArea + "/test/" + key + "_test_cfg.py")
        tasklist["crab_" + key] = key
    else:
        results = crabCommand('submit', config=tempconfig)
        tasklist[results['uniquerequestname']] = key
    del tempconfig
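# SubmitJob() calls saveConfigurationFile(), which is not defined anywhere in
# this collection. A minimal sketch of what it presumably does, writing the
# CRAB Configuration object's string form (which renders as valid Python) to
# a file; the helper name and behavior are inferred from the call site:
def saveConfigurationFile(configuration, filename):
    with open(filename, 'w') as f:
        f.write(str(configuration))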
def main():

    options = getOptions()

    isData = False
    if options.sampleType == 'data' or options.sampleType == 'NoBPTX' or options.sampleType == 'trig':
        isData = True
    year = options.year

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        # --------------------------------------------------------
        # This is the base config:
        # --------------------------------------------------------
        from CRABClient.UserUtilities import config  #, getUsernameFromSiteDB
        config = config()

        if not options.workArea:
            config.General.workArea = 'crab'
        else:
            config.General.workArea = options.workArea
        config.General.transferOutputs = True
        config.General.transferLogs = True

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'iDMAnalyzer_cfg.py'
        #config.JobType.psetName = 'python/iDMAnalyzer_cfg.py'
        #config.JobType.maxMemoryMB = 4000
        #config.JobType.numCores = 1

        config.Data.splitting = 'Automatic'
        if isData == True:
            #config.Data.splitting = 'LumiBased'
            #config.Data.unitsPerJob = 100
            if options.sampleType == 'NoBPTX':
                config.Data.lumiMask = '../data/CosmicJSON_E_D_UGMT_bottomOnly.txt'
            elif year == '2018':
                config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions18/13TeV/ReReco/Cert_314472-325175_13TeV_17SeptEarlyReReco2018ABC_PromptEraD_Collisions18_JSON.txt'
            elif year == '2017':
                config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions17/13TeV/ReReco/Cert_294927-306462_13TeV_EOY2017ReReco_Collisions17_JSON_v1.txt'
            elif year == '2016':
                config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions16/13TeV/ReReco/Final/Cert_271036-284044_13TeV_ReReco_07Aug2017_Collisions16_JSON.txt'

        if isData == True:
            config.Data.outLFNDirBase = '/store/group/lpcmetx/iDM/Ntuples/%s/data_fourteenthrun' % year
        else:
            if options.sampleType == 'signal':
                config.Data.outLFNDirBase = '/store/group/lpcmetx/iDM/Ntuples/%s/signal_fourteenthrun' % year
            else:
                config.Data.outLFNDirBase = '/store/group/lpcmetx/iDM/Ntuples/%s/backgrounds_fourteenthrun' % year

        config.Data.publication = False
        config.Data.ignoreLocality = True
        if options.sampleType == 'cosmics':
            config.Data.inputDBS = 'phys03'

        #config.Site.ignoreGlobalBlacklist = True
        #config.Site.whitelist = ['T2_RU_ITEP']
        config.Site.whitelist = [
            'T2_US_*', 'T2_DE_*', 'T2_EE_*', 'T2_ES_*', 'T2_GR_*', 'T2_HU_*',
            'T2_IT_*', 'T2_RU_*', 'T2_UK_*'
        ]
        config.Site.blacklist = ['T2_TW_NCHC', 'T2_BE_IIHE', 'T2_FR_GRIF_LLR']
        #config.Site.blac = ['T3_RU_FIAN', 'T3_US_MIT', 'T3_US_UCD',
        #    'T3_CO_Uniandes', 'T3_US_NotreDame',
        #    'T3_IN_PUHEP', 'T3_UK_ScotGrid_ECDF', 'T3_BY_NCPHEP',
        #    'T2_CH_CERN_HLT', 'T3_CH_CERN_HelixNebula', 'T3_IN_TIFRCloud',
        #    'T0_CH_CERN', 'T3_GR_IASA', 'T3_CN_PKU', 'T0_CH_CSCS_HPC',
        #    'T3_IR_IPM', 'T2_RU_ITEP', 'T3_US_JHU', 'T3_US_Kansas',
        #    'T3_US_FSU', 'T3_KR_UOS', 'T3_CH_PSI']
        #config.Site.whitelist = ["T3_US_FNALLPC"]
        config.Site.storageSite = 'T3_US_FNALLPC'
        # --------------------------------------------------------

        total = {}
        with open('../data/dataset_db_%s.json' % year, 'r') as db:
            data = json.load(db)
            QCD = data['QCD_' + year]
            TTbar = data['TTbar_' + year]
            for it in ['TTTo2L2Nu', 'TTSemiLeptonic', 'TTToHadronic', 'TT_diLept']:
                if it in TTbar:
                    del TTbar[it]
            SingleTop = data['SingleTop_' + year]
            #for it in ['ST_t-channel_top_5f', 'ST_t-channel_antitop_5f']:
            if year != '2016':
                for it in ['ST_t-channel_top_4f', 'ST_t-channel_antitop_4f']:
                    if it in SingleTop:
                        del SingleTop[it]
            WJets = data['WJets_' + year]
            for it in ['WJetsToLnu', 'WJetsToLNu_HT-70To100']:
                if it in WJets:
                    del WJets[it]
            ZJets = data['ZJets_' + year]
            DY = data['DY_' + year]
            for it in ['DYJetsToTauTau', 'DYJetsToLL_M-5to50']:
                if it in DY:
                    del DY[it]
            if year == '2017':
                for it in ['DYJetsToLL_M-50toInf']:
                    if it in DY:
                        del DY[it]
            elif year == '2016':
                for it in ['DYJetsToLL_M-50toInf_NLO']:
                    if it in DY:
                        del DY[it]
            Diboson = data['Diboson_' + year]
            for it in ['WZTo3LNu', 'ZZTo2L2Nu', 'ZZTo2L2Nu_ext2', 'WWTo2L2Nu', 'WWJJToLNuLNu']:
                if it in Diboson:
                    del Diboson[it]
            Triboson = data['Triboson_' + year]
            total_MC = merge_dicts(QCD, TTbar, SingleTop, ZJets, WJets, DY, Diboson, Triboson)
            for key, val in total_MC.items():
                total_MC[key + '_' + year] = val
                del total_MC[key]
            total_Data = data['Data_MET_' + year]
            #del total_Data["MET_2017RunB"]
            #del total_Data["MET_Run2018A"]
            #del total_Data["MET_2016RunB"]
            total_Trig = data['Data_SingleMu_' + year]
            total_NoBPTX = data['Data_NoBPTX_' + year]
            for it in ['NoBPTX_2016RunB', 'NoBPTX_2016RunC', 'NoBPTX_2016RunD',
                       'NoBPTX_2016RunF', 'NoBPTX_2016RunG', 'NoBPTX_2016RunH']:
                if it in total_NoBPTX:
                    del total_NoBPTX[it]
            # NB: total_cosmics is only defined if the next line is uncommented,
            # so the 'cosmics' sample type below raises a NameError as written.
            #total_cosmics = data['CosmicsMC_' + year]
            total_signal = data['signal_' + year]
            for key, val in total_signal.items():
                total_signal[key + '_' + year] = val
                del total_signal[key]

        if options.sampleType == 'data':
            total = merge_dicts(total, total_Data)
        elif options.sampleType == 'signal':
            total = merge_dicts(total, total_signal)
        elif options.sampleType == 'trig':
            total = merge_dicts(total, total_Trig)
        elif options.sampleType == 'NoBPTX':
            total = merge_dicts(total, total_NoBPTX)
        elif options.sampleType == 'cosmics':
            total = merge_dicts(total, total_cosmics)
        elif options.sampleType == 'MC':
            total = merge_dicts(total, total_MC)
        elif options.sampleType == 'all':
            total = merge_dicts(total, total_MC, total_Data, total_Trig)
        elif options.sampleType == 'custom':
            #total = {'ST_tW_top': '/ST_tW_top_5f_inclusiveDecays_13TeV-powheg-pythia8_TuneCUETP8M1/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/AODSIM',
            #         'WWW': '/WWW_4F_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM',
            #         'WW': '/WW_TuneCUETP8M1_13TeV-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM'
            #         }
            #total = {'ST_tW_top_2018': '/ST_tW_top_5f_inclusiveDecays_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15_ext1-v1/AODSIM',
            #         'ST_t-channel_antitop_5f_2018': '/ST_t-channel_antitop_5f_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
            #         'ST_t-channel_top_5f_2018': '/ST_t-channel_top_5f_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
            #         'ZJetsToNuNu_HT-100To200_2018': '/ZJetsToNuNu_HT-100To200_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
            #         'WJetsToLNu_HT-400To600_2018': '/WJetsToLNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
            #         'WJetsToLNu_HT-200To400_2018': '/WJetsToLNu_HT-200To400_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
            #         'DYJetsToLL_M-50toInf_2018': '/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
            #         'WWZ_2018': '/WWZ_TuneCP5_13TeV-amcatnlo-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15_ext1-v2/AODSIM'
            #         }
            #total = {'QCD_HT500To700_2017': '/QCD_HT500to700_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v2/AODSIM'}
            #total = {'DYJetsToLL_M-50_HT1200to2500_2017': '/DYJetsToLL_M-50_HT-1200to2500_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17DRPremix-94X_mc2017_realistic_v11-v1/AODSIM'}
            #total = {'ZJetsToNuNu_HT-1200To2500_2018': '/ZJetsToNuNu_HT-1200To2500_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
            #total = {'QCD_HT200To300_2018': '/QCD_HT200to300_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
            #total = {'DYJetsToLL_M-50toInf_2016': '/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/AODSIM'}
            #total = {'WJetsToLNu_HT-2500ToInf_2018': '/WJetsToLNu_HT-2500ToInf_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
            #         'ZJetsToNuNu_HT-200To400_2018': '/ZJetsToNuNu_HT-200To400_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'
            #         }
            #total = {'ST_t-channel_antitop_4f_2016': '/ST_t-channel_antitop_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM'}
            #total = {'QCD_HT100To200_2018': '/QCD_HT100to200_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
            #total = {'ST_s-channel_2016': '/ST_s-channel_4f_leptonDecays_13TeV-amcatnlo-pythia8_TuneCUETP8M1/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM'}
            #total = {'WW_2017': '/WW_TuneCP5_13TeV-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v1/AODSIM',
            #         'WWZ_2017': '/WWZ_4F_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v2/AODSIM',
            #         'WZZ_2017': '/WZZ_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v1/AODSIM',
            #         'WWW_2017': '/WWW_4F_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v2/AODSIM',
            #         'DYJetsToLL_M-50_HT100to200_2017': '/DYJetsToLL_M-50_HT-100to200_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v1/AODSIM'
            #         }
            #total = {'DYJetsToLL_M-50toInf_2016': '/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/AODSIM'}
            #total = {'DYJetsToLL_M-50_HT-400to600_2017': '/DYJetsToLL_M-50_HT-400to600_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17DRPremix-94X_mc2017_realistic_v10_ext1-v1/AODSIM'}
            #total = {'ZJetsToNuNu_HT-200To400_2018': '/ZJetsToNuNu_HT-200To400_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
            #total = {'WJetsToLNu_HT-2500ToInf_2018': '/WJetsToLNu_HT-2500ToInf_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
            total = {
                'QCD_HT500to700_2018': '/QCD_HT500to700_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'
            }
            #total = merge_dicts()  # -------------------------------> put here the custom samples you want!!!
        else:
            print "ERROR! SampleType option %s not recognized." % options.sampleType
            sys.exit()

        if len(total) == 0:
            print "ERROR! No samples selected to be processed."
            sys.exit()

        for sample, dataset in total.items():
            isRun2018D = False
            if sample == 'MET_Run2018D' or sample == 'NoBPTX_2018D':
                isRun2018D = True
            config.JobType.pyCfgParams = [
                'data={}'.format(isData), 'Run2018D={}'.format(isRun2018D),
                'numThreads={}'.format(1), 'year={}'.format(year)
            ]
            config.JobType.numCores = 1
            config.Data.inputDataset = dataset
            config.General.requestName = 'iDMAnalysis_' + sample
            #config.Data.outputDatasetTag = sample
            # If we need to pull input files from a list file instead of CRAB:
            #config.Data.userInputFiles = open(basedir + sample + '.list').readlines()
            # Submit.
            try:
                print "Submitting for input dataset %s with options %s" % (sample, options.crabCmdOpts)
                crabCommand(options.crabCmd, config=config, *options.crabCmdOpts.split())
                #p = Process(target=crabCommand, args=(options.crabCmd, config, options.crabCmdOpts.split(),))
                #p.start()
                #p.join()
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (sample, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (sample, cle)

    # All other commands can be simply executed.
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd, dir=projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
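# merge_dicts() is used throughout main() above but is not defined in this
# collection. A minimal sketch consistent with how it is called (any number
# of dicts in, one merged dict out, later arguments winning on key
# collisions); the semantics are inferred from the call sites:
def merge_dicts(*dict_args):
    merged = {}
    for d in dict_args:
        merged.update(d)
    return merged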
def submit(config):
    try:
        crabCommand('submit', config=config)
    except HTTPException as hte:
        print 'Cannot execute command'
        print hte.headers
from CRABClient.UserUtilities import config
from CRABClient.ClientExceptions import ClientException
from CRABAPI.RawCommand import crabCommand
from httplib import HTTPException

config = config()

config.General.requestName = 'HydjetDrum5F_RECODEBUG_WrongSignV0Skim_v2'
config.General.workArea = 'CrabArea'
config.General.transferOutputs = True
config.General.transferLogs = False

config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'qw_HydjetDrum5F_withV0_WrongSignSkim_v1.py'
#config.JobType.maxJobRuntimeMin = 2500
#config.JobType.inputFiles = ['Hydjet_PbPb_eff_v1.root', 'Hydjet_ppReco_v5_loose.root']

config.Data.inputDataset = '/MinBias_Hydjet_Drum5F_2018_5p02TeV/clindsey-RECODEBUG_20190625-5db5dfa073297cb96791f14c622e83e2/USER'
config.Data.inputDBS = 'phys03'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob = 5
config.Data.outLFNDirBase = '/store/group/phys_heavyions/qwang/V0Production2018/'
#config.Data.lumiMask = '/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions15/HI/Cert_262548-263757_PromptReco_HICollisions15_JSON_v2.txt'
config.Data.publication = True
#config.Data.outputDatasetTag = ''
config.Data.useParent = False

config.Site.storageSite = 'T2_CH_CERN'
##config.Data.allowNonValidInputDataset = True

try:
    crabCommand('submit', config=config)
except HTTPException as hte:
    print "Failed submitting task: %s" % (hte.headers)
except ClientException as cle:
    print "Failed submitting task: %s" % (cle)
def submit(config):
    try:
        crabCommand('submit', config=config)
    except HTTPException as hte:
        print hte.headers
files = [
    sample_dir + "/" + crab_trial + "/" + x
    for x in os.listdir(sample_dir + '/' + crab_trial) if "tree_" in x
]
nfiles, nfinish, nall = len(files), 0, 0
if use_crab_dir:
    for d in os.listdir('./'):
        if 'crab_' in d and '_task_' in d:
            crablog = open(d + '/crab.log')
            crabloglines = [x.strip() for x in crablog]
            for crablogline in crabloglines:
                if task_name in crablogline:
                    print 'Task name matches CRAB directory', bcolors.OKBLUE, d, bcolors.ENDC
                    res = crabCommand('status', dir=d)
                    nall = len(res['jobList'])
                    for r in res['jobList']:
                        if r[0] == 'finished':
                            nfinish += 1
                    break
print "All tasks:     ", bcolors.OKGREEN, "%s" % nall, bcolors.ENDC
print "Finished tasks:", bcolors.OKGREEN, "%s" % nfinish, bcolors.ENDC
print "Found in dir:  ", bcolors.OKGREEN, "%s" % nfiles, bcolors.ENDC
print ' => fraction of hadded files:', "(%s)/(%s) = " % (nfiles, nall), bcolors.OKBLUE, "%s" % (float(nfiles) / nall if nall > 0 else -1), bcolors.ENDC
fres.write("%s, %s, %s/%s\n" % (task_name, sample_name + ext, nfiles, nall))
if nfiles == 0:
    continue
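# The fragment above relies on a bcolors helper that is not shown in this
# collection. A minimal sketch of the usual ANSI-escape class, with the
# attribute names taken from the accesses above and the conventional codes:
class bcolors:
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    ENDC = '\033[0m'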
def main():

    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        # --------------------------------------------------------
        # This is the base config:
        # --------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = 'V5_ULData'
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/B0Analysis/CMSSW_10_6_12/src/myAnalyzers/BtoKsMuMu/test/MuMuks0_BestPA_V0Ext_Rootupler.py'  # MC Parked config file
        config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        # config.Data.splitting = 'Automatic'
        config.Data.splitting = 'FileBased'
        config.Data.unitsPerJob = 1
        # config.Data.totalUnits = 30
        #config.Data.lumiMask = ''  # no idea
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        config.Data.outLFNDirBase = '/store/user/gayalasa/V5_UL/'

        #config.Site.storageSite = 'T3_US_FNALLPC'
        config.Site.storageSite = 'T3_CH_CERNBOX'
        #config.Site.whitelist = ['T2_US*']
        #config.Data.ignoreLocality = True
        #config.Site.storageSite = None  # Choose your site.
        # --------------------------------------------------------

        # Will submit one task for each of these input datasets.
        # Data taken from here: https://indico.cern.ch/event/1094697/contributions/4608916/attachments/2346139/4000698/Bstojpsiks_15_11_2021.pdf
        # https://twiki.cern.ch/twiki/bin/view/CMS/PdmVRun2LegacyAnalysis
        inputDatasets = [
            '/Charmonium/Run2016B-21Feb2020_ver1_UL2016_HIPM-v1/MINIAOD',  # UL 2016 35.92 fb-1
            '/Charmonium/Run2016B-21Feb2020_ver2_UL2016_HIPM-v1/MINIAOD',
            '/Charmonium/Run2016C-21Feb2020_UL2016_HIPM-v1/MINIAOD',
            '/Charmonium/Run2016D-21Feb2020_UL2016_HIPM-v1/MINIAOD',
            '/Charmonium/Run2016E-21Feb2020_UL2016_HIPM-v1/MINIAOD',
            '/Charmonium/Run2016F-21Feb2020_UL2016_HIPM-v1/MINIAOD',
            '/Charmonium/Run2016F-21Feb2020_UL2016-v1/MINIAOD',
            '/Charmonium/Run2016G-21Feb2020_UL2016-v1/MINIAOD',
            '/Charmonium/Run2016H-21Feb2020_UL2016-v1/MINIAOD',
            '/Charmonium/Run2017B-09Aug2019_UL2017-v1/MINIAOD',  # UL 2017 42.42 fb-1
            '/Charmonium/Run2017C-09Aug2019_UL2017-v1/MINIAOD',
            '/Charmonium/Run2017D-09Aug2019_UL2017-v1/MINIAOD',
            '/Charmonium/Run2017E-09Aug2019_UL2017-v1/MINIAOD',
            '/Charmonium/Run2017F-09Aug2019_UL2017-v1/MINIAOD',
            '/Charmonium/Run2018A-12Nov2019_UL2018_rsb-v1/MINIAOD',  # UL 2018 58.97 fb-1
            '/Charmonium/Run2018B-12Nov2019_UL2018-v1/MINIAOD',
            '/Charmonium/Run2018C-12Nov2019_UL2018_rsb_v2-v2/MINIAOD',
            '/Charmonium/Run2018D-12Nov2019_UL2018-v1/MINIAOD'
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS,
            # use this in the CRAB request name.
            config.General.requestName = inDS.split('/')[1] + '-' + inDS.split('/')[2]
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config=config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)

    # All other commands can be simply executed.
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd, dir=projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)