def mk_secondary_lumimask(dset):
    """Build and write a JSON lumi mask for every lumisection in *dset*.

    Queries DAS (prod/phys03 instance) for the dataset's files, then queries
    the run/lumi content of each file, and writes the merged LumiList JSON to
    ``output_dir`` (module-level) under a name derived from the dataset.

    Parameters:
        dset -- DBS dataset path, e.g. '/Primary/Processed-Tag_v1/USER'.
    """
    dq = das_query("file dataset=%s instance=prod/phys03" % dset, cmd='dasgoclient --dasmaps=./')
    assert 'data' in dq
    fs = [str(f['file'][0]['name']) for f in dq['data']]
    print('N files:', len(fs))
    lumis = []
    # One DAS query per file; can be slow for large datasets.
    # Renamed loop variable (was 'dq') so it no longer shadows the dataset query above.
    file_queries = [das_query("lumi file=%s instance=prod/phys03" % f, cmd='dasgoclient --dasmaps=./')
                    for f in fs]
    for fq in file_queries:
        for data in fq['data']:
            for lumi in data['lumi'][0]['lumi_section_num']:
                lumis.append([data['lumi'][0]['run_number'], lumi])
    jsonList = LumiList(lumis=lumis)
    # e.g. '/A/B-C_D/USER' -> 'C' (first '_' field of the second '-' field
    # of the processed-dataset name)
    output_file = dset.split('/')[2].split('-')[1].split('_')[0]
    jsonList.writeJSON(output_dir + output_file + '_3photons_imgskim_lumi_list.json')
def __init__(self, name, nanotrees, weight, triggers, jess, jers, mc, injfile, outjfile): self.jess = jess self.jers = jers self.mc = mc self.weight = weight self.__book__(name) # get json file myList = LumiList(filename=injfile) # initialize output lumilist myrunlumi = [] # open root files files = open(nanotrees, "r") # for file in glob.glob(nanotrees + "*root"): for file in files.read().splitlines(): self.Fill(file, triggers, myList, myrunlumi) # only do this for data if not (self.mc): outList = LumiList(lumis=myrunlumi) outList.writeJSON(outjfile) print "got to the end" files.close() self.O.cd() self.O.Write() self.O.Close()
def getStrangeRuns(self):
    """Return (or write) the LumiList of lumisections whose HLT count is < 1.

    If ``self.jsonOutput`` is set, writes the list to
    ``self.jsonOutput + "_strange"`` and returns that filename (for
    consistency with getGoodRuns/getPrescaledRuns); otherwise returns the
    LumiList itself.
    """
    hltInfoByLs = self.analysisOutput()
    # 'not x >= 1' parses as 'not (x >= 1)', i.e. x < 1 — written explicitly here
    strangeRunsLumis = LumiList(
        lumis=[[int(hltInf[0]), int(hltInf[1])]
               for hltInf in hltInfoByLs if int(hltInf[2][1]) < 1])
    if self.jsonOutput:
        # BUG FIX: was the bare name 'jsonOutput' (NameError at runtime);
        # the condition above reads the attribute, so use it here too.
        strangeRunsLumis.writeJSON(self.jsonOutput + "_strange")
        return self.jsonOutput + "_strange"
    else:
        return strangeRunsLumis
def getGoodRuns(self):
    """Return (or write) the LumiList of lumisections whose HLT count == 1.

    If ``self.jsonOutput`` is set, writes the list to
    ``self.jsonOutput + "_good"`` and returns that filename; otherwise
    returns the LumiList itself.
    """
    hltInfoByLs = self.analysisOutput()
    goodRunsAndLumis = LumiList(
        lumis=[[int(hltInf[0]), int(hltInf[1])]
               for hltInf in hltInfoByLs if int(hltInf[2][1]) == 1])
    if self.jsonOutput:
        # BUG FIX: was the bare name 'jsonOutput' (NameError at runtime);
        # the condition above reads the attribute, so use it here too.
        goodRunsAndLumis.writeJSON(self.jsonOutput + "_good")
        return self.jsonOutput + "_good"
    else:
        return goodRunsAndLumis
def getPrescaledRuns(self):
    """Return (or write) the LumiList of lumisections whose HLT count >= 1.

    If ``self.jsonOutput`` is set, writes the list to
    ``self.jsonOutput + "_prescaled"`` and returns that filename; otherwise
    returns the LumiList itself.
    """
    hltInfoByLs = self.analysisOutput()
    prescaledRunsAndLumis = LumiList(
        lumis=[[int(hltInf[0]), int(hltInf[1])]
               for hltInf in hltInfoByLs if int(hltInf[2][1]) >= 1])
    if self.jsonOutput:
        # BUG FIX: was the bare name 'jsonOutput' (NameError at runtime);
        # the condition above reads the attribute, so use it here too.
        prescaledRunsAndLumis.writeJSON(self.jsonOutput + "_prescaled")
        return self.jsonOutput + "_prescaled"
    else:
        return prescaledRunsAndLumis
def savejsons(self, processed):
    """Write per-label processed/published lumi JSONs under the plot directory.

    For every label in *processed*, writes 'jsons/<label>/processed.json'
    and, when a 'published.json' exists in the work directory, copies it
    alongside.  Returns {label: [(relative path, kind), ...]}.
    """
    top = os.path.join(self.__plotdir, 'jsons')
    if not os.path.exists(top):
        os.makedirs(top)

    res = {}
    for label in processed:
        reldir = os.path.join('jsons', label)
        absdir = os.path.join(self.__plotdir, reldir)
        if not os.path.exists(absdir):
            os.makedirs(absdir)

        LumiList(lumis=processed[label]).writeJSON(os.path.join(absdir, 'processed.json'))
        res[label] = [(os.path.join(reldir, 'processed.json'), 'processed')]

        published = os.path.join(self.__workdir, label, 'published.json')
        if os.path.isfile(published):
            shutil.copy(published, absdir)
            res[label] += [(os.path.join(reldir, 'published.json'), 'published')]
    return res
def shortenJson(jsonFile,minRun=0,maxRun=-1,output=None,debug=False): from copy import deepcopy runList = jsonFile if isinstance(runList,LumiList): runList = deepcopy(jsonFile) else: runList = LumiList (filename = jsonFile) # Read in first JSON file allRuns = runList.getRuns() runsToRemove=[] for run in allRuns: if int(run) < minRun: runsToRemove.append (run) if maxRun > 0 and int(run) > maxRun: runsToRemove.append (run) if debug: print " runsToRemove ",runsToRemove runList.removeRuns (runsToRemove) if output: runList.writeJSON (output) else: return runList
# Top-level dispatch of a CRAB helper script (Python 2: print statements).
# Each '-flag' on argv selects one maintenance action on CRAB job directories.
# NOTE(review): fragment — the final '-addLumiSummaries' branch continues
# beyond this view; the whole chunk is collapsed onto one physical line and
# needs reformatting before it can run.
from pprint import pprint from FWCore.PythonUtilities.LumiList import LumiList if bool_from_argv('-cleanUp'): print 'executing crab_cleanup()' crab_cleanup() elif bool_from_argv('-countJobs'): print len(crab_jobs_from_argv()) elif bool_from_argv('-expectedLumis'): print 'writing out JSONs for expected lumis:' for d in crab_dirs_from_argv(): print d ll = LumiList(runsAndLumis=crab_lumis_from_arguments_xml(d)) ll.writeJSON(os.path.join(d, 'res/expectedLumis.json')) elif bool_from_argv('-checkLumiDups'): print 'comparing lumiSummary.jsons:' from itertools import combinations files = [os.path.join(d, 'res/lumiSummary.json') for d in crab_dirs_from_argv()] for f1, f2 in combinations(files, 2): print f1, f2 both = LumiList(f1) & LumiList(f2) if both: print '\033[36;7m duplicates: \033[m', both elif bool_from_argv('-addLumiSummaries'): out_fn = 'addedLumiSummaries.json' if os.path.isfile(out_fn): raise ValueError('%s already exists' % out_fn)
# Script fragment: iterates FWLite Lumis, collects run -> [lumisection] into
# runsLumisDict, prints it as a LumiList JSON, and optionally sums
# delivered/recorded instantaneous-lumi totals. Python 2 (print statements).
# NOTE(review): the 'if' matching the leading 'else:' is outside this view.
# NOTE(review): 'handle, lable = None, None' — 'lable' looks like a typo for
# 'label'; if the else-branch is taken, 'label' stays bound to its previous
# value (or is undefined) when options.intLumi later uses it. Confirm upstream.
handle = Handle ('LumiSummary') label = ('lumiProducer') else: handle, lable = None, None runsLumisDict = {} lumis = Lumis (args) delivered = recorded = 0 for lum in lumis: runList = runsLumisDict.setdefault (lum.aux().run(), []) runList.append( lum.aux().id().luminosityBlock() ) # get the summary and keep track of the totals if options.intLumi: lum.getByLabel (label, handle) summary = handle.product() delivered += summary.avgInsDelLumi() recorded += summary.avgInsRecLumi() # print out lumi sections in JSON format jsonList = LumiList (runsAndLumis = runsLumisDict) if options.output: jsonList.writeJSON (options.output) else: print jsonList # print out integrated luminosity numbers if requested if options.intLumi: print "\nNote: These numbers should be considered approximate. For official numbers, please use lumiCalc.py" print "delivered %.1f mb, recorded %.1f mb" % \ (delivered, recorded)
def makeJSON(optlist): outdir = optlist[0] basedir = optlist[1] lastUnblindRun = optlist[2] name = optlist[3] files = optlist[4:] s = sampleInfo(name,basedir,files) #lumi set for this sample mergedLumisUnblind = set() mergedLumisBlinded = set() for f in s.fileList: file = TFile.Open(f) if file == None: continue # only keep necessary branches t = file.Get("TreeMaker2/PreSelection") if t == None: continue t.SetBranchStatus("*",0) t.SetBranchStatus("RunNum",1) t.SetBranchStatus("LumiBlockNum",1) #get tree entries nentries = t.GetEntries() if nentries==0: continue t.SetEstimate(nentries) t.Draw("RunNum:LumiBlockNum","","goff") v1 = t.GetV1(); v1.SetSize(t.GetSelectedRows()); a1 = array.array('d',v1); v1 = None; v2 = t.GetV2(); v2.SetSize(t.GetSelectedRows()); a2 = array.array('d',v2); v2 = None; #loop over tree entries for run,ls in izip(a1,a2): irun = int(run) ils = int(ls) if irun <= lastUnblindRun or lastUnblindRun==-1: if not (irun,ils) in mergedLumisUnblind: mergedLumisUnblind.add((irun,ils)) else: if not (irun,ils) in mergedLumisBlinded: mergedLumisBlinded.add((irun,ils)) file.Close() ### end loop over files in sample #convert the runlumis from list of pairs to dict: [(123,3), (123,4), (123,5), (123,7), (234,6)] => {123 : [3,4,5,7], 234 : [6]} mLumisDictUnblind = {} mLumisDictBlinded = {} for k, v in mergedLumisUnblind: mLumisDictUnblind.setdefault(k, []).append(int(v)) for k, v in mergedLumisBlinded: mLumisDictBlinded.setdefault(k, []).append(int(v)) #make lumi list from dict mergedLumiListUnblind = LumiList(runsAndLumis=mLumisDictUnblind) mergedLumiListBlinded = LumiList(runsAndLumis=mLumisDictBlinded) if mergedLumiListUnblind: outfile = outdir+'/lumiSummary_unblind_'+s.outName+'.json' mergedLumiListUnblind.writeJSON(outfile) print "wrote "+outfile if mergedLumiListBlinded: outfile = outdir+'/lumiSummary_blinded_'+s.outName+'.json' mergedLumiListBlinded.writeJSON(outfile) print "wrote "+outfile
if args.intersect: edmLumis = edmLumis & impLumis reclumiData=None dellumiData=None if lumiCalc is not None: print "Accessing LumiDB... can take a while..." dellumiData=lumiCalc.deliveredLumiForRange(edmLumis.getCompactList()) reclumiData=lumiCalc.recordedLumiForRange(edmLumis.getCompactList()) totalRec = 0.0 totalDel = 0.0 for dpr in dellumiData: if dpr[2] != 'N/A': totalDel += float(dpr[2]) for dpr in reclumiData: totalRec += lumiCalc.calculateTotalRecorded(dpr[2]) print "Delivered Luminosity: ",totalDel print "Recorded Luminosity: ",totalRec if args.outputJSON: edmLumis.writeJSON(outputJSON) if args.printJSON: print edmLumis.getCompactList() if args.printCMSSWString: print edmLumis.getCMSSWString() if lumiCalc is not None: del lumiCalc
# Tail of an optparse-based JSON-merging script ('parser' and 'filterRuns'
# are defined earlier in the file, outside this view).  Each positional
# argument may carry a run window as 'file:minRun-maxRun'; the per-file
# LumiLists are range-filtered and OR-ed into finalList.
# NOTE(review): the bare 'except: pass' around int(match.group(3)) swallows
# all errors, not just a missing max-run group — consider ValueError.
parser.add_option ('--output', dest='output', type='string', help='Save output to file OUTPUT') # required parameters (options, args) = parser.parse_args() if not len (args): raise RuntimeError("Must provide at least one input file") minMaxRE = re.compile (r'(\S+):(\d+)-(\d*)') finalList = LumiList() for filename in args: minRun = maxRun = 0 match = minMaxRE.search (filename) if match: filename = match.group(1) minRun = int( match.group(2) ) try: maxRun = int( match.group(3) ) except: pass if maxRun and minRun > maxRun: raise RuntimeError("Minimum value (%d) is greater than maximum value (%d) for file '%s'" % (minRun, maxRun, filename)) localList = LumiList (filename = filename) filterRuns (localList, minRun, maxRun) finalList = finalList | localList if options.output: finalList.writeJSON (options.output) else: print finalList
# Python 2 variant of the JSON-merging tail above (uses the
# 'raise RuntimeError, msg' statement form and the print statement);
# 'parser' and 'filterRuns' are defined outside this view.
# NOTE(review): same bare 'except: pass' caveat as the py3 twin.
help='Save output to file OUTPUT') # required parameters (options, args) = parser.parse_args() if not len(args): raise RuntimeError, "Must provide at least one input file" minMaxRE = re.compile(r'(\S+):(\d+)-(\d*)') finalList = LumiList() for filename in args: minRun = maxRun = 0 match = minMaxRE.search(filename) if match: filename = match.group(1) minRun = int(match.group(2)) try: maxRun = int(match.group(3)) except: pass if maxRun and minRun > maxRun: raise RuntimeError, "Minimum value (%d) is greater than maximum value (%d) for file '%s'" % ( minRun, maxRun, filename) localList = LumiList(filename=filename) filterRuns(localList, minRun, maxRun) finalList = finalList | localList if options.output: finalList.writeJSON(options.output) else: print finalList
if args.intersect: edmLumis = edmLumis & impLumis reclumiData = None dellumiData = None if lumiCalc is not None: print "Accessing LumiDB... can take a while..." dellumiData = lumiCalc.deliveredLumiForRange(edmLumis.getCompactList()) reclumiData = lumiCalc.recordedLumiForRange(edmLumis.getCompactList()) totalRec = 0.0 totalDel = 0.0 for dpr in dellumiData: if dpr[2] != 'N/A': totalDel += float(dpr[2]) for dpr in reclumiData: totalRec += lumiCalc.calculateTotalRecorded(dpr[2]) print "Delivered Luminosity: ", totalDel print "Recorded Luminosity: ", totalRec if args.outputJSON: edmLumis.writeJSON(outputJSON) if args.printJSON: print edmLumis.getCompactList() if args.printCMSSWString: print edmLumis.getCMSSWString() if lumiCalc is not None: del lumiCalc
# Dataset-publishing fragment (Python 2: 'except HTTPError, e').  Migrates
# parent files if requested, bulk-inserts the block into local DBS, and on
# success writes a JSON of published run/lumi sections next to the workdir.
# 'block', 'dbs', 'db', 'args', 'first_job', 'published', 'das_interface',
# 'workdir', 'label' and 'missing' come from the enclosing scope, outside
# this view.
# NOTE(review): 'inserted = True' is assigned BEFORE insertBulkBlock runs,
# so an HTTPError from the insert still leaves inserted == True and the
# publishing summary proceeds — it likely should be set only after the
# insert succeeds; confirm against the full script.
if args.migrate_parents: parents_to_migrate = list(set([p['parent_logical_file_name'] for p in block['file_parent_list']])) migrate_parents(parents_to_migrate, dbs) if len(block.data['files']) > 0: try: inserted = True dbs['local'].insertBulkBlock(block.data) db.update_published(block.get_publish_update()) logging.info('block inserted: %s' % block['block']['block_name']) except HTTPError, e: logging.critical(e) first_job += args.block_size if inserted: published.update({'dataset': block['dataset']['dataset']}) info = das_interface.get_info(published) lumis = LumiList(lumis=sum(info.lumis.values(), [])) json = os.path.join(workdir, label, 'published.json') lumis.writeJSON(json) logging.info('publishing dataset %s complete' % label) logging.info('json file of published runs and lumis saved to %s' % json) if len(missing) > 0: template = "the following job(s) have not been published because their output could not be found: {0}" logging.warning(template.format(", ".join(map(str, missing))))
# Fragment: parses compact run:lumi range strings ('r' is a loop variable
# from outside this view; 'compList', 'config' and 'PSet' likewise) into
# compList, writes heppy_json.txt, points the first heppy component at it,
# and runs a heppy Looper.  A single token 'run:ls' becomes the one-lumi
# range [ls, ls]; 'run:ls1-run:ls2' becomes [ls1, ls2] (same run asserted).
# Python 2 (print statements).
a = r.split("-") assert len(a) == 1 or len(a) == 2 b = [] for x in a: c = x.split(":") assert len(c) == 2 for d in c: assert d.isdigit() b.append([int(c[0]), int(c[1])]) if len(a) == 1: compList.setdefault(b[0][0], []).append([b[0][1], b[0][1]]) else: assert b[0][0] == b[1][0] compList.setdefault(b[0][0], []).append([b[0][1], b[1][1]]) lumiList = LumiList(compactList=compList) lumiList.writeJSON("heppy_json.txt") if hasattr(config.components[0], "json"): print "Old heppy json = ", config.components[0].json config.components[0].json = "heppy_json.txt" print "Setting heppy json" os.system("cat heppy_json.txt") from PhysicsTools.HeppyCore.framework.looper import Looper looper = Looper('Output', config, nPrint=1) looper.loop() looper.write() print PSet.process.output.fileName os.system("ls -lR") os.rename("Output/treeProducerSusySingleLepton/tree.root", "tree.root") os.system("ls -lR")
def makeJSON(optlist): outdir = optlist[0] basedir = optlist[1] verbose = optlist[2] name = optlist[3] files = optlist[4:] s = sampleInfo(name, basedir, files) #lumi set for this sample mergedLumis = set() for f in s.fileList: # skip empty paths if f == '': continue # open the file or skip the file if it can't be opened if verbose: print "Trying to open file \"" + f + "\"" file = TFile.Open(f) if file == None: if verbose: print "\tWARNING: Can't open file \"" + f + "\" from sample" + s continue # only keep necessary branches t = file.Get("TreeMaker2/PreSelection") if t == None: continue t.SetBranchStatus("*", 0) t.SetBranchStatus("RunNum", 1) t.SetBranchStatus("LumiBlockNum", 1) #get tree entries nentries = t.GetEntries() if nentries == 0: continue t.SetEstimate(nentries) t.Draw("RunNum:LumiBlockNum", "", "goff") v1 = t.GetV1() v1.SetSize(t.GetSelectedRows()) a1 = array.array('d', v1) v1 = None v2 = t.GetV2() v2.SetSize(t.GetSelectedRows()) a2 = array.array('d', v2) v2 = None #loop over tree entries for run, ls in izip(a1, a2): irun = int(run) ils = int(ls) if not (irun, ils) in mergedLumis: mergedLumis.add((irun, ils)) file.Close() ### end loop over files in sample #convert the runlumis from list of pairs to dict: [(123,3), (123,4), (123,5), (123,7), (234,6)] => {123 : [3,4,5,7], 234 : [6]} mLumisDict = {} for k, v in mergedLumis: mLumisDict.setdefault(k, []).append(int(v)) #make lumi list from dict mergedLumiList = LumiList(runsAndLumis=mLumisDict) if mergedLumiList: outfile = outdir + '/lumiSummary_' + s.outName + '.json' mergedLumiList.writeJSON(outfile) print "wrote " + outfile
# CRAB job-preparation fragment (Python 2).  For more than 20 events the
# pick is routed through CRAB: a lumi-mask JSON and a run:event list are
# written from 'eventList', then a crab.cfg is rendered from 'crabTemplate'.
# 'eventList', 'options', 'crabTemplate' and 'setupCrabDict' come from the
# enclosing scope; the trailing dangling 'else:' belongs to 'if options.crab:'
# and its body is outside this view.
# NOTE(review): 'target.close' (no parentheses) is an attribute access, not a
# call — the crabcfg file handle is never explicitly closed; should be
# 'target.close()'.
if len (eventList) > 20: options.crab = True if options.crab: ########## ## CRAB ## ########## if options.runInteractive: raise RuntimeError, "This job is can not be run interactive, but rather by crab. Please call without '--runInteractive' flag." runsAndLumis = [ (event.run, event.lumi) for event in eventList] json = LumiList (lumis = runsAndLumis) eventsToProcess = '\n'.join(\ sorted( [ "%d:%d" % (event.run, event.event) for event in eventList ] ) ) crabDict = setupCrabDict (options) json.writeJSON (crabDict['json']) target = open (crabDict['runEvent'], 'w') target.write ("%s\n" % eventsToProcess) target.close() target = open (crabDict['crabcfg'], 'w') target.write (crabTemplate % crabDict) target.close print "Please visit CRAB twiki for instructions on how to setup environment for CRAB:\nhttps://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideCrab\n" if options.crabCondor: print "You are running on condor. Please make sure you have read instructions on\nhttps://twiki.cern.ch/twiki/bin/view/CMS/CRABonLPCCAF\n" if not os.path.exists ('%s/.profile' % os.environ.get('HOME')): print "** WARNING: ** You are missing ~/.profile file. Please see CRABonLPCCAF instructions above.\n" print "Setup your environment for CRAB. Then edit %(crabcfg)s to make any desired changed. The run:\n\ncrab -create -cfg %(crabcfg)s\ncrab -submit\n" % crabDict else:
# Tail of a run-removal script (Python 3 style: print() and
# raise RuntimeError(...)); 'parser' is defined earlier, outside this view.
# Reads one JSON file, removes runs listed via --runs plus any outside
# [--min, --max], and writes or prints the result.
# NOTE(review): 'options.min and ...' treats a --min of 0 as unset —
# presumably intentional since run numbers are positive; confirm.
# required parameters (options, args) = parser.parse_args() if len(args) != 1: raise RuntimeError("Must provide exactly one input file") if options.min and options.max and options.min > options.max: raise RuntimeError( "Minimum value (%d) is greater than maximum value (%d)" % (options.min, options.max)) commaRE = re.compile(r',') runsToRemove = [] for chunk in options.runs: runs = commaRE.split(chunk) runsToRemove.extend(runs) alphaList = LumiList(filename=args[0]) # Read in first JSON file allRuns = alphaList.getRuns() for run in allRuns: if options.min and int(run) < options.min: runsToRemove.append(run) if options.max and int(run) > options.max: runsToRemove.append(run) alphaList.removeRuns(runsToRemove) if options.output: alphaList.writeJSON(options.output) else: print(alphaList)
# CRAB job-preparation fragment (Python 2), near-duplicate of the block
# above but preceded by the tail of the event-list read loop.  Writes a
# lumi-mask JSON and run:event list from 'eventList' and renders crab.cfg.
# 'eventList', 'source', 'options', 'crabTemplate' and 'setupCrabDict' come
# from the enclosing scope; the trailing dangling 'else:' belongs to
# 'if options.crab:' and its body is outside this view.
# NOTE(review): 'target.close' (no parentheses) never closes the crabcfg
# file handle — should be 'target.close()'.
eventList.append(event) source.close() if options.crab: ########## ## CRAB ## ########## if options.runInteractive: raise RuntimeError, "This job is can not be run interactive, but rather by crab. Please call without '--runInteractive' flag." runsAndLumis = [(event.run, event.lumi) for event in eventList] json = LumiList(lumis=runsAndLumis) eventsToProcess = '\n'.join(\ sorted( [ "%d:%d" % (event.run, event.event) for event in eventList ] ) ) crabDict = setupCrabDict(options) json.writeJSON(crabDict['json']) target = open(crabDict['runEvent'], 'w') target.write("%s\n" % eventsToProcess) target.close() target = open(crabDict['crabcfg'], 'w') target.write(crabTemplate % crabDict) target.close print "Please visit CRAB twiki for instructions on how to setup environment for CRAB:\nhttps://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideCrab\n" if options.crabCondor: print "You are running on condor. Please make sure you have read instructions on\nhttps://twiki.cern.ch/twiki/bin/view/CMS/CRABonLPCCAF\n" if not os.path.exists('%s/.profile' % os.environ.get('HOME')): print "** WARNING: ** You are missing ~/.profile file. Please see CRABonLPCCAF instructions above.\n" print "Setup your environment for CRAB. Then edit %(crabcfg)s to make any desired changed. The run:\n\ncrab -create -cfg %(crabcfg)s\ncrab -submit\n" % crabDict else:
# Standalone Python 2 script: for each ROOT file on argv, reads the
# evids/event_ids tree, compares the entry count against the expected
# half-sample count (nevents_orig/2 scaled by ana_filter_eff), and writes a
# per-sample JSON of the (run, lumi) pairs it contains.
# NOTE: the 'raise NotImplementedError' on the second statement is a
# deliberate guard — nothing below it executes until the run-2 sample
# architecture is implemented.
#!/usr/bin/env python raise NotImplementedError('run2 sample arch') import sys, os from FWCore.PythonUtilities.LumiList import LumiList from JMTucker.Tools.ROOTTools import * import JMTucker.Tools.Samples as Samples for fn in sys.argv[1:]: if os.path.isfile(fn): print fn name = os.path.basename(fn).replace('.root', '') sample = getattr(Samples, name) f = ROOT.TFile(fn) t = f.Get('evids/event_ids') n = t.GetEntries() n2 = int(sample.nevents_orig/2 * sample.ana_filter_eff) print '%30s: %s %s %s' % (name, n, n2, '\033[36;7m not equal \033[m' if n != n2 else '') rles = list(detree(t)) assert len(rles) == n ll = LumiList(lumis=[(r,l) for r,l,e in rles]) ll.writeJSON('%s.json' % name)
# main() of a magnetic-field JSON-selection tool, split across the next two
# physical lines (the 'msg = "..."' literal at the end of the first line is
# completed by 'msg.format(args.max)' at the start of the second).  Parses
# an optional input JSON plus --min/--max run bounds, queries the CMS Run
# Registry for runs whose B-field lies within --b-field ± --epsilon, and
# either filters the input LumiList down to those runs or builds a fresh
# LumiList from them; the result is written (with an .args sidecar) or
# printed.  Python 2 in places (print statements) despite argparse usage.
# NOTE(review): when --min/--max are omitted, 'input_runs[0]'/'input_runs[-1]'
# assume getRuns() returned a non-empty, sorted list — confirm LumiList
# guarantees ordering here.
# NOTE(review): 'rrapi' is a project-local Run Registry client, outside this
# view.
def main(argv = None): """Main routine of the script. Arguments: - `argv`: arguments passed to the main routine """ if argv == None: argv = sys.argv[1:] parser = argparse.ArgumentParser( description="Create JSON selection for a given magnetic field.") parser.add_argument("-i", "--input", dest="input", metavar="JSON", type=str, help="input JSON file") parser.add_argument("-o", "--output", dest="output", metavar="JSON", type=str, help="output JSON file") parser.add_argument("--min", dest="min", metavar="RUN", type=int, help="first run to be considered in the selection") parser.add_argument("--max", dest="max", metavar="RUN", type=int, help="last run to be considered in the selection") parser.add_argument("--epsilon", dest="epsilon", metavar="TESLA", default=0.1, type=float, help="precision of the filter (default: %(default)s T)") parser.add_argument("--debug", dest="debug", action="store_true", help="show more verbose output") required = parser.add_argument_group("required arguments") required.add_argument("--b-field", dest="bfield", metavar="TESLA", required=True, type=float, help="magnetic field to filter") args = parser.parse_args(argv) try: if args.input == None and (args.min == None or args.max == None): msg = ("If no input JSON file ('--input') is provided, you have to " "explicitly provide the first ('--min') and last ('--max') " "run.") raise RuntimeError(msg) if args.min != None and args.max != None and args.min > args.max: msg = "First run ({min:d}) is after last run ({max:d})." msg = msg.format(**args.__dict__) raise RuntimeError(msg) if args.max != None and args.max <= 0: msg = "Last run must be greater than zero: max = {0:d} <= 0." 
msg = msg.format(args.max) raise RuntimeError(msg) except RuntimeError as e: if args.debug: raise print ">>>", os.path.splitext(os.path.basename(__file__))[0]+":", str(e) sys.exit(1) lumi_list = None if not args.input else LumiList(filename = args.input) input_runs = None if not lumi_list else [int(r) for r in lumi_list.getRuns()] # Run registry API: https://twiki.cern.ch/twiki/bin/viewauth/CMS/DqmRrApi URL = "http://runregistry.web.cern.ch/runregistry/" api = rrapi.RRApi(URL, debug = args.debug) if api.app != "user": return column_list = ("number",) min_run = args.min if args.min != None else input_runs[0] max_run = args.max if args.max != None else input_runs[-1] bfield_min = args.bfield - args.epsilon bfield_max = args.bfield + args.epsilon constraints = { "datasetExists": "= true", "number": ">= {0:d} and <= {1:d}".format(min_run, max_run), "bfield": "> {0:f} and < {1:f}".format(bfield_min, bfield_max) } run_list = [item["number"] for item in api.data(workspace = "GLOBAL", table = "runsummary", template = "json", columns = column_list, filter = constraints)] if lumi_list != None: runs_to_remove = [] for run in input_runs: if run not in run_list: runs_to_remove.append(run) lumi_list.removeRuns(runs_to_remove) else: lumi_list = LumiList(runs = run_list) if args.output != None: lumi_list.writeJSON(args.output) with open(args.output+".args", "w") as f: f.write(" ".join(argv)+"\n") else: print lumi_list
# Tail of a JSON-splitting script (Python 2: print statements); 'options'
# and 'args' come from an optparse block outside this view.  Splits the
# lumis of one JSON into options.sec consecutive files, the last section
# absorbing any remainder.
# NOTE(review): 'lumis_per_sec = count_lumis/options.sec' relies on Python 2
# integer division; under Python 3 it yields a float and the slice indices
# below would raise TypeError.
# NOTE(review): 'commaRE' is compiled but never used in this view.
commaRE = re.compile (r',') alphaList = LumiList (filename = args[0]) # Read in first JSON file allLumis = alphaList.getLumis() count_lumis = 0 for (run, lumi) in allLumis: count_lumis += 1 lumis_per_sec = count_lumis/options.sec print 'Found %s lumis in file.' % count_lumis print 'Splitting into %s sections with %s lumis each.' % (options.sec, lumis_per_sec) count_lumis = 0 for sec in range(options.sec): ibegin = sec*lumis_per_sec iend = ibegin + lumis_per_sec if sec == options.sec - 1: iend = len(allLumis) tempList = LumiList (lumis = allLumis[ibegin:iend]) print 'Part %s: num lumis = %s' % (sec+1, len(tempList.getLumis())) count_lumis += len(tempList.getLumis()) filename = '%s_%s_%s' % (options.outbase, sec+1, options.outend) tempList.writeJSON (filename) print 'Total lumis = %s' % count_lumis
# pick_events CRAB-submission fragment; 'lumi_mask', 'dataset',
# 'events_to_process', 'crab_cfg' and 'just_testing' come from the
# enclosing scope, outside this view.  A lumi value of -1 for every entry
# marks an MC batch (no lumi mask); mixing -1 with real lumis is an error.
# For data a pick_events.json mask is written and the crab.cfg job control
# selects lumis; for MC it selects events.
# NOTE(review): 'open(..., "wt").write(...)' relies on CPython refcounting
# to flush/close the handle — consider an explicit 'with' block.
ls = set(l for r,l in lumi_mask) if ls == set([-1]): is_mc = True elif -1 in ls: raise ValueError('batch for dataset %s has lumis -1 and others' % dataset) else: is_mc = False if not is_mc: job_control = ''' lumi_mask = pick_events.json total_number_of_lumis = -1 lumis_per_job = 1''' ll = LumiList(lumis=lumi_mask) ll.writeJSON('pick_events.json') else: job_control = ''' total_number_of_events = -1 events_per_job = 100000''' scheduler = 'condor' if 'condor' in sys.argv else 'glite' open('crab.cfg', 'wt').write(crab_cfg % locals()) pset = open('pick_events.py').read() pset += '\nevents_to_process = ' pset += pformat(events_to_process) pset += '\nset_events_to_process(process, events_to_process)\n' open('pick_events_crab.py', 'wt').write(pset) if not just_testing:
def subtractLumis(json, jsonToSubtract): if (debug): print 'Subtracting %s from %s' % (jsonToSubtract, json) lumis = LumiList(filename=json) - LumiList(filename=jsonToSubtract) lumis.writeJSON(fileName=json)
# Python 2 twin of the run-removal tail above (uses the
# 'raise RuntimeError, msg' statement form and the print statement);
# 'parser' is defined earlier, outside this view.  Reads one JSON file,
# removes runs listed via --runs plus any outside [--min, --max], and
# writes or prints the result.
parser.add_option ('--output', dest='output', type='string', help='Save output to file OUTPUT') # required parameters (options, args) = parser.parse_args() if len (args) != 1: raise RuntimeError, "Must provide exactly one input file" if options.min and options.max and options.min > options.max: raise RuntimeError, "Minimum value (%d) is greater than maximum value (%d)" % (options.min, options.max) commaRE = re.compile (r',') runsToRemove = [] for chunk in options.runs: runs = commaRE.split (chunk) runsToRemove.extend (runs) alphaList = LumiList (filename = args[0]) # Read in first JSON file allRuns = alphaList.getRuns() for run in allRuns: if options.min and int(run) < options.min: runsToRemove.append (run) if options.max and int(run) > options.max: runsToRemove.append (run) alphaList.removeRuns (runsToRemove) if options.output: alphaList.writeJSON (options.output) else: print alphaList
def mergeLumis(json, jsonToMerge): if (debug): print 'Merging %s into %s' % (jsonToMerge, json) lumis = LumiList(filename=json) + LumiList(filename=jsonToMerge) lumis.writeJSON(fileName=json)
# Framework-job-report (FJR) parsing fragment (Python 2: iteritems, print
# statement).  The leading statements and the first 'except:' belong to a
# try block outside this view; the second handler parses the JSON-like
# 'Runs' representation found in CRAB XML reports into runsLumisDict, which
# is then emitted as a LumiList JSON.  'inputFile', 'fjr', 'runsLumisDict'
# and 'options' come from the enclosing scope.
# NOTE(review): both bare 'except:' clauses swallow every exception type —
# a malformed report is indistinguishable from a coding error here.
runNumber = int(run.ID) runList = runsLumisDict.setdefault(runNumber, []) for lumiPiece in run.LumiSection: lumi = int(lumiPiece.ID) runList.append(lumi) except: try: # JSON-like version in CRAB XML files, runObjects is usually a list if isinstance(inputFile.Runs, str): runObjects = [inputFile.Runs] else: runObjects = inputFile.Runs for runObject in runObjects: try: runs = ast.literal_eval(runObject) for (run, lumis) in runs.iteritems(): runList = runsLumisDict.setdefault( int(run), []) runList.extend(lumis) except ValueError: # Old style handled above pass except: print "Run missing in '%s'. Skipping." % fjr continue jsonList = LumiList(runsAndLumis=runsLumisDict) if options.output: jsonList.writeJSON(options.output) else: print jsonList
# Duplicate of the tail of the b-field main() above, plus the script entry
# guard.  Queries the Run Registry for runs in [min_run, max_run] with
# B-field inside (bfield_min, bfield_max), then either filters the existing
# lumi_list down to those runs or builds a new LumiList from them, and
# writes (with an .args sidecar) or prints the result.  'api', 'min_run',
# 'max_run', 'bfield_min', 'bfield_max', 'column_list', 'lumi_list',
# 'input_runs', 'args' and 'argv' come from earlier in main(), outside this
# view.  Python 2 ('print lumi_list').
constraints = { "datasetExists": "= true", "number": ">= {0:d} and <= {1:d}".format(min_run, max_run), "bfield": "> {0:f} and < {1:f}".format(bfield_min, bfield_max) } run_list = [item["number"] for item in api.data(workspace = "GLOBAL", table = "runsummary", template = "json", columns = column_list, filter = constraints)] if lumi_list != None: runs_to_remove = [] for run in input_runs: if run not in run_list: runs_to_remove.append(run) lumi_list.removeRuns(runs_to_remove) else: lumi_list = LumiList(runs = run_list) if args.output != None: lumi_list.writeJSON(args.output) with open(args.output+".args", "w") as f: f.write(" ".join(argv)+"\n") else: print lumi_list if __name__ == "__main__": main()