def query_lumis_in_dataset(dataset, current, threads=7):
    """Query the lumi sections of every file in *dataset* not already in *current*.

    dataset -- dataset name handed to query_files()
    current -- dict mapping filename -> lumi summary; files already present
               are skipped, and the return value starts as a deep copy of it
    threads -- number of worker threads draining the work queue (default 7)

    Returns a new dict (copy of *current* plus freshly queried files);
    *current* itself is never modified.
    """
    files = query_files(dataset)
    to_query = Queue()
    count = 0
    for fname in files:
        if fname not in current:
            count += 1
            to_query.put(fname)
    log.info("Getting lumis from %i files", count)
    results = Queue()
    workers = []
    for _ in range(threads):
        worker = Thread(target=query_lumi_from_queue, args=(to_query, results))
        # Daemonize the workers: once the queue is drained they block forever
        # in to_query.get(), and non-daemon threads would keep the interpreter
        # alive after the main thread returns (or after a Ctrl-C).
        worker.daemon = True
        worker.start()
        workers.append(worker)
    # Wait for everything to be processed
    try:
        to_query.join()
    except KeyboardInterrupt:
        log.error("Caught Ctrl-C, quitting")
    log.info("Finished getting lumis")
    output = copy.deepcopy(current)
    # Drain whatever the workers produced; results.empty() is reliable here
    # because all queued work was join()ed above.
    while not results.empty():
        fname, lumis = results.get()
        output[fname] = json_summary(lumis)
    return output
def query_lumis_in_dataset(dataset, current, threads=7):
    """Fetch lumi summaries for all files of *dataset* missing from *current*.

    Spawns *threads* workers that drain a work queue via
    query_lumi_from_queue and push (file, lumis) pairs onto a result queue.
    Returns a deep copy of *current* augmented with the new summaries.
    """
    pending = [name for name in query_files(dataset) if name not in current]
    to_query = Queue()
    for name in pending:
        to_query.put(name)
    log.info("Getting lumis from %i files", len(pending))
    results = Queue()
    workers = []
    for _ in range(threads):
        thread = Thread(target=query_lumi_from_queue, args=(to_query, results))
        thread.start()
        workers.append(thread)
    # Block until every queued file has been marked done by a worker.
    try:
        to_query.join()
    except KeyboardInterrupt:
        log.error("Caught Ctrl-C, quitting")
    log.info("Finished getting lumis")
    output = copy.deepcopy(current)
    while not results.empty():
        name, lumis = results.get()
        output[name] = json_summary(lumis)
    return output
# Scan every input ROOT file once: count events via the per-entry `nevents`
# branch and (when building a lumimask) record which file each (run, lumi)
# pair came from so duplicates across files can be flagged.
log.info("Extracting meta info from %i files", len(files))
total_events = 0
run_lumis = {}  # (run, lumi) -> file that provided it
for file in files:
    log.debug("OPEN file %s", file)
    tfile = ROOT.TFile.Open(file, "READ")
    tree = tfile.Get(args.tree)
    for entry in xrange(tree.GetEntries()):
        tree.GetEntry(entry)
        total_events += tree.nevents
        # We only care about this if we are building the lumimask
        if args.lumimask:
            run_lumi = (tree.run, tree.lumi)
            log.debug("R-L %s %s", repr(run_lumi), file)
            if run_lumi in run_lumis:
                # Same lumi section seen in two files: double-counting risk.
                log.error("Run-lumi %s found in file \n%s \nand %s!",
                          run_lumi, file, run_lumis[run_lumi])
            run_lumis[run_lumi] = file
    tfile.Close()
output = {
    'n_evts': total_events,
}
if args.lumimask:
    output['lumi_mask'] = json_summary(run_lumis)
# NOTE(review): structure reconstructed from a whitespace-mangled source;
# tfile.Close() is assumed to sit inside the per-file loop -- confirm.
with open(args.output, 'w') as output_file:
    output_file.write(json.dumps(output, indent=2, sort_keys=True) + '\n')
# Walk the input ROOT files, summing events and -- if a lumimask was
# requested -- remembering the source file of each (run, lumi) pair so a
# pair appearing in two files gets reported as an error.
log.info("Extracting meta info from %i files", len(files))
total_events = 0
run_lumis = {}  # maps (run, lumi) -> originating file
for file in files:
    log.debug("OPEN file %s", file)
    tfile = ROOT.TFile.Open(file, "READ")
    tree = tfile.Get(args.tree)
    for entry in xrange(tree.GetEntries()):
        tree.GetEntry(entry)
        total_events += tree.nevents
        # We only care about this if we are building the lumimask
        if args.lumimask:
            run_lumi = (tree.run, tree.lumi)
            log.debug("R-L %s %s", repr(run_lumi), file)
            if run_lumi in run_lumis:
                # Duplicate lumi section across files -- flag it loudly.
                log.error("Run-lumi %s found in file \n%s \nand %s!",
                          run_lumi, file, run_lumis[run_lumi])
            run_lumis[run_lumi] = file
    tfile.Close()
output = {
    'n_evts' : total_events,
}
if args.lumimask:
    # Collapse the observed (run, lumi) pairs into the compact summary form.
    output['lumi_mask'] = json_summary(run_lumis)
# NOTE(review): nesting reconstructed from mangled whitespace -- verify
# that tfile.Close() belongs at the end of each file-loop iteration.
with open(args.output, 'w') as output_file:
    output_file.write(json.dumps(output, indent=2, sort_keys=True) + '\n')
Usage: cat mylist.txt | eventlist2lumimask.py > mylist.json Author: Evan K. Friis, UW Madison ''' import sys import json import FinalStateAnalysis.Utilities.lumitools as lumitools if __name__ == "__main__": run_lumis = set([]) for line in sys.stdin.readlines(): # Convert to space separated line = line.replace('*', ' ').strip() if not line or 'Row' in line: continue fields = line.split() # always take last 3 fields assert(len(fields) >= 3) run, lumi, event = int(fields[-3]), int(fields[-2]), int(fields[-1]) run_lumis.add( (run, lumi) ) json.dump(lumitools.json_summary(run_lumis), sys.stdout, indent=2, sort_keys=True)
def query_lumis_in_dataset(dataset):
    """Return the JSON-style summary of every lumi found in *dataset*."""
    # Accumulate the union of the lumi sets reported for each file.
    all_lumis = set()
    for fname in query_files(dataset):
        all_lumis = all_lumis | query_lumis(fname)
    return json_summary(all_lumis)
cat mylist.txt | eventlist2lumimask.py > mylist.json Author: Evan K. Friis, UW Madison ''' import sys import json import FinalStateAnalysis.Utilities.lumitools as lumitools if __name__ == "__main__": run_lumis = set([]) for line in sys.stdin.readlines(): # Convert to space separated line = line.replace('*', ' ').strip() if not line or 'Row' in line: continue fields = line.split() # always take last 3 fields assert (len(fields) >= 3) run, lumi, event = int(fields[-3]), int(fields[-2]), int(fields[-1]) run_lumis.add((run, lumi)) json.dump(lumitools.json_summary(run_lumis), sys.stdout, indent=2, sort_keys=True)
def query_lumis_in_dataset(dataset):
    ''' Get all lumis in a dataset '''
    # Union the lumi sets of every file in the dataset, then collapse the
    # result into the compact JSON summary format.
    lumis = set([])
    for file in query_files(dataset):
        lumis |= query_lumis(file)
    return json_summary(lumis)
import json
import FinalStateAnalysis.Utilities.lumitools as lumitools
from RecoLuminosity.LumiDB import argparse
import sys

if __name__ == "__main__":
    # CLI: lumimask1 {+,-,and} lumimask2 -> combined lumi mask on stdout.
    parser = argparse.ArgumentParser(
        description='Subtract one lumi mask from another. '
        'Can add ":first:last" to select a subset of runs')
    parser.add_argument('lumimask1', help='Lumimask 1')
    parser.add_argument('operation', help='Operation to apply',
                        choices = ['+', '-', 'and'])
    parser.add_argument('lumimask2', help='Lumimask 2')
    args = parser.parse_args()
    # Load lumis
    # NOTE(review): assumes lumi_list_from_file returns real `set` instances;
    # the unbound set.union/difference/intersection calls below raise
    # TypeError on any other type -- confirm against lumitools.
    lumis1 = lumitools.lumi_list_from_file(args.lumimask1)
    lumis2 = lumitools.lumi_list_from_file(args.lumimask2)
    # Dispatch table mapping the CLI token to the matching set operation.
    set_operations = {
        '+' : set.union,
        '-' : set.difference,
        'and' : set.intersection,
    }
    result = set_operations[args.operation](lumis1, lumis2)
    result_summary = lumitools.json_summary(result)
    json.dump(result_summary, sys.stdout, indent=2, sort_keys=True)