def limit(signal_n, background_n, bkg_uncert):
    """Compute an expected-limit figure of merit with ROOT's TLimit.

    Parameters:
        signal_n     -- expected signal yield (single counting bin)
        background_n -- expected background yield
        bkg_uncert   -- relative background uncertainty fed to TLimit

    Returns 1 - TConfidenceLevel.GetExpectedCLs_b(), computed from 5000
    pseudo-experiments.
    """
    # Random name suffixes keep ROOT's global object registry from clashing
    # when this function is called repeatedly in one process.
    signal = TH1F("signal" + str(random.randrange(1e6)), "signal", 1, 0, 1)
    background = TH1F("background" + str(random.randrange(1e6)), "background", 1, 0, 1)
    # NOTE(review): `data` is left empty -- presumably acceptable because only
    # the *expected* CLs is read below; confirm before using observed limits.
    data = TH1F("data" + str(random.randrange(1e6)), "data", 1, 0, 1)
    signal.Sumw2()
    signal.SetBinContent(1, signal_n)
    signal.SetBinError(1, sqrt(signal_n))
    background.Sumw2()
    background.SetBinContent(1, background_n)
    background.SetBinError(1, sqrt(background_n))
    errorsignal = TVectorD(1)
    errorbackground = TVectorD(1)
    errorsignal[0] = 0.20  # hardcoded to 2015 approximate value
    errorbackground[0] = bkg_uncert
    names = TObjArray()
    # Keep named references alive until the function returns so PyROOT does
    # not garbage-collect objects the TObjArray still points at.
    name1 = TObjString("bkg uncertainty")
    name2 = TObjString("sig uncertainty")
    names.AddLast(name1)
    names.AddLast(name2)
    # FIX: removed the dead `datasource = TLimitDataSource()` store that was
    # immediately overwritten by the real construction below.
    datasource = TLimitDataSource(signal, background, data, errorsignal,
                                  errorbackground, names)
    confidence = TConfidenceLevel(TLimit.ComputeLimit(datasource, 5000))
    return 1 - confidence.GetExpectedCLs_b()
def recursiveMerge(target, infile, path='', cache={'TOTALLUMI': 0}, cutflow=True): l = infile.GetDirectory(path) keys = l.GetListOfKeys() cycles = {} for entry in range(keys.GetEntries()): name = keys.At(entry).GetName() + ";" + str(keys.At(entry).GetCycle()) if path: cachename = path + "/" + name else: cachename = name obj = l.Get(name) if type(obj) == TDirectoryFile: #print obj, "DIRECTORY" targetpath = keys.At(entry).GetName() if not target.Get(targetpath): target.mkdir(targetpath) recursiveMerge(target, infile, path + "/" + obj.GetName(), cache) elif type(obj) == TTree: # print obj, cachename, "TTree" cyclename, cyclenumber = cachename.split(';') if cyclename in cycles: continue # print cachename, "Used!" cycles[cyclename] = cyclenumber if not cyclename in cache: target.cd(path) cache[cyclename] = obj.CloneTree() else: objcached = cache[cyclename] col = TObjArray() col.Add(obj) objcached.Merge(col) elif issubclass(obj.__class__, TH1): #print obj, "TH1" if not cutflow and keys.At(entry).GetName() == "CutFlow": continue if not cachename in cache: target.cd(path) cache[cachename] = obj.Clone() else: objcached = cache[cachename] col = TObjArray() col.Add(obj) objcached.Merge(col) elif type(obj) == TObjString: #print type(obj), name, "TObjString" if obj: target.cd(path) objnew = TObjString(obj.GetString().Data()) objnew.Write(keys.At(entry).GetName()) cache['TOTALLUMI'] += 1 else: print "UNKNOWN OBJECT", name, "OF TYPE", type(obj)
def dict2rootmap(dict):
    '''Convert a {str: [str, ...]} dict into a ROOT TMap of TObjString -> TList.

    Assumes keys are strings, and vals are lists of strings.  The parameter
    name shadows the builtin `dict` but is kept unchanged for backward
    compatibility with keyword callers.
    '''
    # FIX: renamed the local from `map` so it no longer shadows the builtin.
    rootmap = TMap()
    for (key, entries) in dict.iteritems():
        rootkey = TObjString(key)
        rootval = TList()
        for entry in entries:
            rootval.Add(TObjString(entry))
        rootmap.Add(rootkey, rootval)
    return rootmap
def test_lt(self):
    """TObjString '<' must order correctly vs TObjString, TString and str."""
    lo = TObjString(self.test_str1)
    hi = TObjString(self.test_str2)
    # TObjString vs TObjString
    self.assertTrue(lo < hi)
    self.assertFalse(hi < lo)
    # TObjString vs TString
    ts_lo = TString(self.test_str1)
    ts_hi = TString(self.test_str2)
    self.assertTrue(lo < ts_hi)
    self.assertFalse(hi < ts_lo)
    # TObjString vs native Python string
    self.assertTrue(lo < self.test_str2)
    self.assertFalse(hi < self.test_str1)
def test_list_sort(self):
    """Sorting a reversed list of TObjStrings restores the original order."""
    ordered = [TObjString(str(n)) for n in range(self.num_elems)]
    shuffled = ordered[::-1]
    self.assertNotEqual(ordered, shuffled)
    # Sorting relies on the TObjString comparison operators under test.
    shuffled.sort()
    self.assertEqual(ordered, shuffled)
def copyLumi(inputdir):
    """Copy every object in each input file's "Lumi" directory into the
    "Lumi" directory of the global `outputFile`.

    Files without a readable Lumi directory are skipped (best effort).
    """
    inputpath = listifyInputFiles(inputdir)
    lumidir = outputFile.mkdir("Lumi")
    for d in inputpath:
        f = TFile.Open(d)
        try:
            l = f.GetDirectory("Lumi")
            keys = l.GetListOfKeys()
            for entry in range(keys.GetEntries()):
                objstr = l.Get(
                    keys.At(entry).GetName() + ";" +
                    str(keys.At(entry).GetCycle()))
                if objstr:
                    lumidir.cd()
                    objnew = TObjString(objstr.GetString().Data())
                    objnew.Write(keys.At(entry).GetName())
        except Exception:
            # Deliberate best effort: a file without a usable Lumi directory
            # is skipped.  FIX: narrowed from a bare `except:` so that
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            pass
        finally:
            # FIX: always close the file, and guard against TFile.Open
            # returning a null handle for an unreadable path.
            if f:
                f.Close()
    outputFile.cd()
def test_ne(self):
    """TObjString '!=' vs TObjString, TString, str and non-string operands."""
    a = TObjString(self.test_str1)
    b = TObjString(self.test_str1)
    c = TObjString(self.test_str2)
    # Same payload -> equal; different payload -> not equal
    self.assertFalse(a != b)
    self.assertTrue(a != c)
    # Against TString
    ts_same = TString(self.test_str1)
    ts_other = TString(self.test_str2)
    self.assertFalse(a != ts_same)
    self.assertTrue(a != ts_other)
    # Against plain Python strings
    self.assertFalse(a != self.test_str1)
    self.assertTrue(a != self.test_str2)
    # A non-string operand is never equal
    self.assertTrue(a != 1)
def test_dynamiccast(self):
    """TClass::DynamicCast must up- and down-cast between TObject and TObjString."""
    tobj_class = TClass.GetClass("TObject")
    tobjstr_class = TClass.GetClass("TObjString")
    o = TObjString("a")
    # Upcast: TObject <- TObjString
    o_upcast = tobjstr_class.DynamicCast(tobj_class, o)
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual(type(o_upcast), TObject)
    # Downcast: TObject -> TObjString
    o_downcast = tobjstr_class.DynamicCast(tobj_class, o_upcast, False)
    self.assertEqual(type(o_downcast), TObjString)
def test1Strings(self):
    """Test string/TString/TObjString compatibility"""
    original = "aap noot mies"
    # str <-> TString round trip
    as_tstring = TString(original)
    roundtrip = str(as_tstring)
    self.assertEqual(as_tstring, roundtrip)
    # str <-> TObjString round trip
    as_tobjstring = TObjString(roundtrip)
    self.assertEqual(roundtrip, as_tobjstring)
    self.assertEqual(roundtrip, original)
class PickledStringMerger(DefaultMerger):
    """Merges TObjStrings whose payload is a pickled Python object.

    Payloads are unpickled and folded together with a type-appropriate
    function from `pyobject_merge_registry`; `finish` re-pickles the result
    into a TObjString and writes it out.
    """

    # Maps the concrete type of the unpickled payload to the binary function
    # used to fold two payloads into one.  (Python 2: `long`/`unicode`.)
    pyobject_merge_registry = {
        int: int.__add__,
        long: long.__add__,
        set: set.union,
        list: list.__add__,
        str: str.__add__,
        unicode: unicode.__add__,
        dict: dict_merge,
        defaultdict: dict_merge,
    }

    def unpickle_string(self, string):
        """Unpickle a TObjString's payload; raise UnableToMerge for non-pickle data.

        NOTE(review): loads() on file contents is only safe for trusted input
        files -- pickle can execute arbitrary code on unpickling.
        """
        try:
            return loads(string.GetName())
        except UnpicklingError:
            raise UnableToMerge

    def __init__(self, first_object, target_directory):
        # Seed the merge with the first object's payload and select the
        # merger function once, from that payload's concrete type.
        self.merged_object = self.unpickle_string(first_object)
        t = type(self.merged_object)
        self.merger_function = self.pyobject_merge_registry.get(t)
        if not self.merger_function:
            raise RuntimeError("I don't know how to merge objects of type %r" % t)

    def merge(self, next_object):
        """Fold `next_object`'s unpickled payload into the running result."""
        do_merge = self.merger_function
        try:
            next_object = self.unpickle_string(next_object)
        except UnpicklingError:
            # Silently ignore non-python strings
            # NOTE(review): unpickle_string converts UnpicklingError into
            # UnableToMerge, so this handler only fires if UnableToMerge
            # subclasses UnpicklingError -- verify against its definition.
            return
        self.merged_object = do_merge(self.merged_object, next_object)
        #UnpicklingError
        #new_value = self.merged_object.GetVal() + next_object.GetVal()
        #self.merged_object.SetVal(new_value)

    def finish(self, key_name):
        """Re-pickle the merged value and write it to the file under `key_name`."""
        self.merged_object = TObjString(dumps(self.merged_object))
        self.merged_object.Write(key_name)
def test2Lists(self):
    """Test list/TList behavior and compatibility"""
    # Build a TList of ten single-character TObjStrings 'a'..'j'.
    l = TList()
    l.Add(TObjString('a'))
    l.Add(TObjString('b'))
    l.Add(TObjString('c'))
    l.Add(TObjString('d'))
    l.Add(TObjString('e'))
    l.Add(TObjString('f'))
    l.Add(TObjString('g'))
    l.Add(TObjString('h'))
    l.Add(TObjString('i'))
    l.Add(TObjString('j'))
    # len(), indexing (positive/negative) and out-of-range errors.
    self.assertEqual(len(l), 10)
    self.assertEqual(l[3], 'd')
    self.assertEqual(l[-1], 'j')
    self.assertRaises(IndexError, l.__getitem__, 20)
    self.assertRaises(IndexError, l.__getitem__, -20)
    self.assertEqual(list(l),
                     ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'])
    # Item assignment and deletion.
    l[3] = TObjString('z')
    self.assertEqual(list(l),
                     ['a', 'b', 'c', 'z', 'e', 'f', 'g', 'h', 'i', 'j'])
    del l[2]
    self.assertEqual(list(l), ['a', 'b', 'z', 'e', 'f', 'g', 'h', 'i', 'j'])
    # Membership.
    self.assert_(TObjString('b') in l)
    self.assert_(not TObjString('x') in l)
    # Slicing: read, stepped, negative, open-ended.
    self.assertEqual(list(l[2:6]), ['z', 'e', 'f', 'g'])
    self.assertEqual(list(l[2:6:2]), ['z', 'f'])
    self.assertEqual(list(l[-5:-2]), ['f', 'g', 'h'])
    self.assertEqual(list(l[7:]), ['i', 'j'])
    self.assertEqual(list(l[:3]), ['a', 'b', 'z'])
    # Slice deletion and slice assignment (including zero-width insert).
    del l[2:4]
    self.assertEqual(list(l), ['a', 'b', 'f', 'g', 'h', 'i', 'j'])
    l[2:5] = [TObjString('1'), TObjString('2')]
    self.assertEqual(list(l), ['a', 'b', '1', '2', 'i', 'j'])
    l[6:6] = [TObjString('3')]
    self.assertEqual(list(l), ['a', 'b', '1', '2', 'i', 'j', '3'])
    # append / extend / count / index.
    l.append(TObjString('4'))
    self.assertEqual(list(l), ['a', 'b', '1', '2', 'i', 'j', '3', '4'])
    l.extend([TObjString('5'), TObjString('j')])
    self.assertEqual(list(l), ['a', 'b', '1', '2', 'i', 'j', '3', '4', '5', 'j'])
    self.assertEqual(l.count('b'), 1)
    self.assertEqual(l.count('j'), 2)
    self.assertEqual(l.count('x'), 0)
    self.assertEqual(l.index(TObjString('i')), 4)
    self.assertRaises(ValueError, l.index, TObjString('x'))
    # insert: in-range, past-the-end, and negative index.
    l.insert(3, TObjString('6'))
    l.insert(20, TObjString('7'))
    l.insert(-1, TObjString('8'))
    if not self.legacy_pyroot:
        # The pythonisation of TSeqCollection in experimental PyROOT mimics
        # the behaviour of the Python list, in this case for insert.
        # Python's list.insert always inserts before the specified index, so
        # for -1 the new element lands right before the last element.
        self.assertEqual(list(l), [
            'a', 'b', '1', '6', '2', 'i', 'j', '3', '4', '5', 'j', '8', '7'
        ])
        # Re-synchronize with current PyROOT's list
        l.insert(0, TObjString('8'))
        self.assertEqual(list(l), [
            '8', 'a', 'b', '1', '6', '2', 'i', 'j', '3', '4', '5', 'j', '8', '7'
        ])
        l.pop(-2)
        self.assertEqual(list(l), [
            '8', 'a', 'b', '1', '6', '2', 'i', 'j', '3', '4', '5', 'j', '7'
        ])
    else:
        # Legacy PyROOT treated insert(-1, x) as insert-at-front.
        self.assertEqual(list(l), [
            '8', 'a', 'b', '1', '6', '2', 'i', 'j', '3', '4', '5', 'j', '7'
        ])
    # pop from the end and by index.
    self.assertEqual(l.pop(), '7')
    self.assertEqual(l.pop(3), '1')
    self.assertEqual(
        list(l), ['8', 'a', 'b', '6', '2', 'i', 'j', '3', '4', '5', 'j'])
    # remove: first occurrence only; missing element raises.
    l.remove(TObjString('j'))
    l.remove(TObjString('3'))
    self.assertRaises(ValueError, l.remove, TObjString('x'))
    self.assertEqual(list(l), ['8', 'a', 'b', '6', '2', 'i', '4', '5', 'j'])
    # reverse and default sort.
    l.reverse()
    self.assertEqual(list(l), ['j', '5', '4', 'i', '2', '6', 'b', 'a', '8'])
    l.sort()
    self.assertEqual(list(l), ['2', '4', '5', '6', '8', 'a', 'b', 'i', 'j'])
    # Descending sort: key= on Python 3, cmp= on Python 2.
    if sys.hexversion >= 0x3000000:
        l.sort(key=TObjString.GetName)
        l.reverse()
    else:
        l.sort(lambda a, b: cmp(b.GetName(), a.GetName()))
    self.assertEqual(list(l), ['j', 'i', 'b', 'a', '8', '6', '5', '4', '2'])
    # Repetition (both operand orders and in-place) and concatenation.
    l2 = l[:3]
    self.assertEqual(list(l2 * 3),
                     ['j', 'i', 'b', 'j', 'i', 'b', 'j', 'i', 'b'])
    self.assertEqual(list(3 * l2),
                     ['j', 'i', 'b', 'j', 'i', 'b', 'j', 'i', 'b'])
    l2 *= 3
    self.assertEqual(list(l2),
                     ['j', 'i', 'b', 'j', 'i', 'b', 'j', 'i', 'b'])
    l2 = l[:3]
    l3 = l[6:8]
    self.assertEqual(list(l2 + l3), ['j', 'i', 'b', '5', '4'])
    # Manual iteration protocol (py2 `next` / py3 `__next__`).
    if sys.hexversion >= 0x3000000:
        next = '__next__'
    else:
        next = 'next'
    i = iter(l2)
    self.assertEqual(getattr(i, next)(), 'j')
    self.assertEqual(getattr(i, next)(), 'i')
    self.assertEqual(getattr(i, next)(), 'b')
    self.assertRaises(StopIteration, getattr(i, next))
def test2Lists(self):
    """Test list/TList behavior and compatibility"""
    # Build a TList of ten single-character TObjStrings 'a'..'j'.
    l = TList()
    l.Add(TObjString('a'))
    l.Add(TObjString('b'))
    l.Add(TObjString('c'))
    l.Add(TObjString('d'))
    l.Add(TObjString('e'))
    l.Add(TObjString('f'))
    l.Add(TObjString('g'))
    l.Add(TObjString('h'))
    l.Add(TObjString('i'))
    l.Add(TObjString('j'))
    # len(), indexing (positive/negative) and out-of-range errors.
    self.assertEqual(len(l), 10)
    self.assertEqual(l[3], 'd')
    self.assertEqual(l[-1], 'j')
    self.assertRaises(IndexError, l.__getitem__, 20)
    self.assertRaises(IndexError, l.__getitem__, -20)
    self.assertEqual(list(l),
                     ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'])
    # Item assignment and deletion.
    l[3] = TObjString('z')
    self.assertEqual(list(l),
                     ['a', 'b', 'c', 'z', 'e', 'f', 'g', 'h', 'i', 'j'])
    del l[2]
    self.assertEqual(list(l), ['a', 'b', 'z', 'e', 'f', 'g', 'h', 'i', 'j'])
    # Membership.
    self.assert_(TObjString('b') in l)
    self.assert_(not TObjString('x') in l)
    # Slicing: read, stepped, negative, open-ended.
    self.assertEqual(list(l[2:6]), ['z', 'e', 'f', 'g'])
    self.assertEqual(list(l[2:6:2]), ['z', 'f'])
    self.assertEqual(list(l[-5:-2]), ['f', 'g', 'h'])
    self.assertEqual(list(l[7:]), ['i', 'j'])
    self.assertEqual(list(l[:3]), ['a', 'b', 'z'])
    # Slice deletion and slice assignment (including zero-width insert).
    del l[2:4]
    self.assertEqual(list(l), ['a', 'b', 'f', 'g', 'h', 'i', 'j'])
    l[2:5] = [TObjString('1'), TObjString('2')]
    self.assertEqual(list(l), ['a', 'b', '1', '2', 'i', 'j'])
    l[6:6] = [TObjString('3')]
    self.assertEqual(list(l), ['a', 'b', '1', '2', 'i', 'j', '3'])
    # append / extend / count / index.
    l.append(TObjString('4'))
    self.assertEqual(list(l), ['a', 'b', '1', '2', 'i', 'j', '3', '4'])
    l.extend([TObjString('5'), TObjString('j')])
    self.assertEqual(list(l), ['a', 'b', '1', '2', 'i', 'j', '3', '4', '5', 'j'])
    self.assertEqual(l.count('b'), 1)
    self.assertEqual(l.count('j'), 2)
    self.assertEqual(l.count('x'), 0)
    self.assertEqual(l.index(TObjString('i')), 4)
    self.assertRaises(ValueError, l.index, TObjString('x'))
    # insert: in-range, past-the-end, and negative index.
    # NOTE(review): this older copy of the test asserts the legacy PyROOT
    # behaviour where insert(-1, x) lands at the front of the list.
    l.insert(3, TObjString('6'))
    l.insert(20, TObjString('7'))
    l.insert(-1, TObjString('8'))
    self.assertEqual(
        list(l),
        ['8', 'a', 'b', '1', '6', '2', 'i', 'j', '3', '4', '5', 'j', '7'])
    # pop from the end and by index.
    self.assertEqual(l.pop(), '7')
    self.assertEqual(l.pop(3), '1')
    self.assertEqual(
        list(l), ['8', 'a', 'b', '6', '2', 'i', 'j', '3', '4', '5', 'j'])
    # remove: first occurrence only; missing element raises.
    l.remove(TObjString('j'))
    l.remove(TObjString('3'))
    self.assertRaises(ValueError, l.remove, TObjString('x'))
    self.assertEqual(list(l), ['8', 'a', 'b', '6', '2', 'i', '4', '5', 'j'])
    # reverse and default sort.
    l.reverse()
    self.assertEqual(list(l), ['j', '5', '4', 'i', '2', '6', 'b', 'a', '8'])
    l.sort()
    self.assertEqual(list(l), ['2', '4', '5', '6', '8', 'a', 'b', 'i', 'j'])
    # Descending sort: key= on Python 3, cmp= on Python 2.
    if sys.hexversion >= 0x3000000:
        l.sort(key=TObjString.GetName)
        l.reverse()
    else:
        l.sort(lambda a, b: cmp(b.GetName(), a.GetName()))
    self.assertEqual(list(l), ['j', 'i', 'b', 'a', '8', '6', '5', '4', '2'])
    # Repetition (both operand orders and in-place) and concatenation.
    l2 = l[:3]
    self.assertEqual(list(l2 * 3),
                     ['j', 'i', 'b', 'j', 'i', 'b', 'j', 'i', 'b'])
    self.assertEqual(list(3 * l2),
                     ['j', 'i', 'b', 'j', 'i', 'b', 'j', 'i', 'b'])
    l2 *= 3
    self.assertEqual(list(l2),
                     ['j', 'i', 'b', 'j', 'i', 'b', 'j', 'i', 'b'])
    l2 = l[:3]
    l3 = l[6:8]
    self.assertEqual(list(l2 + l3), ['j', 'i', 'b', '5', '4'])
    # Manual iteration protocol (py2 `next` / py3 `__next__`).
    if sys.hexversion >= 0x3000000:
        next = '__next__'
    else:
        next = 'next'
    i = iter(l2)
    self.assertEqual(getattr(i, next)(), 'j')
    self.assertEqual(getattr(i, next)(), 'i')
    self.assertEqual(getattr(i, next)(), 'b')
    self.assertRaises(StopIteration, getattr(i, next))
# Merge trigger / ID / isolation efficiency inputs into a single
# scale-factor file, output.root.
trig = root_open('TriggerSF_v1.root')
lepid = root_open('ideff.root')
iso = root_open('tracking_eff.root')  #tracking eff as iso given is a 2D plot
#trk = root_open('ratios.root')
trg = transpose(trig.Ele32_eta2p1_WPTight_Gsf__EffData)
# trg1 = trig.IsoMu22_OR_IsoTkMu22_PtEtaBins_Run273158_to_274093.efficienciesDATA.abseta_pt_DATA
# trg2 = trig.IsoMu22_OR_IsoTkMu22_PtEtaBins_Run274094_to_276097.efficienciesDATA.abseta_pt_DATA
# trg = trg1*0.0482 + trg2*0.9517
#
# htrk = graph2hist(trk.ratio_eta)

#read itself and dump code -- this script's own source is archived in the output
code = TObjString(open('merge_sf.py').read())

# Bookkeeping flags packed into an integer histogram.
# NOTE(review): a 3-bin Hist is indexed 0..4 here, so entries 0 and 4
# presumably land in the under-/overflow bins (rootpy indexing) -- confirm
# that downstream readers expect that layout.
info = Hist(3, 0, 3, type='I')
info[0].value = 0  #0 pt as Y, 1 pt as X
info[1].value = 0  #trig SF in |eta| (1) of full eta (0)
info[2].value = 0  #ID SF in |eta| (1) of full eta (0)
info[3].value = 0  #Iso SF in |eta| (1) of full eta (0)
info[4].value = 0  #tracking SF in |eta| (1) of full eta (0)

with root_open('output.root', 'w') as out:
    out.WriteTObject(trg, 'trg')
    out.WriteTObject(fill_oflow(lepid.EGamma_SF2D.Clone()), 'id')
    out.WriteTObject(fill_oflow(iso.EGamma_SF2D.Clone()), 'iso')
    # out.WriteTObject(htrk, 'trk')
    out.WriteTObject(info, 'info')
    out.WriteTObject(code, 'code')
def makeCard(TYPE, mZp, mChi, DIR):
    """Create a datacard + RooWorkspace for a new (mZp, mChi) signal point by
    rescaling the template mass point.

    TYPE -- "BARY" (Z' baryonic) or "2HDM"
    mZp  -- new mediator mass, as a string
    mChi -- new DM mass (BARY) or A0 mass (2HDM), as a string
    DIR  -- directory holding the template card/workspace; outputs go there too
    """
    # Setup input and output files
    indir = DIR
    old_str = ""
    new_str = ""
    if (TYPE == "BARY"):
        old_str = "sig_ZpBaryonic_mZP10_mChi1"
        new_str = "sig_ZpBaryonic_mZP" + mZp + "_mChi" + mChi
    if (TYPE == "2HDM"):
        old_str = "sig_2HDM_mZP600_mA0300"
        new_str = "sig_2HDM_mZP" + mZp + "_mA0" + mChi
    fin = open(indir + "dataCard_" + old_str + "_13TeV.txt", "r")
    fout = open(indir + "dataCard_" + new_str + "_13TeV.txt", "w")
    rin = ROOT.TFile(indir + old_str + "_13TeV.root")
    rout = ROOT.TFile(indir + new_str + "_13TeV.root", "RECREATE")

    # Copy the datacard for the new mass point, substituting the sample name
    for line in fin:
        if old_str in line:
            line = line.replace(old_str, new_str)
        fout.write(line)

    # Get the old and new efficiencies (template point vs requested point)
    mZ = float(mZp)
    mDM = float(mChi)
    old_efflowMET = 1.0
    old_effhighMET = 1.0
    new_efflowMET = 1.0
    new_effhighMET = 1.0
    if (TYPE == "BARY"):
        old_efflowMET = getEffBary(0, 10, 1)
        old_effhighMET = getEffBary(1, 10, 1)
        new_efflowMET = getEffBary(0, mZ, mDM)
        new_effhighMET = getEffBary(1, mZ, mDM)
    if (TYPE == "2HDM"):
        old_efflowMET = getEff2HDM(0, 600, 300)
        old_effhighMET = getEff2HDM(1, 600, 300)
        new_efflowMET = getEff2HDM(0, mZ, mDM)
        new_effhighMET = getEff2HDM(1, mZ, mDM)
    scale_lowMET = new_efflowMET / old_efflowMET
    scale_highMET = new_effhighMET / old_effhighMET

    # Copy the input file's config string and workspace
    in_TObjString = TObjString(rin.cfg)
    out_TObjString = in_TObjString.Clone()
    in_RooWorkspace = RooWorkspace(rin.wtemplates)
    w1 = ROOT.RooWorkspace("wtemplates")
    w1.rooImport = getattr(w1, 'import')  # 'import' is a Python keyword
    var1 = in_RooWorkspace.var('mgg')
    var2 = in_RooWorkspace.var('model_signal_' + old_str +
                               '_13TeV_met0-130_norm')
    var3 = in_RooWorkspace.var('model_signal_' + old_str +
                               '_13TeV_met130_norm')

    # Multiply the old normalizations by the new efficiency scales
    valnorm_lowMET = scale_lowMET * var2.getValV()
    valnorm_highMET = scale_highMET * var3.getValV()
    norm1 = RooRealVar("model_signal_" + new_str + "_13TeV_met0-130_norm",
                       "model_signal" + new_str + "13TeV_met0-130_norm",
                       valnorm_lowMET)
    norm2 = RooRealVar("model_signal_" + new_str + "_13TeV_met130_norm",
                       "model_signal" + new_str + "13TeV_met130_norm",
                       valnorm_highMET)
    varlist = ROOT.RooArgList(var1, norm1, norm2)

    # Get the old pdfs and rebuild them under the new mass point's names
    pdf1 = in_RooWorkspace.pdf('model_signal_' + old_str + '_13TeV_met0-130')
    pdf2 = in_RooWorkspace.pdf('model_signal_' + old_str +
                               '_13TeV_met0-130_energyScalemet0-130Down')
    pdf3 = in_RooWorkspace.pdf('model_signal_' + old_str +
                               '_13TeV_met0-130_energyScalemet0-130Up')
    pdf4 = in_RooWorkspace.pdf('model_signal_' + old_str + '_13TeV_met130')
    pdf5 = in_RooWorkspace.pdf('model_signal_' + old_str +
                               '_13TeV_met130_energyScalemet130Down')
    pdf6 = in_RooWorkspace.pdf('model_signal_' + old_str +
                               '_13TeV_met130_energyScalemet130Up')
    pdf1new = ROOT.RooHistPdf(pdf1, "model_signal_" + new_str + "_13TeV_met0-130")
    pdf2new = ROOT.RooHistPdf(
        pdf2,
        "model_signal_" + new_str + "_13TeV_met0-130_energyScalemet0-130Down")
    pdf3new = ROOT.RooHistPdf(
        pdf3,
        "model_signal_" + new_str + "_13TeV_met0-130_energyScalemet0-130Up")
    pdf4new = ROOT.RooHistPdf(pdf4, "model_signal_" + new_str + "_13TeV_met130")
    pdf5new = ROOT.RooHistPdf(
        pdf5, "model_signal_" + new_str + "_13TeV_met130_energyScalemet130Down")
    pdf6new = ROOT.RooHistPdf(
        pdf6, "model_signal_" + new_str + "_13TeV_met130_energyScalemet130Up")

    # These come back as null pointers -- probably the classes needed to read
    # them (from the dipho analysis) are missing -- but they are not needed
    # for running Higgs combine, so they are left out for now.
    dat1 = in_RooWorkspace.data('signal_' + old_str + '_13TeV_met130')
    dat2 = in_RooWorkspace.data('signalforPdf_' + old_str + '_13TeV_met130')
    dat3 = in_RooWorkspace.data('signal_' + old_str + '_13TeV_met0-130')
    dat4 = in_RooWorkspace.data('signalforPdf_' + old_str + '_13TeV_met0-130')

    # Write to output file
    #out_TObjString.Write()
    w1.rooImport(var1)
    w1.rooImport(norm1)
    w1.rooImport(norm2)
    w1.rooImport(pdf1new)
    w1.rooImport(pdf2new)
    w1.rooImport(pdf3new)
    w1.rooImport(pdf4new)
    w1.rooImport(pdf5new)
    w1.rooImport(pdf6new)
    #w1.Print()  # print contents of workspace
    w1.Write()
    rout.Close()
    # BUG FIX: the original leaked these three handles (only rout was closed)
    rin.Close()
    fout.close()
    fin.close()
parser.add_argument( '--category', default='*', help='category to be used for drawing correlation matrix (POSIX regex)') parser.add_argument('--pickEvery', type=int, default=10, help='pick one event every to draw the correlation matrix') parser.add_argument('--batch', action='store_true', help='batch mode') args = parser.parse_args() args_dict = deepcopy(args.__dict__) current_file = os.path.abspath(inspect.getfile(inspect.currentframe())) watermark = TObjString(prettyjson.dumps(args_dict)) codeset = open(current_file).read( ) #zlib.compress(open(current_file).read()) compressing does not work well with root... codemark = TObjString(codeset) # # CORRELATION MATRIX # scripts_dir = os.path.join(os.environ['CTRAIN'], 'scripts') fname_regex = re.compile( '[a-zA-Z_0-9]+_(?P<category>[a-zA-Z]+)_(?P<flavor>[A-Z]+)\.root') qcd_txt_path = os.path.join(scripts_dir, 'data/flat_trees/qcd_flat.list') input_files = [i.strip() for i in open(qcd_txt_path)] if args.category != '*': input_files = [
def finish(self, key_name):
    """Pickle the merged value into a TObjString and write it under `key_name`."""
    pickled = dumps(self.merged_object)
    self.merged_object = TObjString(pickled)
    self.merged_object.Write(key_name)
def divideEtaTH1Ds(etaHist,
                   numCat,
                   sourceFileName=None,
                   categoryList=None,
                   drawHist=False):
    """Split a mistag (eta) TH1D into `numCat` equal-integral tagging
    categories, fit a linear function in each category range, and return the
    bookkeeping TList described below.
    """
    # etaHist - the TH1D to split into categories
    # numCat - number of tagging categories
    # categoryList - list of tagging category names
    # drawHist - whether or not to display each category-split histogram
    # sourceFileName - name of etaHist root file

    #normalize etaHist
    etaHist.Scale(1.0 / etaHist.Integral())
    #create default category names if none are given
    if categoryList == None:
        categoryList = []
        for i in range(numCat):
            categoryList += ["Cat" + str(i)]
    histSum = etaHist.Integral()
    #get list of limits: binary-search the bin index at which each category's
    #integral reaches histSum/numCat
    limList = [-1]
    for i in range(1, numCat):
        start = limList[i - 1] + 1
        targetVal = histSum / numCat
        left = start
        right = etaHist.GetNbinsX()
        #print "\n\n\n",etaHist.Integral(start,start+1),"\n\n\n\n";
        while abs(left - right) > 1:
            if etaHist.Integral(start, int((left + right) / 2)) > targetVal:
                right = (left + right) / 2
            else:
                left = (left + right) / 2
            #print start, left, right, etaHist.Integral(start,int((left+right)/2)),targetVal,",",
        # Keep whichever endpoint gives the integral closest to the target.
        if abs(etaHist.Integral(start, left) - targetVal) > abs(
                etaHist.Integral(start, right) - targetVal):
            limList += [right]
        else:
            limList += [left]
    limList[0] = 0
    limList += [etaHist.GetNbinsX()]
    print limList
    #rangeFit is a 4-element TList containing:
    # - the sourceFileName as element 0
    # - a RooThresholdCategory as element 2, and its RooRealVar as element 1
    # - a tList of tFitResult as element 3
    rangeFit = TList()
    rangeFit.AddLast(TObjString(sourceFileName))
    rangeFit.AddLast(RooRealVar('x', 'x', 0.0, 1.0))
    rangeFit.AddLast(
        RooThresholdCategory("tageffRegion", "region of tageff",
                             rangeFit.At(1), "Cat" + str(numCat)))
    rangeFit.AddLast(TList())
    #plot each category-split histogram, if necessary
    if drawHist == True:
        ROOT.gStyle.SetPalette(ROOT.kOcean)
        #create stack to contain the category TH1Ds
        etaHistStack = THStack("etaHistStack", "Stack of TH1Ds")
        #create category-masking function for TH1D clones
        histCutterFunc = TF1("histCutterFunc",
                             "((x>=[0])?((x<[1])?1.0:0.0):0.0)", 0.0, 1.0)
        for i in range(len(limList) - 1):
            etaHistClone = etaHist.Clone()
            histCutterFunc.SetParameter(
                0, etaHist.GetXaxis().GetBinCenter(limList[i]))
            histCutterFunc.SetParameter(
                1, etaHist.GetXaxis().GetBinCenter(limList[i + 1]))
            #histCutterFunc.Draw();
            #raw_input("Press Enter to continue to next hisCutterFunc");
            etaHistClone.Multiply(histCutterFunc)
            etaHistClone.SetFillColor(38 + i)
            etaHistStack.Add(etaHistClone)
        etaHistClone = etaHist.Clone()
        etaHistClone.SetFillColor(38 + len(limList))
        #etaHistStack.Add(etaHistClone);
    import time
    os.chdir(os.environ['B2DXFITTERSROOT'] + '/tutorial')
    if (not (os.path.isdir('fits'))):
        os.mkdir('fits')
    os.chdir('fits')
    # NOTE(review): etaHistStack (and the canvas drawing below) are only
    # defined/meaningful when drawHist is True; with drawHist=False this
    # looks like it would raise NameError -- confirm against the original
    # file's indentation, which was lost in this copy.
    histCanvas = TCanvas()
    etaHistStack.Draw("hist PFC")
    #etaHistClone.Draw("hist PFC");
    #histCanvas.SaveAs('tagRegionFitList_%f.pdf' % time.time());
    #create thresholds and fitting functions (all linear, but with different
    #ranges corresponding to the categories)
    currentRangeTF1List = TList()
    for i in range(1, numCat + 1):
        if (i <= numCat):
            rangeFit.At(2).addThreshold(
                etaHist.GetXaxis().GetBinCenter(limList[i]),
                categoryList[i - 1])
        # Linear fit function centered on the category's average bin content.
        currentRangeTF1List.AddLast(
            TF1(
                "fitFuncEtaset",
                "[0]+[1]*(x-" + str(
                    etaHist.Integral(limList[i - 1], limList[i]) /
                    (limList[i] - limList[i - 1])) + ")",
                etaHist.GetXaxis().GetBinCenter(limList[i - 1]),
                etaHist.GetXaxis().GetBinCenter(limList[i])))
        #currentRangeTF1 = TF1("fitFuncEtaset","[0]+[1]*x",etaHist.GetXaxis().GetBinCenter(limList[i-1]),etaHist.GetXaxis().GetBinCenter(limList[i]));
        # "R0S": fit restricted to the range, no draw, return the fit result.
        rangeFit.Last().AddLast(
            etaHist.Fit(currentRangeTF1List.Last(), "R0S").Get().Clone())
        if (drawHist == True):
            currentRangeTF1List.Last().DrawCopy('same')
        #raw_input('Press Enter to continue to next fit function');
        #currentRangeTF1.IsA().Destructor(currentRangeTF1);
        #print "P0, P1 = ",rangeFit.Last().Last().Parameter(0), rangeFit.Last().Last().Parameter(1);
    histCanvas.SaveAs('tagRegionFitList_%f.pdf' % time.time())
    currentRangeTF1List.Delete()
    #for i in range(1,numCat+1):
    #currentRangeTF1List.Last().IsA().Destructor(currentRangeTF1List.Last());
    #s = raw_input("Press Enter to continue...");
    return rangeFit
def recursiveMerge(target, infile, path='', cache={'TOTALLUMI': 0}, cutflow=True): l = infile.GetDirectory(path) keys = l.GetListOfKeys() cycles = {} #print("keys in input file: \n\n{0}\n\n".format(keys.ls())) for entry in range(keys.GetEntries()): name = keys.At(entry).GetName() + ";" + str(keys.At(entry).GetCycle()) if path: cachename = path + "/" + name else: cachename = name obj = l.Get(name) if type(obj) == TDirectoryFile: #print("TDirectory obj name: {0}".format(obj.GetName())) targetpath = keys.At(entry).GetName() if not target.Get(targetpath): target.mkdir(targetpath) recursiveMerge(target, infile, path + "/" + obj.GetName(), cache) elif type(obj) == TTree: #print("TTree obj name: {0} - cachename: {1} ".format(obj.GetName(), cachename)) cyclename, cyclenumber = cachename.split(';') if cyclename in cycles: continue #print("cyclename: {0} - cyclenumber: {1}".format(cyclename, cyclenumber)) cycles[cyclename] = cyclenumber if not cyclename in cache: #print("adding cyclename {0} to cache (via TTree::CloneTree())".format(cyclename)) target.cd(path) cache[cyclename] = obj.CloneTree() else: objcached = cache[cyclename] col = TObjArray() col.Add(obj) #print("merging TTree obj to cached object") objcached.Merge(col) elif issubclass(obj.__class__, TH1): #print("TH1 obj name: {0}".format(obj.GetName())) if not cutflow and keys.At(entry).GetName() == "CutFlow": continue if not cachename in cache: target.cd(path) cache[cachename] = obj.Clone() else: objcached = cache[cachename] col = TObjArray() col.Add(obj) objcached.Merge(col) elif type(obj) == TObjString: #print("TObjString obj name: {0}".format(obj.GetName())) if obj: target.cd(path) objnew = TObjString(obj.GetString().Data()) objnew.Write(keys.At(entry).GetName()) cache['TOTALLUMI'] += 1 elif issubclass(obj.__class__, TList): #print("TList obj name: {0}".format(obj.GetName())) if obj: target.cd(path) objnew = TList(obj) objnew.Write(keys.At(entry).GetName()) # not working... 
else: print "UNKNOWN OBJECT", name, "OF TYPE", type(obj)