def tofnalhist(obj):
    if not isinstance(obj, Histogram):
        raise TypeError("cannot convert {0} to a fnal_column_analysis_tools histogram".format(type(obj).__name__))
    axes = []
    sparse_binning = {}
    dense_map = {}
    dense_shape = []
    for i, ax in enumerate(obj.axis):
        if isinstance(ax.binning, CategoryBinning):
            # copy the category list so that prepending/appending the overflow
            # label does not mutate the input histogram's binning
            sparse_binning[i] = list(ax.binning.categories)
            if ax.binning.loc_overflow.value < BinLocation.nonexistent.value:
                sparse_binning[i].insert(0, "")
            elif ax.binning.loc_overflow.value > BinLocation.nonexistent.value:
                sparse_binning[i].append("")
            new_ax = hist.Cat(ax.expression, ax.title)
            new_ax._categories = sparse_binning[i]
            axes.append(new_ax)
        elif isinstance(ax.binning, RegularBinning):
            # counts index in flow order: underflow, all regular bins, overflow, nanflow
            dense_map[i] = [-numpy.inf, ..., numpy.inf, numpy.nan]
            dense_shape.append(ax.binning.num + 3)
            axes.append(hist.Bin(ax.expression, ax.title, ax.binning.num,
                                 ax.binning.interval.low, ax.binning.interval.high))
        elif isinstance(ax.binning, EdgesBinning):
            dense_map[i] = [-numpy.inf, ..., numpy.inf, numpy.nan]
            dense_shape.append(ax.binning.edges.size + 2)
            axes.append(hist.Bin(ax.expression, ax.title, ax.binning.edges))
        else:
            raise TypeError("unable to convert axes of type {0} to fnalhist axes".format(type(ax).__name__))

    hout = hist.Hist(obj.title, *axes)
    if isinstance(obj.counts, WeightedCounts):
        hout._init_sumw2()

    # walk the cartesian product of all sparse (categorical) bins, copying the
    # dense block of counts for each combination
    walk_shape = [len(s) for s in sparse_binning.values()]
    if walk_shape:
        walk_indices = numpy.unravel_index(numpy.arange(numpy.prod(walk_shape)), walk_shape)
        walk_iter = zip(*walk_indices)
    else:
        # no sparse axes: a single pass with an empty sparse index
        walk_iter = [()]
    for walk_index in walk_iter:
        insert_index = tuple(v[k] for k, v in zip(walk_index, sparse_binning.values()))
        extract_index = [None] * len(axes)
        for k, v in dense_map.items():
            extract_index[k] = v
        for k, v in zip(sparse_binning.keys(), walk_index):
            extract_index[k] = v
        extract_index = tuple(extract_index)
        if isinstance(obj.counts, UnweightedCounts):
            hout._sumw[insert_index] = obj.counts[extract_index]
        elif isinstance(obj.counts, WeightedCounts):
            counts = obj.counts[extract_index]
            hout._sumw[insert_index] = counts['sumw']
            hout._sumw2[insert_index] = counts['sumw2']
    return hout
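
# A minimal usage sketch for tofnalhist, assuming aghast-style constructors
# (Histogram, Axis, RegularBinning, RealInterval, UnweightedCounts,
# InterpretedInlineBuffer) matching the isinstance checks above; the exact
# constructor calls are assumptions, not part of this module:
h_in = Histogram(
    [Axis(RegularBinning(10, RealInterval(-5, 5)), expression="x", title="x axis")],
    UnweightedCounts(InterpretedInlineBuffer.fromarray(numpy.arange(10))),
)
h_out = tofnalhist(h_in)
print(h_out.values())  # a single dense key () holding the 10 regular-bin counts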
def __init__(self, columns=None):
    # avoid the mutable-default-argument pitfall
    self._columns = columns if columns is not None else []
    dataset_axis = hist.Cat("dataset", "Primary dataset")
    mass_axis = hist.Bin("mass", r"$m_{\mu\mu}$ [GeV]", 30000, 0.25, 300)
    pt_axis = hist.Bin("pt", r"$p_{T}$ [GeV]", 30000, 0.25, 300)
    self._accumulator = processor.dict_accumulator({
        'mass': hist.Hist("Counts", dataset_axis, mass_axis),
        'pt': hist.Hist("Counts", dataset_axis, pt_axis),
        'cutflow': processor.defaultdict_accumulator(int),
    })
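
# A processor built around this __init__ would normally also expose the column
# list and the accumulator; a minimal sketch, assuming the usual ProcessorABC
# property pattern (these properties are not part of the original snippet):
@property
def columns(self):
    return self._columns

@property
def accumulator(self):
    return self._accumulator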
def calc(self, physics_objects, dataset_name):
    electrons = physics_objects["Electron"]
    ele = electrons[(electrons.pt > 20) &
                    (np.abs(electrons.eta) < 2.5) &
                    (electrons.cutBased >= 4)]
    muons = physics_objects["Muon"]
    mu = muons[(muons.pt > 20) &
               (np.abs(muons.eta) < 2.4) &
               (muons.tightId > 0)]

    # Just to demonstrate broadcast variables
    weights_eval = self.nonevent_data.value
    electrons['SF'] = weights_eval["eleScaleFactor_TightId_POG"](electrons.eta, electrons.pt)

    ee = ele.distincts()
    mm = mu.distincts()
    em = ele.cross(mu)

    dileptons = {}
    dileptons['ee'] = ee[(ee.i0.pdgId * ee.i1.pdgId == -11 * 11) & (ee.i0.pt > 25)]
    dileptons['mm'] = mm[(mm.i0.pdgId * mm.i1.pdgId == -13 * 13)]
    dileptons['em'] = em[(em.i0.pdgId * em.i1.pdgId == -11 * 13)]

    channels = {}
    channels['ee'] = (ee.counts == 1) & (mu.counts == 0)
    channels['mm'] = (mm.counts == 1) & (ele.counts == 0)
    channels['em'] = (em.counts == 1) & (ele.counts == 1) & (mu.counts == 1)

    # dupe = np.zeros(Muon_pt.size, dtype=bool)
    tot = 0
    isRealData = True
    for channel, cut in channels.items():
        zcands = dileptons[channel][cut]
        # dupe |= cut
        tot += cut.sum()
        weight = np.array(1.)
        zMassHist = self.accumulators["zMass"]
        zMass = hist.Hist(
            "Events",
            zMassHist.dataset_axis,
            zMassHist.channel_cat_axis,
            hist.Bin("mass", r"$m_{\ell\ell}$ [GeV]", 120, 0, 120),
        )
        zMass.fill(dataset=dataset_name, channel=channel,
                   mass=zcands.mass.flatten(), weight=weight.flatten())
        zMassHist.accumulator.add(zMass)
    return np.zeros(electrons.pt.size)
def compute_zpeak(dataset,
                  nElectron, Electron_pt, Electron_eta, Electron_phi,
                  Electron_mass, Electron_cutBased, Electron_pdgId,
                  Electron_pfRelIso03_all,
                  nMuon, Muon_pt, Muon_eta, Muon_phi, Muon_mass,
                  Muon_tightId, Muon_pdgId, Muon_pfRelIso04_all):
    global hists, non_event_data
    tic = time.time()

    electrons = JaggedCandidateArray.candidatesfromcounts(
        nElectron.array,
        pt=Electron_pt.array[0].base,
        eta=Electron_eta.array[0].base,
        phi=Electron_phi.array[0].base,
        mass=Electron_mass.array[0].base,
        cutBased=Electron_cutBased.array[0].base,
        pdgId=Electron_pdgId.array[0].base,
        pfRelIso03_all=Electron_pfRelIso03_all.array[0].base,
    )
    ele = electrons[(electrons.pt > 20) &
                    (np.abs(electrons.eta) < 2.5) &
                    (electrons.cutBased >= 4)]

    # Just to demonstrate broadcast variables
    weights_eval = non_event_data.value
    electrons['SF'] = weights_eval["eleScaleFactor_TightId_POG"](electrons.eta, electrons.pt)

    muons = JaggedCandidateArray.candidatesfromcounts(
        nMuon.values,
        pt=Muon_pt.array[0].base,
        eta=Muon_eta.array[0].base,
        phi=Muon_phi.array[0].base,
        mass=Muon_mass.array[0].base,
        tightId=Muon_tightId.array[0].base,
        pdgId=Muon_pdgId.array[0].base,
        pfRelIso04_all=Muon_pfRelIso04_all.array[0].base,
    )
    mu = muons[(muons.pt > 20) &
               (np.abs(muons.eta) < 2.4) &
               (muons.tightId > 0)]

    ee = ele.distincts()
    mm = mu.distincts()
    em = ele.cross(mu)

    dileptons = {}
    dileptons['ee'] = ee[(ee.i0.pdgId * ee.i1.pdgId == -11 * 11) & (ee.i0.pt > 25)]
    dileptons['mm'] = mm[(mm.i0.pdgId * mm.i1.pdgId == -13 * 13)]
    dileptons['em'] = em[(em.i0.pdgId * em.i1.pdgId == -11 * 13)]

    channels = {}
    channels['ee'] = (ee.counts == 1) & (mu.counts == 0)
    channels['mm'] = (mm.counts == 1) & (ele.counts == 0)
    channels['em'] = (em.counts == 1) & (ele.counts == 1) & (mu.counts == 1)

    dupe = np.zeros(Muon_pt.size, dtype=bool)
    tot = 0
    isRealData = True
    for channel, cut in channels.items():
        zcands = dileptons[channel][cut]
        dupe |= cut
        tot += cut.sum()
        weight = np.array(1.)
        zMassHist = hists["zMass"]["accumulator"]
        zMass = hist.Hist(
            "Events",
            hists["zMass"]["dataset_axis"],
            hists["zMass"]["channel_cat_axis"],
            hist.Bin("mass", r"$m_{\ell\ell}$ [GeV]", 120, 0, 120),
        )
        zMass.fill(dataset=dataset[0], channel=channel,
                   mass=zcands.mass.flatten(), weight=weight.flatten())
        zMassHist.add(zMass)

    dt = time.time() - tic
    return pd.Series(np.ones(Electron_pt.size) * dt / Electron_pt.size)
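
# compute_zpeak above is shaped like a scalar pandas UDF: it receives column
# batches as pd.Series and returns one pd.Series of the same length. A sketch
# of registering and applying it (the DataFrame `df` and the Spark 2.x
# PandasUDFType style are assumptions, not shown in the original snippet):
from pyspark.sql.functions import pandas_udf, PandasUDFType

zpeak_udf = pandas_udf(compute_zpeak, 'double', PandasUDFType.SCALAR)
cols = ['dataset',
        'nElectron', 'Electron_pt', 'Electron_eta', 'Electron_phi',
        'Electron_mass', 'Electron_cutBased', 'Electron_pdgId',
        'Electron_pfRelIso03_all',
        'nMuon', 'Muon_pt', 'Muon_eta', 'Muon_phi', 'Muon_mass',
        'Muon_tightId', 'Muon_pdgId', 'Muon_pfRelIso04_all']
result = df.select(zpeak_udf(*[df[c] for c in cols]))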
class FNAL_Hist_AccumulatorParam(AccumulatorParam):
    # pyspark's AccumulatorParam also requires a zero() method; this
    # implementation is an assumption (not in the original snippet), built
    # from the hist copy()/clear() API: an empty histogram with the same axes
    def zero(self, value):
        out = value.copy()
        out.clear()
        return out

    def addInPlace(self, val1, val2):
        val1 += val2
        return val1


# Create a histogram accumulator for ZMass
hists = OrderedDict()
dataset_axis = hist.Cat("dataset", "DAS name")
channel_cat_axis = hist.Cat("channel", "dilepton flavor")
hists['zMass'] = {
    "accumulator": spark.sparkContext.accumulator(
        hist.Hist("Events",
                  dataset_axis,
                  channel_cat_axis,
                  hist.Bin("mass", r"$m_{\ell\ell}$ [GeV]", 120, 0, 120),
                  ),
        FNAL_Hist_AccumulatorParam()
    ),
    "dataset_axis": hist.Cat("dataset", "DAS name"),
    "channel_cat_axis": hist.Cat("channel", "dilepton flavor"),
}

# Create a broadcast variable for the non-event data
weightsext = lookup_tools.extractor()
correctionDescriptions = open("newCorrectionFiles.txt").readlines()
weightsext.add_weight_sets(correctionDescriptions)
weightsext.finalize()
weights_eval = weightsext.make_evaluator()
non_event_data = spark.sparkContext.broadcast(weights_eval)
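
# Sketch of how this accumulator pair is consumed: executor tasks add partial
# histograms and the driver reads the merged value after the job completes
# (standard pyspark Accumulator API; `partial_hist` here is hypothetical):
partial_hist = hist.Hist("Events", dataset_axis, channel_cat_axis,
                         hist.Bin("mass", r"$m_{\ell\ell}$ [GeV]", 120, 0, 120))
hists['zMass']['accumulator'].add(partial_hist)  # inside an executor task
merged = hists['zMass']['accumulator'].value     # on the driver, afterwards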
def test_hist():
    counts, test_eta, test_pt = dummy_jagged_eta_pt()

    h_nothing = hist.Hist("empty inside")
    assert h_nothing.sparse_dim() == h_nothing.dense_dim() == 0
    assert h_nothing.values() == {}

    h_regular_bins = hist.Hist("regular joe",
                               hist.Bin("x", "x", 20, 0, 200),
                               hist.Bin("y", "why", 20, -3, 3))
    h_regular_bins.fill(x=test_pt, y=test_eta)
    nentries = np.sum(counts)
    assert h_regular_bins.sum("x", "y", overflow='all').values(sumw2=True)[()] == (nentries, nentries)

    # bin x=2, y=10 (when overflow removed)
    count_some_bin = np.sum((test_pt >= 20.) & (test_pt < 30.) &
                            (test_eta >= 0.) & (test_eta < 0.3))
    assert h_regular_bins.project("x", slice(20, 30)).values()[()][10] == count_some_bin
    assert h_regular_bins.project("y", slice(0, 0.3)).values()[()][2] == count_some_bin

    h_reduced = h_regular_bins[10:, -.6:]
    # bin x=1, y=2
    assert h_reduced.project("x", slice(20, 30)).values()[()][2] == count_some_bin
    assert h_reduced.project("y", slice(0, 0.3)).values()[()][1] == count_some_bin
    h_reduced.fill(x=23, y=0.1)
    assert h_reduced.project("x", slice(20, 30)).values()[()][2] == count_some_bin + 1
    assert h_reduced.project("y", slice(0, 0.3)).values()[()][1] == count_some_bin + 1

    animal = hist.Cat("animal", "type of animal")
    vocalization = hist.Cat("vocalization", "onomatopoiea is that how you spell it?")
    h_cat_bins = hist.Hist("I like cats", animal, vocalization)
    h_cat_bins.fill(animal="cat", vocalization="meow", weight=2.)
    h_cat_bins.fill(animal="dog", vocalization="meow", weight=np.array([-1., -1., -5.]))
    h_cat_bins.fill(animal="dog", vocalization="woof", weight=100.)
    h_cat_bins.fill(animal="dog", vocalization="ruff")
    assert h_cat_bins.values()[("cat", "meow")] == 2.
    assert h_cat_bins.values(sumw2=True)[("dog", "meow")] == (-7., 27.)
    assert h_cat_bins.project("vocalization", ["woof", "ruff"]).values(sumw2=True)[("dog", )] == (101., 10001.)

    height = hist.Bin("height", "height [m]", 10, 0, 5)
    h_mascots_1 = hist.Hist("fermi mascot showdown",
                            animal,
                            vocalization,
                            height,
                            # weight is a reserved keyword
                            hist.Bin("mass", "weight (g=9.81m/s**2) [kg]", np.power(10., np.arange(5) - 1)),
                            )

    adult_bison_h = np.random.normal(loc=2.5, scale=0.2, size=40)
    adult_bison_w = np.random.normal(loc=700, scale=100, size=40)
    h_mascots_1.fill(animal="bison", vocalization="huff", height=adult_bison_h, mass=adult_bison_w)
    goose_h = np.random.normal(loc=0.4, scale=0.05, size=1000)
    goose_w = np.random.normal(loc=7, scale=1, size=1000)
    h_mascots_1.fill(animal="goose", vocalization="honk", height=goose_h, mass=goose_w)
    crane_h = np.random.normal(loc=1, scale=0.05, size=4)
    crane_w = np.random.normal(loc=10, scale=1, size=4)
    h_mascots_1.fill(animal="crane", vocalization="none", height=crane_h, mass=crane_w)

    h_mascots_2 = h_mascots_1.copy()
    h_mascots_2.clear()
    baby_bison_h = np.random.normal(loc=.5, scale=0.1, size=20)
    baby_bison_w = np.random.normal(loc=200, scale=10, size=20)
    baby_bison_cutefactor = 2.5 * np.ones_like(baby_bison_w)
    h_mascots_2.fill(animal="bison", vocalization="baa", height=baby_bison_h,
                     mass=baby_bison_w, weight=baby_bison_cutefactor)
    h_mascots_2.fill(animal="fox", vocalization="none", height=1., mass=30.)

    h_mascots = h_mascots_1 + h_mascots_2
    assert h_mascots.project("vocalization", "h*").sum("height", "mass", "animal").values()[()] == 1040.
    species_class = hist.Cat("species_class", "where the subphylum is vertebrates")
    classes = {
        'birds': ['goose', 'crane'],
        'mammals': ['bison', 'fox'],
    }
    h_species = h_mascots.group(species_class, "animal", classes)

    assert set(h_species.project("vocalization").values().keys()) == set([('birds', ), ('mammals', )])
    nbirds_bin = np.sum((goose_h >= 0.5) & (goose_h < 1) & (goose_w > 10) & (goose_w < 100))
    nbirds_bin += np.sum((crane_h >= 0.5) & (crane_h < 1) & (crane_w > 10) & (crane_w < 100))
    assert h_species.project("vocalization").values()[('birds', )][1, 2] == nbirds_bin
    tally = h_species.sum("mass", "height", "vocalization").values()
    assert tally[('birds', )] == 1004.
    assert tally[('mammals', )] == 91.

    h_species.scale({"honk": 0.1, "huff": 0.9}, axis="vocalization")
    h_species.scale(5.)
    tally = h_species.sum("mass", "height", "vocalization").values(sumw2=True)
    assert tally[('birds', )] == (520., 350.)
    assert tally[('mammals', )] == (435., 25 * (40 * (0.9**2) + 20 * (2.5**2) + 1))

    assert h_species.axis("vocalization") is vocalization
    assert h_species.axis("height") is height
    assert h_species.project("vocalization", "h*").axis("height") is height

    tall_class = hist.Cat("tall_class", "species class (species above 1m)")
    mapping = {
        'birds': (['goose', 'crane'], slice(1., None)),
        'mammals': (['bison', 'fox'], slice(1., None)),
    }
    h_tall = h_mascots.group(tall_class, (animal, height), mapping)
    tall_bird_count = np.sum(goose_h >= 1.) + np.sum(crane_h >= 1)
    assert h_tall.sum("mass", "vocalization").values()[('birds', )] == tall_bird_count
    tall_mammal_count = np.sum(adult_bison_h >= 1.) + np.sum(baby_bison_h >= 1) + 1
    assert h_tall.sum("mass", "vocalization").values()[('mammals', )] == tall_mammal_count
#!/usr/bin/env python
import uproot, uproot_methods
import numpy as np
np.seterr(divide='ignore', invalid='ignore')
from Builder import Initialize
from fnal_column_analysis_tools import hist

hists = {
    'recoil': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                        hist.Bin("recoil", "Hadronic Recoil",
                                 [250.0, 280.0, 310.0, 340.0, 370.0, 400.0, 430.0, 470.0, 510.0, 550.0, 590.0, 640.0,
                                  690.0, 740.0, 790.0, 840.0, 900.0, 960.0, 1020.0, 1090.0, 1160.0, 1250.0])),
    'mindphi': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                         hist.Bin("mindphi", "Min dPhi(MET,AK4s)", 15, 0, 6.28)),
    'j1pt': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                      hist.Bin("j1pt", "AK4 Leading Jet Pt", 50, 30, 500)),
    'fj1pt': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                       hist.Bin("fj1pt", "AK15 Leading Jet Pt", 50, 200, 700)),
    'njets': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                       hist.Bin("njets", "AK4 Number of Jets", 6, 0, 5)),
    'nfjets': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                        hist.Bin("nfjets", "AK15 Number of Jets", 4, 0, 3)),
    'fjmass': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                        hist.Bin("fjmass", "AK15 Jet Mass", 50, 20, 250)),
    'TvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                        hist.Bin("TvsQCD", "TvsQCD", 15, 0, 1)),
    'WvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                        hist.Bin("WvsQCD", "WvsQCD", 15, 0, 1)),
    'ZvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                        hist.Bin("ZvsQCD", "ZvsQCD", 15, 0, 1)),
    'VvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                        hist.Bin("VvsQCD", "VvsQCD", 15, 0, 1)),
    'ZHbbvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                           hist.Bin("ZHbbvsQCD", "ZHbbvsQCD", 15, 0, 1)),
    'ZHccvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                           hist.Bin("ZHccvsQCD", "ZHccvsQCD", 15, 0, 1)),
    'WcqvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                          hist.Bin("WcqvsQCD", "WcqvsQCD", 15, 0, 1)),
    'WqqvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                          hist.Bin("WqqvsQCD", "WqqvsQCD", 15, 0, 1)),
    'ZbbvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                          hist.Bin("ZbbvsQCD", "ZbbvsQCD", 15, 0, 1)),
    'ZccvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                          hist.Bin("ZccvsQCD", "ZccvsQCD", 15, 0, 1)),
    'ZqqvsQCD': hist.Hist("Events", hist.Cat("dataset", "Primary dataset"), hist.Cat("region", "Region"),
                          hist.Bin("ZqqvsQCD", "ZqqvsQCD", 15, 0, 1)),
}

samples = {
    "iszeroL": ('ZJets', 'WJets', 'DY', 'TT_TuneCUETP8M2T4', 'ST_t-channel', 'ST_tW', 'WW_TuneCUETP8M1',
                'WZ_TuneCUETP8M1', 'ZZ_TuneCUETP8M1', 'QCD', 'VH_HToBB', 'WminusH', 'WplusH', 'ttHTobb',
                'GluGluHToBB', 'VBFHToBB', 'MET'),
""" dazsle gghbb analysis plots """ from fnal_column_analysis_tools import hist from fnal_column_analysis_tools.hist import plot from copy import deepcopy dataset = hist.Cat("dataset", "Primary dataset") systematic = hist.Cat("systematic", "Systematic Variation") gencat = hist.Bin("ak8_isHadronicV", "Matched", [-1,0,1,2,3,9,10,11]) # one can relabel intervals, although process mapping obviates this titles = ["Data","QCD", "V(light) matched", "V(c) matched", "V(b) matched", "Top W(ud)+b", "Top W(cs)+b"] for i,v in enumerate(gencat.identifiers()): setattr(v, 'label', titles[i]) jetpt = hist.Bin("ak8_pt", "Jet $p_T$", [450, 500, 550, 600, 675, 800, 1000]) jetpt_coarse = hist.Bin("ak8_pt", "Jet $p_T$", [450, 800]) jetmass = hist.Bin("ak8_msd", "Jet $m_{sd}$", 23, 40, 201) jetmass_coarse = hist.Bin("ak8_msd", "Jet $m_{sd}$", [40, 100, 140, 200]) jetrho = hist.Bin("jetrho", r"Jet $\rho$", 13, -6, -2.1) doubleb = hist.Bin("ak8_deepdoubleb", "Double-b", 20, 0., 1) doublec = hist.Bin("ak8_deepdoublec", "Double-c", 20, 0., 1.) doublecvb = hist.Bin("ak8_deepdoublecvb", "Double-cvb", 20, 0., 1.) doubleb_coarse = [1., 0.93, 0.92, 0.89, 0.85, 0.7] doubleb_coarse = hist.Bin("ak8_deepdoubleb", "Double-b", doubleb_coarse[::-1]) doublec_coarse = [0.87, 0.84, 0.83, 0.79, 0.69, 0.58] doublec_coarse = hist.Bin("ak8_deepdoublec", "Double-c", doublec_coarse[::-1]) doublecvb_coarse = [0.93, 0.91, 0.86, 0.76, 0.6, 0.17, 0.12] doublecvb_coarse = hist.Bin("ak8_deepdoublecvb", "Double-cvb", doublecvb_coarse[::-1]) n2ddt_coarse = hist.Bin("ak8_N2sdb1_ddt", "N2 DDT", [0.])
def __init__(self, corrections, debug=False):
    self._corrections = corrections
    self._debug = debug

    dataset_axis = hist.Cat("dataset", "Primary dataset")
    gencat_axis = hist.Bin("AK8Puppijet0_isHadronicV", "V matching index", [0, 1, 2, 3, 9, 10, 11])
    jetpt_axis = hist.Bin("AK8Puppijet0_pt", r"Jet $p_T$", [450, 500, 550, 600, 675, 800, 1200])
    jetmass_axis = hist.Bin("AK8Puppijet0_msd", r"Jet $m_{sd}$", 23, 40, 201)
    jetpt_coarse_axis = hist.Bin("AK8Puppijet0_pt", r"Jet $p_T$", [450, 1200])
    jetmass_coarse_axis = hist.Bin("AK8Puppijet0_msd", r"Jet $m_{sd}$", [40, 103, 152, 201])
    jetrho_axis = hist.Bin("ak8jet_rho", r"Jet $\rho$", 13, -6, -2.1)
    doubleb_axis = hist.Bin("AK8Puppijet0_deepdoubleb", "Double-b", 20, 0., 1.)
    doublec_axis = hist.Bin("AK8Puppijet0_deepdoublec", "Double-c", 20, 0., 1.)
    doublecvb_axis = hist.Bin("AK8Puppijet0_deepdoublecvb", "Double-cvb", 20, 0., 1.)
    # working-point lists are tightest-first; reverse them into ascending bin edges
    doubleb_wps = [1., 0.9, 0.89, 0.85, 0.7]
    doubleb_coarse_axis = hist.Bin("AK8Puppijet0_deepdoubleb", "Double-b", doubleb_wps[::-1])
    doublec_wps = [0.87, 0.84, 0.83, 0.79, 0.69]
    doublec_coarse_axis = hist.Bin("AK8Puppijet0_deepdoublec", "Double-c", doublec_wps[::-1])
    doublecvb_wps = [0.93, 0.91, 0.6, 0.2, 0.17]
    doublecvb_coarse_axis = hist.Bin("AK8Puppijet0_deepdoublecvb", "Double-cvb", doublecvb_wps[::-1])

    hists = processor.dict_accumulator()
    hist.Hist.DEFAULT_DTYPE = 'f'  # save some space by keeping float bin counts instead of double
    hists['sumw'] = processor.dict_accumulator()  # the defaultdict_accumulator is broken :<
    hists['jetpt_preselection'] = hist.Hist(
        "Events", dataset_axis, gencat_axis,
        hist.Bin("AK8Puppijet0_pt", r"Jet $p_T$", 100, 300, 1300),
    )
    hists['jeteta_preselection'] = hist.Hist(
        "Events", dataset_axis, gencat_axis,
        hist.Bin("AK8Puppijet0_eta", r"Jet $\eta$", 50, -3, 3),
    )
    hists['jetpt_muoncontrol'] = hist.Hist(
        "Events", dataset_axis, gencat_axis,
        hist.Bin("AK8Puppijet0_pt", r"Jet $p_T$", 100, 300, 1300),
    )
    hists['muonpt_muoncontrol'] = hist.Hist(
        "Events", dataset_axis, gencat_axis,
        hist.Bin("vmuoLoose0_pt", r"Leading muon $p_T$", 100, 0, 1000),
    )
    hists['muoneta_muoncontrol'] = hist.Hist(
        "Events", dataset_axis, gencat_axis,
        hist.Bin("vmuoLoose0_eta", r"Leading muon $\eta$", 50, -3, 3),
    )
    hists['jetpt_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis,
        hist.Bin("AK8Puppijet0_pt", r"Jet $p_T$", 100, 300, 1300))
    hists['sculpt_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetpt_axis, jetmass_axis,
        doubleb_coarse_axis, doublec_coarse_axis, doublecvb_coarse_axis)
    hists['tagtensor_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetpt_coarse_axis, jetmass_coarse_axis,
        doubleb_axis, doublec_axis, doublecvb_axis)
    hists['opposite_ak8_n3sdb1_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetpt_coarse_axis, jetmass_coarse_axis,
        hist.Bin("opposite_ak8_n3sdb1", r"Jet $N_{3,sd}^{\beta=1}$", 40, 0.5, 3))
    hists['opposite_ak8_tau32_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetpt_coarse_axis, jetmass_coarse_axis,
        hist.Bin("opposite_ak8_tau32", r"Jet $\tau_{32}$", 40, 0, 1))
    hists['opposite_ak8_msd_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetpt_coarse_axis, jetmass_coarse_axis,
        hist.Bin("opposite_ak8_msd", r"Jet $m_{sd}$", 40, 50, 200))
    hists['njets_ak4_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetpt_coarse_axis, jetmass_coarse_axis,
        hist.Bin("nAK4PuppijetsPt30", "Number AK4 Jets", 8, 0, 8))
    hists['nminus1_antiak4btagMediumOppHem_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetpt_coarse_axis, jetmass_coarse_axis,
        hist.Bin("opposite_ak4_leadingDeepCSV", r"Max(DeepCSV) (of $\leq4$ leading)", 40, 0, 1))
    hists['nminus1_pfmet_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetpt_coarse_axis, jetmass_coarse_axis, doubleb_coarse_axis,
        hist.Bin("pfmet", r"PF $p_{T}^{miss}$", 40, 0, 200))
    hists['nminus1_n2ddtPass_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetmass_coarse_axis, doubleb_coarse_axis,
        hist.Bin("ak8jet_n2ddt", r"Jet $N_{2,DDT}^{\beta=1}$", 40, -.25, .25))
    hists['nminus1_ak4btagMediumDR08_muoncontrol'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetmass_coarse_axis, doubleb_coarse_axis,
        hist.Bin("ak4_leadingDeepCSV_dR08", r"Max(DeepCSV) ($\Delta R(ak4, ak8)>0.8$)", 40, 0, 1))
    hists['nminus1_muonDphiAK8_muoncontrol'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, jetmass_coarse_axis, doubleb_coarse_axis,
        hist.Bin("muon_dphi", r"$\Delta\phi(\mu, j)$", 40, 0, np.pi))
    hists['templates_signalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, hist.Cat("systematic", "Systematic"),
        jetpt_axis, jetmass_axis, doubleb_coarse_axis)
    hists['templates_muoncontrol'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, hist.Cat("systematic", "Systematic"),
        jetpt_axis, jetmass_axis, doubleb_coarse_axis)
    hists['templates_hCCsignalregion'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, hist.Cat("systematic", "Systematic"),
        jetpt_axis, jetmass_axis, doublec_coarse_axis)
    hists['templates_hCCmuoncontrol'] = hist.Hist(
        "Events", dataset_axis, gencat_axis, hist.Cat("systematic", "Systematic"),
        jetpt_axis, jetmass_axis, doublec_coarse_axis)
    self._accumulator = hists
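
# The class stashes its histograms in self._accumulator; the usual processor
# pattern exposes them through a read-only property. A minimal sketch (this
# property is assumed, not part of the original snippet):
@property
def accumulator(self):
    return self._accumulator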