def main():
    # Toy-MC study of the log-likelihood-ratio distribution:
    # draw a Poisson count for each of the 10 expected-background bins in b,
    # form 2*(ll(n|n) - ll(b|n)), histogram it and overlay a chi2-like curve.
    # NOTE: b, ll and h1 are module-level objects defined elsewhere in the file.
    generators = [TRandom3() for _ in range(10)]
    for gen in generators:
        gen.SetSeed(0)  # seed 0 -> unique machine-generated seed per generator

    # range(1, 1000000) preserves the original count of 999999 pseudo-experiments
    for _ in range(1, 1000000):
        # one Poisson draw per bin, one independent generator per bin
        n = [gen.Poisson(mu) for gen, mu in zip(generators, b)]
        numerator = ll(0, n, n)
        denominator = ll(0, b, n)
        nll = 2 * (numerator - denominator)
        h1.Fill(nll)

    # comparison curve; [0] is the free normalisation parameter
    f1 = TF1("f1", "[0]*(x**4)*exp(-0.5*x)", 0., 50)
    # BUG FIX: the original called h1.Fit('chi2'), but no function named
    # 'chi2' is created here; fit the TF1 defined above instead
    h1.Fit('f1')
    c1 = TCanvas('', '', 800, 800)
    c1.cd()
    h1.Draw()
    f1.Draw('same')
    c1.SaveAs('aa.png')
def __init__(self):
    """Configure the east and west ZDC parametrizations."""
    # one parameter holder per detector side
    self.e = self.param()
    self.w = self.param()
    # systematic effects
    self.e.a, self.w.a = 0.18, 0.09
    # sampling fraction
    self.e.b, self.w.b = 1.6, 2.8
    # noise
    self.e.c, self.w.c = 0.1, 0.1
    # ADC conversion: slope k1 and offset k2
    self.e.k1, self.e.k2 = 0.66, -18.94
    self.w.k1, self.w.k2 = 0.68, -15.52
    # random generator used by the smearing routines
    self.rnd = TRandom3()
def __init__(self, seed, wp='medium', measurement='central'): self.randm = TRandom3(seed) self.mc_eff_file = TFile( '$CMSSW_BASE/src/CMGTools/H2TauTau/data/tagging_efficiencies_ichep2016.root' ) # MC b-tag efficiencies as measured in HTT by Adinda self.btag_eff_b = self.mc_eff_file.Get('btag_eff_b') self.btag_eff_c = self.mc_eff_file.Get('btag_eff_c') self.btag_eff_oth = self.mc_eff_file.Get('btag_eff_oth') # b-tag SFs from POG calib = ROOT.BTagCalibration( "csvv2", os.path.expandvars( "$CMSSW_BASE/src/CMGTools/H2TauTau/data/CSVv2_ichep.csv")) op_dict = {'loose': 0, 'medium': 1, 'tight': 2} print 'Booking b/c reader' v_sys = getattr(ROOT, 'vector<string>')() v_sys.push_back('up') v_sys.push_back('down') self.reader_bc = ROOT.BTagCalibrationReader(op_dict[wp], measurement, v_sys) self.reader_bc.load(calib, 0, 'comb') self.reader_bc.load(calib, 1, 'comb') print 'Booking light reader' self.reader_light = ROOT.BTagCalibrationReader(op_dict[wp], measurement, v_sys) self.reader_light.load(calib, 2, 'incl')
def generate_toys(overlap, vtxresx, vtxresy=None, rand=None, nbins=95, verbose=False):
    """Generate toy data of Beam Imaging scan.
    overlap: Beam shape overlap function model (TF2).
    vtxresx, vtxresy: Vertex resolution.
    rand: Random number generator (if not given: use a TRandom3).
    nbins: Number of bins in histograms.
    """
    if rand is None:
        rand = TRandom3()
        rand.SetSeed(0)
    if vtxresy is None:
        # symmetric resolution unless y is given explicitly
        vtxresy = vtxresx
    overlap.SetNpx(500)
    overlap.SetNpy(500)
    generator = ToyGenerator(overlap, rand, vtxresx, vtxresy)
    generator.SetVerbose(verbose)
    # scan positions: -9 .. +9 in steps of 1
    positions = [-9.0 + step * 1.0 for step in range(19)]
    hists = []
    nevents = []
    for beam in ('2', '1'):
        for coord in ('X', 'Y'):
            label = 'Beam{0}Move{1}_Add'.format(beam, coord)
            # scan parameter index derived from beam and coordinate
            par = {'1': 0, '2': 2}[beam] + {'X': 0, 'Y': 1}[coord]
            hist, count = generator.SimulateScan(par, positions, nbins)
            hist.SetName(label)
            hists.append(hist)
            nevents.append(count)
    return hists, nevents
def testSampling(mnevent=1000, nsamples=1000, opt="p"): print "Number of samples:", nsamples if "p" in opt: print "Variable number of events from Poisson mu=", mnevent else: print "Fixed number of events n=", mnevent if "w1" in opt: print "Weighted distribution w1" elif "w2" in opt: print "Weighted distribution w2" gRandom = TRandom3() fun = TF1("fun", "1000*x*exp(-x*20)", 0.0, 0.5) hists = dict() for isample in range(nsamples): hist = TH1D("histh" + str(isample), "1000*x*exp(-x*20) h", nbin, binEdges) if "p" in opt: nevent = gRandom.Poisson(mnevent) else: nevent = mnevent for i in range(int(nevent)): value = fun.GetRandom() nmom = nmomFromOpt(opt) # fillweight= weight( value, opt ) fillweight = value**nmom hist.Fill(value, fillweight) hists[isample] = hist errorMatrixSample = sampleErrorMatrix(hists) print "Error matrix" printMatrix(errorMatrixSample, 7, 4) corr = cov2corr(errorMatrixSample) print "Correlation matrix" printMatrix(corr, 6, 3) return
def applySF(isTagged, tag_SF, tag_eff):
    """Promote/demote a jet's b-tag decision so the data/MC scale factor
    tag_SF is reproduced on average, given MC tagging efficiency tag_eff."""
    newTag = isTagged
    # SF of exactly 1: data and MC already agree, nothing to do
    if tag_SF == 1.:
        return newTag
    # single uniform throw decides whether this jet flips
    coin = TRandom3(0).Uniform(1.)
    if tag_SF > 1:
        # SF > 1: promote a fraction of the untagged jets to tagged
        if not isTagged:
            # fraction of jets that need to be upgraded
            mistagPercent = (1.0 - tag_SF) / (1.0 - (tag_SF / tag_eff))
            if coin < mistagPercent:
                newTag = True
    elif isTagged and coin > tag_SF:
        # SF < 1: demote tagged jets with probability 1 - SF
        newTag = False
    return newTag
def __init__(self):
    """East/west ZDC parametrizations for the Poisson-smearing model."""
    self.e = self.param()
    self.w = self.param()
    # scaling Poisson term
    self.e.a1, self.w.a1 = 1.5, 2.8
    # non-compensation
    self.e.a2, self.w.a2 = 0.67, 2.5
    # material constant, GeV
    self.e.E0, self.w.E0 = 1.4, 1.4
    # power law parameter
    self.e.l, self.w.l = 0.7, 0.3
    # ADC conversion: slope k1 and offset k2
    self.e.k1, self.e.k2 = 0.66, -18.94
    self.w.k1, self.w.k2 = 0.68, -15.52
    # random generator used by the smearing routines
    self.rnd = TRandom3()
def __init__(self):
    """Resolution and ADC-offset constants tuned for the J/psi mass sample
    (all-mass alternatives kept as comments)."""
    # constant term (all-mass values: 0., 0.)
    self.s0E, self.s0W = 0.1, 0.3
    # linear term in energy (all-mass values: 0.4, 0.7)
    self.s1E, self.s1W = 1.4, 1.
    # quadratic in energy (all-mass values: 0.2, 0.23)
    self.s2E, self.s2W = 0.19, 0.3
    # difference in mean for ADC, east and west (all-mass: 25.2, 11.5)
    self.deltE = 100.9 - 79.1  # J/psi mass
    self.deltW = 101.5 - 94.1
    # random generator
    self.rnd = TRandom3()
def overlap_variations(model, rand=None, n=100, verbose=False): """Compute overlap integral with uncertainty from parameter variations. model: Beam shape model (derived from BeamShapeCore). rand: TRandom random number generator (optional). n: Number of variations (default: 100). verbose: Set True for output at every step. """ if rand is None: rand = TRandom3() rand.SetSeed(0) overlap = model.overlap_func() true = overlap.Integral(-30.0, 30.0, -30.0, 30.0) print '<<< True overlap: {0}'.format(true) values = [] for i in range(n): overlap = model.assign_overlap(overlap, random=rand) value = overlap.Integral(-30.0, 30.0, -30.0, 30.0) if value <= 0.0: continue values.append(value) if verbose: print '<<< Variation {0}: {1}'.format(i, value) if len(values) == 0: return -1.0, -1.0, -1.0 avg = sum(values) / len(values) rms = (sum([(v - avg)**2 for v in values]) / len(values))**0.5 diff = abs(true - avg) / true if diff < 0.01 or diff > 100.0: overlap = model.assign_overlap(overlap) true = overlap.Integral(-30.0, 30.0, -30.0, 30.0, 1.0e-12) return true, avg, rms
def __init__ (self, seed, mc_eff_file, sf_file, wp='medium', measurement='central') : self.randm = TRandom3(seed) self.mc_eff_file = TFile(mc_eff_file) # b-tag SFs from POG calib = ROOT.BTagCalibration('deepjet', sf_file) op_dict = OrderedDict() op_dict['loose' ] = 0 op_dict['medium'] = 1 op_dict['tight' ] = 2 print 'Booking b/c reader' v_sys = getattr(ROOT, 'vector<string>')() v_sys.push_back('up') v_sys.push_back('down') self.reader_bc = ROOT.BTagCalibrationReader(op_dict[wp], measurement, v_sys) self.reader_bc.load(calib, 0, 'comb') self.reader_bc.load(calib, 1, 'comb') print 'Booking light reader' self.reader_light = ROOT.BTagCalibrationReader(op_dict[wp], measurement, v_sys) self.reader_light.load(calib, 2, 'incl')
def __init__ (self, seed, wp='loose', measurement='central') : self.randm = TRandom3(seed) rootfname = '/'.join([os.environ["CMSSW_BASE"], 'src/CMGTools/TTbarTime/data/btag_efficiency_CSVv2.root']) #tagging_efficiencies_march2018_btageff-all_samp-inc-DeepCSV_medium.root self.mc_eff_file = TFile(rootfname) # MC b-tag efficiencies as measured in HTT by Adinda self.btag_eff_b = self.mc_eff_file.Get('btag_eff_b') self.btag_eff_c = self.mc_eff_file.Get('btag_eff_c') self.btag_eff_oth = self.mc_eff_file.Get('btag_eff_oth') # b-tag SFs from POG calib = ROOT.BTagCalibration("CSVv2", os.path.expandvars("$CMSSW_BASE/src/CMGTools/TTbarTime/data/CSVv2_94XSF_V2_B_F.csv")) op_dict = { 'loose':0, 'medium':1, 'tight':2 } print 'Booking b/c reader' v_sys = getattr(ROOT, 'vector<string>')() v_sys.push_back('up') v_sys.push_back('down') self.reader_bc = ROOT.BTagCalibrationReader(op_dict[wp], measurement, v_sys) self.reader_bc.load(calib, 0, 'comb') self.reader_bc.load(calib, 1, 'comb') print 'Booking light reader' self.reader_light = ROOT.BTagCalibrationReader(op_dict[wp], measurement, v_sys) self.reader_light.load(calib, 2, 'incl')
def ExpectedSignificance_ToyMC(mean_bgd, Delta_bgd, mean_sig, n_MC):
    """Toy-MC estimate of the expected significance: compare the s+b
    expectation against the distribution of background-only counts."""
    gROOT.Clear()
    gROOT.Delete()
    # count histogram for the background-only pseudo-experiments
    h_Nbgr = TH1D("h_Nbgr", "Background events", 500, -0.5, 499.5)
    # Initialize seed
    rand = TRandom3()
    # Generate toy datasets
    for _ in range(1, n_MC + 1):
        mean_bgr = rand.Gaus(mean_bgd, Delta_bgd)
        # NOTE(review): mean_sb is never read, but its Gaus call advances the
        # generator state, so it is kept to preserve the random sequence
        mean_sb = rand.Gaus(mean_bgd, Delta_bgd) + mean_sig
        h_Nbgr.Fill(rand.Poisson(mean_bgr))
    # p-value: fraction of background-only toys at or above the s+b mean
    pvalue = h_Nbgr.Integral(h_Nbgr.FindBin(mean_bgd + mean_sig),
                             h_Nbgr.GetNbinsX()) / h_Nbgr.Integral()
    significance = ROOT.Math.gaussian_quantile_c(pvalue, 1)
    #print(pvalue," ",significance)
    print('Expected significance after rescaling: ', significance)
    return significance
def __init__(self, parse, tree, hepmc_attrib):
    """Uniform particle generator configured from an INI parser.

    parse: configuration parser (section [main]).
    tree: output tree to which generator branches are attached.
    hepmc_attrib: dict of per-event attributes for HepMC output.
    """
    # minumum and maximum energy, GeV
    self.emin = parse.getfloat("main", "emin")
    self.emax = parse.getfloat("main", "emax")
    print("emin =", self.emin)
    print("emax =", self.emax)
    # pdg for generated particle, electron or photon (default: photon)
    self.pdg = parse.getint("main", "pdg") if parse.has_option("main", "pdg") else 22
    print("pdg =", self.pdg)
    # angular range, electrons for now
    self.theta_min = 0.
    self.theta_max = 0.
    # angles as mlt = -log_10(pi - theta)
    if parse.has_option("main", "mlt_min"):
        mlt_min = parse.getfloat("main", "mlt_min")
        mlt_max = parse.getfloat("main", "mlt_max")
        print("mlt_min =", mlt_min)
        print("mlt_max =", mlt_max)
        self.theta_min = TMath.Pi() - 10.**(-mlt_min)
        self.theta_max = TMath.Pi() - 10.**(-mlt_max)
        print("theta_min =", self.theta_min)
        print("theta_max =", self.theta_max)
    # generator functions for photons and electrons
    self.gen_func = {22: self.gen_phot, 11: self.gen_el}
    # reject unsupported pdg codes early
    if self.gen_func.get(self.pdg) is None:
        print("Fatal: pdg", self.pdg, "is not supported")
        raise KeyError
    # uniform generator, fixed seed for reproducibility
    self.rand = TRandom3()
    self.rand.SetSeed(5572323)
    # electron mass
    self.me = TDatabasePDG.Instance().GetParticle(11).Mass()
    # set the output tree branches per particle type
    if self.pdg == 22:
        tnam = ["gen_E"]
    if self.pdg == 11:
        tnam = ["true_el_pT", "true_el_theta", "true_el_phi", "true_el_E"]
    self.out = self.make_tree(tree, tnam)
    # event attributes for hepmc
    self.hepmc_attrib = hepmc_attrib
    print("Uniform generator initialized")
def reduceDataset(data, scale=1.0): print 'reducing', data.GetName(), 'by scale %.4g' % scale newData = data.emptyClone(data.GetName() + '_reduced') rnd = TRandom3() for i in range(0, data.numEntries()): if (rnd.Rndm() < scale): newData.add(data.get(i), data.weight()) return newData
def stworzZestaw(pojemnikZapisu, pojemnikDanych, liczbaDanych, liczbaKanalow):
    """Create `liczbaDanych` random data sets and store every channel.

    pojemnikZapisu: output object whose Fill() is called once per data set.
    pojemnikDanych: container passed to zapiszDoKanalu for each channel.
    liczbaDanych: number of data sets to generate.
    liczbaKanalow: number of channels per data set.
    """
    gener = TRandom3(0)  # seed 0: unique machine-generated seed
    for i in range(liczbaDanych):
        zestaw = ZestawDanych(liczbaKanalow)
        zestaw.wypelnijKanaly(gener)
        # BUG FIX: the original contained a bare `zapiszDoKanalu` expression
        # here (function referenced but never called) — a no-op, removed.
        for j in range(liczbaKanalow):
            # store both flag values (1 and 0) for each channel
            zapiszDoKanalu(j, 1, zestaw, pojemnikDanych)
            zapiszDoKanalu(j, 0, zestaw, pojemnikDanych)
        # fill the output once per generated set
        pojemnikZapisu.Fill()
def __init__(self, globalTag=None, jetType="AK4PFchs", jmr_vals=[1.09, 1.14, 1.04],
             year=2017, systematics=True):
    """Load JER pT-resolution and scale-factor tables for jet smearing.

    globalTag: JER global tag; when None it is chosen from `year`.
    jetType: jet collection name used to build the txt file names.
    jmr_vals: jet-mass-resolution values (stored, not used here).
    year: data-taking year used to pick the default global tag.
    systematics: if True, prepare nominal/up/down smearing shifts.

    NOTE(review): the mutable default `jmr_vals=[...]` is shared between
    calls; it is only stored here, but confirm callers do not mutate it.
    """
    #--------------------------------------------------------------------------------------------
    # CV: globalTag and jetType not yet used, as there is no consistent set of txt files for
    #     JES uncertainties and JER scale factors and uncertainties yet
    #--------------------------------------------------------------------------------------------
    # GLOBAL TAG
    if globalTag == None:
        if year == 2016:
            globalTag = "Summer16_25nsV1_MC"  #Fall17_25nsV1_MC
        elif year == 2017:
            globalTag = "Fall17_V3_MC"
        elif year == 2018:
            globalTag = "Autumn18_V1_MC"
    # READ JER and JER scale factors and uncertainties
    # (from https://github.com/cms-jet/JRDatabase/tree/master/textFiles/ )
    from JetMETCorrectionTool import ensureJMEFiles
    path_JER = ensureJMEFiles(globalTag, JER=True)
    filename = ensureFile(path_JER, "%s_PtResolution_%s.txt" % (globalTag, jetType))
    filenameUnc = ensureFile(path_JER, "%s_SF_%s.txt" % (globalTag, jetType))
    # LOAD LIBRARIES for accessing JER scale factors and uncertainties from txt files
    for library in [
            "libCondFormatsJetMETObjects", "libPhysicsToolsNanoAODTools"
    ]:
        if library not in gSystem.GetLibraries():
            print("Load Library '%s'" % library.replace("lib", ""))
            gSystem.Load(library)
    # INITIALIZE JER scale factors and uncertainties
    # (cf. PhysicsTools/PatUtils/interface/SmearedJetProducerT.h )
    print("Loading JER from file '%s'..." % filename)
    jer = PyJetResolutionWrapper(filename)
    print("Loading JER SFs and uncertainties from file '%s'..."
          % filenameUnc)
    jerSF_and_Uncertainty = PyJetResolutionScaleFactorWrapper(filenameUnc)
    self.path_JER = path_JER
    self.filename = filename
    self.filenameUnc = filenameUnc
    self.params_sf_and_uncertainty = PyJetParametersWrapper()
    self.params_resolution = PyJetParametersWrapper()
    self.jer = jer
    self.jerSF_and_Uncertainty = jerSF_and_Uncertainty
    self.jmr_vals = jmr_vals
    # shift enums: 0 = nominal, 2 = up, 1 = down
    self.enums_shift = [0, 2, 1] if systematics else [0]  # nom, up, down
    self.random = TRandom3(12345)  # (needed for jet pT smearing)
def GenerateToyDataset( h_mass_temp ):
    """Build a toy dataset by Poisson-fluctuating each bin of the template."""
    h_toy = h_mass_temp.Clone("h_toy")
    h_toy.Reset()
    rand = TRandom3(0)  # seed 0: unique machine-generated seed per call
    # draw a Poisson count around every template bin content
    for ibin in range(1, h_mass_temp.GetNbinsX() + 1):
        expected = h_mass_temp.GetBinContent(ibin)
        h_toy.SetBinContent(ibin, rand.Poisson(expected))
    return h_toy
def calc_pi(n=1000, seed=1234):
    """Monte-Carlo estimate of pi by uniform sampling of the unit square.

    n: number of sampled points.
    seed: seed for the TRandom3 generator (reproducible results).
    Returns a dict {'pi': estimate}.
    """
    print('calculate pi with n = ', n, 'seed =', seed)
    rndm = TRandom3(seed)
    npass = 0
    # count points inside the circle of radius 0.5 centred on the square
    # FIX: original used py2-only xrange alongside py3-style print() calls;
    # range behaves identically here on both versions
    for i in range(n):
        x = rndm.Rndm() - 0.5
        y = rndm.Rndm() - 0.5
        r = (x**2 + y**2)**.5
        if r < 0.5:
            npass += 1.  # float accumulator keeps the division below true
    # area ratio circle/square = pi/4
    pi = npass / n * 4.
    print('pi is', pi)
    return {'pi': pi}
def __init__(self):
    """Resolution and ADC-offset constants, east and west."""
    # resolution sigma_E/E, east and west
    self.sigE, self.sigW = 0.217, 0.306
    self.s2E, self.s2W = 0.1, 0.005
    # difference in mean for ADC, east and west
    self.deltE, self.deltW = 25.2, 11.5
    # random generator
    self.rnd = TRandom3()
def return_rnd_Poisson(mu):
    '''
    Returning a random poisson number

           lambda^{k} . e^{-lambda}
    Po() = ------------------------
                     k!

    k      : events
    lambda : expected separation
    '''
    # NOTE(review): a fresh TRandom3 is constructed on every call;
    # SetSeed(0) picks a unique machine-generated seed, so successive
    # calls remain independent (at the cost of re-seeding each time)
    gRandom = TRandom3()
    gRandom.SetSeed(0)
    # Cache for quicker running
    poisson = gRandom.Poisson
    return poisson(mu)
def __init__(self, parse, tree): #minumum and maximum photon energy, GeV self.emin = parse.getfloat("lgen", "emin") self.emax = parse.getfloat("lgen", "emax") print "emin =", self.emin print "emax =", self.emax #uniform generator for photon energies self.rand = TRandom3() self.rand.SetSeed(5572323) #set the output tree tnam = ["gen_E"] self.out = self.make_tree(tree, tnam) print "Uniform generator initialized"
def __init__(self, parse):
    """H1 bremsstrahlung parametrization: beam kinematics, cross-section
    functions dSigma/dy and dSigma/dtheta, and azimuthal generator.

    parse: INI-style parser providing Ee, Ep and emin under section [main].
    """
    #energy of electron beam, GeV
    self.Ee = parse.getfloat("main", "Ee")
    #proton beam, GeV
    self.Ep = parse.getfloat("main", "Ep")
    print("Ee =", self.Ee, "GeV")
    print("Ep =", self.Ep, "GeV")
    #minimal photon energy, GeV
    self.emin = parse.getfloat("main", "emin")
    print("emin =", self.emin)
    #electron and proton mass
    self.me = TDatabasePDG.Instance().GetParticle(11).Mass()
    self.mp = TDatabasePDG.Instance().GetParticle(2212).Mass()
    self.mep = self.me * self.mp
    #CMS energy squared, GeV^2
    self.s = 2 * self.Ee * self.Ep + self.me**2 + self.mp**2
    self.s += 2 * TMath.Sqrt(self.Ee**2 - self.me**2) * TMath.Sqrt(self.Ep**2 - self.mp**2)
    print("s =", self.s, "GeV^2")
    #normalization, 4 alpha r_e^2
    self.ar2 = 4 * 7.297 * 2.818 * 2.818 * 1e-2  # m barn
    #parametrizations for dSigma/dy and dSigma/dtheta
    # seed the global generator used by TF1.GetRandom before building TF1s
    gRandom.SetSeed(5572323)
    self.eq1par = self.eq1(self)
    self.dSigDy = TF1("dSigDy", self.eq1par, self.emin / self.Ee, 1)
    tmax = 1.5e-3  #maximal photon angle
    self.eq3par = self.eq3(self)
    self.dSigDtheta = TF1("dSigDtheta", self.eq3par, 0, tmax)
    #uniform generator for azimuthal angles
    self.rand = TRandom3()
    self.rand.SetSeed(5572323)
    print("H1 parametrization initialized")
    print("Total cross section: " + str(self.dSigDy.Integral(self.emin / self.Ee, 1)) + " mb")
def ExpectedSignificance_ToyMC(n_MC, lumi=1., profile='n'):
    """Toy-MC expected significance in a mass window around 125 GeV.

    n_MC: number of pseudo-experiments.
    lumi: luminosity scale passed to GetMassDistribution.
    profile: 'y' to smear the background expectation with its systematic
             uncertainty; anything else disables the smearing.
    """
    masswindow = 7.15
    signal = GetMassDistribution(125, lumi)
    bgr = GetMassDistribution(1, lumi)
    Delta_bgd = 0
    mean_bgd = bgr.Integral(bgr.FindBin(125 - 0.5 * masswindow),
                            bgr.FindBin(125 + 0.5 * masswindow))
    if profile == 'y':
        # symmetrised 6%/7% systematic, and a 10.4% scale-up of the mean
        Delta_bgd = 0.5 * (0.06 + 0.07) * mean_bgd
        mean_bgd *= 1.104
    mean_sig = signal.Integral(signal.FindBin(125 - 0.5 * masswindow),
                               signal.FindBin(125 + 0.5 * masswindow))
    print("----------------------------")
    print(" Background events: ", mean_bgd, " +/- ", Delta_bgd)
    print(" Signal events: ", mean_sig)
    print("----------------------------")
    sign = 0.
    rand = TRandom3(42)
    count = 0.
    # Loop over MC cycles
    for i in range(0, n_MC):
        Mean_bgd = rand.Gaus(mean_bgd, Delta_bgd)
        # BUG FIX: the original drew N_bgd from the unsmeared mean_bgd,
        # leaving the profiled Mean_bgd unused; draw from the smeared mean
        # (identical behaviour when Delta_bgd == 0, i.e. profile != 'y')
        N_bgd = rand.Poisson(Mean_bgd)
        N_sig = rand.Poisson(mean_sig)
        # Calculate p-values
        pvalue = IntegratePoissonFromRight(N_bgd, N_bgd + N_sig)
        if pvalue <= 0 or pvalue >= 1.:
            continue  # quantile undefined at the boundaries
        significance = ROOT.Math.gaussian_quantile_c(pvalue, 1)
        sign += significance
        # print(sign," ",pvalue)
        # h_sign.Fill(significance)
        count += 1.
    sign /= count
    print("Expected significance after rescaling: ", sign)
def __init__(self, parse):
    """ZEUS bremsstrahlung parametrization: beam kinematics, cross-section
    functions dSigma/dE_gamma and dSigma/dtheta, and azimuthal generator.

    parse: INI-style parser providing Ee, Ep, emin and optionally tmax
    under section [main].
    """
    #electron beam, GeV
    self.Ee = parse.getfloat("main", "Ee")
    #proton beam, GeV
    self.Ep = parse.getfloat("main", "Ep")
    print("Ee =", self.Ee, "GeV")
    print("Ep =", self.Ep, "GeV")
    #minimal photon energy, GeV
    self.emin = parse.getfloat("main", "emin")
    print("emin =", self.emin)
    #maximal photon angle (default 1.5 mrad, overridable from config)
    self.tmax = 1.5e-3
    if parse.has_option("main", "tmax"):
        self.tmax = parse.getfloat("main", "tmax")
    #electron and proton mass
    self.me = TDatabasePDG.Instance().GetParticle(11).Mass()
    self.mp = TDatabasePDG.Instance().GetParticle(2212).Mass()
    self.mep = self.me * self.mp
    #normalization, 4 alpha r_e^2
    self.ar2 = 4*7.297*2.818*2.818*1e-2  # m barn
    #parametrizations for dSigma/dE_gamma and dSigma/dtheta
    # seed the global generator used by TF1.GetRandom before building TF1s
    gRandom.SetSeed(5572323)
    self.eq1par = self.eq1(self)
    self.dSigDe = TF1("dSigDe", self.eq1par, self.emin, self.Ee)
    self.theta_const = 1e-11  # constant term in theta formula
    self.eq2par = self.eq2(self)
    self.dSigDtheta = TF1("dSigDtheta", self.eq2par, 0, self.tmax)
    #uniform generator for azimuthal angles
    self.rand = TRandom3()
    self.rand.SetSeed(5572323)
    print("ZEUS parametrization initialized")
    print("Total cross section: "+str(self.dSigDe.Integral(self.emin, self.Ee))+" mb")
def __init__(self, cfg_ana, cfg_comp, looperName):
    """Analyzer setup: optional electron energy-scale calibration and
    efficiency weighting, driven by the analyzer/component configs."""
    super(ttHLepAnalyzerBase, self).__init__(cfg_ana, cfg_comp, looperName)
    if self.cfg_ana.doElectronScaleCorrections:
        # calibration dataset tag differs between MC and data
        tag = "Summer12_DR53X_HCP2012" if cfg_comp.isMC else "Moriond2013"
        self.electronEnergyCalibrator = ElectronEnergyCalibrator(
            tag,  ## dataset
            True,  # isAOD
            cfg_comp.isMC,  # isMC
            True,  # updateEnergyError,
            999,  # applyCorrections (999 = correct and/or smear for SC-based energy estimation)
            0.607,  # smearing ratio
            False, False,  #verbose, sync
            TRandom3(0),
        )  # random number generator
    # efficiency corrections are optional on the component config
    if hasattr(cfg_comp, 'efficiency'):
        self.efficiency = EfficiencyCorrector(cfg_comp.efficiency)
    # relaxId is optional on the analyzer config; defaults to False
    self.relaxId = cfg_ana.relaxId if hasattr(cfg_ana, 'relaxId') else False
def make_histo(title, args, include_signal=True):
    """
    Make and return a histogram describe by the above parameters
    """
    span = args.nbins * args.binsize
    histo = TH2F(title, "2D BumpHunter Test %s" % title,
                 args.nbins, 0, span, args.nbins, 0, span)
    histo.GetXaxis().SetTitle("X")
    histo.GetXaxis().SetTitleOffset(2)
    histo.GetYaxis().SetTitle("Y")
    histo.GetYaxis().SetTitleOffset(2)
    rnd = TRandom3()
    rnd.SetSeed(0)
    # exponential background in both coordinates
    for _ in range(args.nbkg):
        histo.Fill(rnd.Exp(args.bkg_mean), rnd.Exp(args.bkg_mean))
    if include_signal:
        # Gaussian signal blob centred at (sig_x, sig_y)
        for _ in range(args.nsig):
            x = rnd.Gaus(args.sig_x, args.sig_spread_x)
            y = rnd.Gaus(args.sig_y, args.sig_spread_y)
            # print x, y
            histo.Fill(x, y)
    return histo
def select_n_events(self, n, random=True, key='EventStat'):
    """Select LFNs until at least `n` events (counted via `key`) accumulate.

    random=True picks files in a random order; otherwise the stored mapping
    is iterated as-is. Returns an LFNSet with the selection, or None when
    no LFNs are known.
    """
    if not self.lfns:
        return None
    # make sure the per-file metadata carries the requested counter
    if not self.lfns.values()[0].has_key(key):
        self.get_bk_metadata()
    selected = {}
    nsel = 0
    if random:
        lfnList = self.lfn_list()
        rndm = TRandom3(0)
        while lfnList and nsel < n:
            # defensive FIX: if Rndm() ever returns exactly 1.0 the original
            # index equalled len(lfnList) and raised IndexError; clamp it
            i = min(int(rndm.Rndm() * len(lfnList)), len(lfnList) - 1)
            lfn = lfnList.pop(i)
            selected[lfn] = self.lfns[lfn]
            nsel += self.lfns[lfn][key]
    else:
        for lfn, info in self.lfns.iteritems():
            selected[lfn] = info
            nsel += info[key]
            if nsel > n:
                break
    return LFNSet(selected)
def __init__(self, seed, wp='medium', measurement='central'): self.randm = TRandom3(seed) self.mc_eff_file = TFile( '$CMSSW_BASE/src/CMGTools/H2TauTau/data/tagging_efficiencies.root') # MC b-tag efficiencies as measured in HTT by Adinda self.btag_eff_b = self.mc_eff_file.Get('btag_eff_b') self.btag_eff_c = self.mc_eff_file.Get('btag_eff_c') self.btag_eff_oth = self.mc_eff_file.Get('btag_eff_oth') # b-tag SFs from POG calib = ROOT.BTagCalibration( "csvv2", os.path.expandvars( "$CMSSW_BASE/src/CMGTools/H2TauTau/data/CSVv2.csv")) op_dict = {'loose': 0, 'medium': 1, 'tight': 2} print 'Booking b/c reader' self.reader_bc = ROOT.BTagCalibrationReader(calib, op_dict[wp], "mujets", measurement) print 'Booking light reader' self.reader_light = ROOT.BTagCalibrationReader(calib, op_dict[wp], "incl", measurement)
def __init__(self):
    """ZDC Poisson-model constants, east and west (J/psi mass tune)."""
    # scaling Poisson term
    self.a1E, self.a1W = 1.4, 2.8
    # non-compensation
    self.a2E, self.a2W = 0.67, 2.5
    # material constant, GeV
    self.E0E, self.E0W = 1.4, 1.4
    # power law parameter
    self.lE, self.lW = 0.7, 0.3
    # difference in mean for ADC, east and west
    self.deltE = 100.9 - 79.1  # J/psi mass
    self.deltW = 101.5 - 94.1
    # random generator
    self.rnd = TRandom3()
def make_histo_1d(title, args, include_signal=True):
    """
    Make and return a 1D TH2F. All the data will have a y coordinate of
    args.binsize/2.0 (to keep the data in the first bin) but the fit needs
    at least 4 points in y, so we make it 5 bins wide along y
    """
    histo = TH2F(title, "1D BumpHunter2D Test %s" % title,
                 args.nbins, 0, args.nbins * args.binsize,
                 5, 0, 5 * args.binsize)
    histo.GetXaxis().SetTitle("X")
    histo.GetXaxis().SetTitleOffset(2)
    histo.GetYaxis().SetTitle("Y")
    histo.GetYaxis().SetTitleOffset(2)
    rnd = TRandom3()
    rnd.SetSeed(0)
    # fixed y keeps every entry in the first y bin
    y_fixed = args.binsize / 2.0
    # exponential background along x only
    for _ in range(args.nbkg):
        histo.Fill(rnd.Exp(args.bkg_mean), y_fixed)
    if include_signal:
        # Gaussian signal bump along x
        for _ in range(args.nsig):
            x = rnd.Gaus(args.sig_x, args.sig_spread_x)
            # print x, y
            histo.Fill(x, y_fixed)
    return histo