def generate_poly(self, b, low, high, nbins, nev):
    """Generate a binned toy histogram from a 1st-order polynomial background.

    b     -- slope coefficient assigned to the workspace variable 'b'
    low, high -- fit range applied to the 'mass' observable
    nbins -- number of mass bins
    nev   -- number of events to generate
    Returns the TH1 created from the binned dataset.
    """
    ws = RooWorkspace('ws', 'ws')
    ws.factory('Polynomial::bgPol(mass[200, 500], b[-0.001])')
    mass = ws.var('mass')
    mass.setRange(low, high)
    ws.var('b').setVal(b)
    pdf = ws.pdf('bgPol')
    mass.setBins(nbins)
    dataset = pdf.generateBinned(RooArgSet(mass), nev)
    return dataset.createHistogram('hBgpol', mass)
def generate_poly(self, b, low, high, nbins, nev):
    """Generate a binned polynomial-background histogram.

    NOTE(review): this appears to be an exact duplicate of the
    generate_poly definition above — consider de-duplicating.
    """
    workspace = RooWorkspace('ws', 'ws')
    workspace.factory('Polynomial::bgPol(mass[200, 500], b[-0.001])')
    # Restrict the observable and fix the slope before generating.
    workspace.var('mass').setRange(low, high)
    workspace.var('b').setVal(b)
    workspace.var('mass').setBins(nbins)
    binned = workspace.pdf('bgPol').generateBinned(
        RooArgSet(workspace.var('mass')), nev)
    hist = binned.createHistogram('hBgpol', workspace.var('mass'))
    return hist
def generate_bw(self, peak, width, low, high, nbins, nev):
    """Generate a binned toy histogram from a Breit-Wigner shape.

    peak, width -- BW pole position and width (workspace vars 'peak'/'width')
    low, high   -- range applied to the 'mass' observable
    nbins, nev  -- binning and number of generated events
    Returns the TH1 created from the binned dataset.
    """
    ws = RooWorkspace('ws', 'ws')
    ws.factory('BreitWigner::bw(mass[200, 600], peak[350, 200, 700], width[50])')
    mass = ws.var('mass')
    mass.setRange(low, high)
    ws.var('peak').setVal(peak)
    ws.var('width').setVal(width)
    shape = ws.pdf('bw')
    mass.setBins(nbins)
    dataset = shape.generateBinned(RooArgSet(mass), nev)
    return dataset.createHistogram('hBw', mass)
def doMCFit(dataSet, x_var, addTitlePlot=''):
    """Fit a double-sided Crystal Ball (DSCB) to the MC tau-mass dataset.

    dataSet      -- RooDataSet containing the x_var branch
    x_var        -- name of the mass observable in dataSet
    addTitlePlot -- extra text appended to the plot title
    Returns (workspace, canvas); the shape parameters are frozen
    (setConstant) before returning so the workspace can be reused on data.
    """
    # NOTE: the original code reduced dataSet with an empty cut string and
    # never used the result; the dead reduce() has been removed.
    x = RooRealVar(x_var, 'm_{#tau}', 1757, 1797, 'MeV')
    numBins = 100  # define here so that if I change it also the ndof change accordingly
    x.setBins(numBins)

    ######################################################
    # DEFINE PDF
    ######################################################
    w = RooWorkspace('w')
    getattr(w, 'import')(x)
    # Symmetric tails: same #alpha and n on both sides of the DSCB.
    w.factory('''RooDSCBShape::DSCB({0}, #mu[1777, 1760,1790], #sigma[5,0,10], #alpha[1.2], n[50, 1, 150], #alpha, n )'''.format(x_var))
    #w.var('n').setConstant(False)
    signal = w.pdf('DSCB')
    # w.factory('''RooGaussian::GG({0}, #mu, #sigma )'''.format(x_var))
    # signal = w.pdf('GG')

    # Fit over the named sub-range only.
    x.setRange('fit_region', 1757, 1797)
    signal.fitTo(dataSet, RooFit.Save(), RooFit.Range('fit_region'))

    # Frame with data and fitted pdf overlaid.
    frame = x.frame(RooFit.Title(' Combined mass KK#mu ' + addTitlePlot))
    dataSet.plotOn(frame)
    signal.plotOn(frame, RooFit.LineWidth(2))

    # Legends: parameter box plus a chi2 label.
    signal.paramOn(frame, RooFit.Layout(0.6, 0.9, 0.9))
    chi2 = round(frame.chiSquare(), 2)
    leg = TLegend(0.3, 0, .10, .10)
    leg.SetBorderSize(0)
    leg.SetFillStyle(0)
    leg.AddEntry(0, '#chi^{2} =' + str(chi2), '')
    frame.addObject(leg)

    c1 = TCanvas('c1', 'c1')
    frame.Draw()
    c1.Update()

    # Freeze the fitted shape for later use on data.
    for prm in ('#mu', '#sigma', '#alpha', 'n'):
        # TODO: automatize finding of variables from the function
        w.var(prm).setConstant()
    return w, c1
def generate_bw(self, peak, width, low, high, nbins, nev):
    """Generate a binned Breit-Wigner toy histogram.

    NOTE(review): this appears to duplicate the generate_bw definition
    above — consider de-duplicating.
    """
    workspace = RooWorkspace('ws', 'ws')
    workspace.factory(
        'BreitWigner::bw(mass[200, 600], peak[350, 200, 700], width[50])')
    # Configure observable range and BW parameters, then generate.
    workspace.var('mass').setRange(low, high)
    workspace.var('peak').setVal(peak)
    workspace.var('width').setVal(width)
    workspace.var('mass').setBins(nbins)
    binned = workspace.pdf('bw').generateBinned(
        RooArgSet(workspace.var('mass')), nev)
    hist = binned.createHistogram('hBw', workspace.var('mass'))
    return hist
import csv

# Input ntuples and output log for the sideband calculation.
dataFileL0 = 'result_flatNtuple_LbL0_preSelection_noKinematicCut.root'
dataFileTk = 'result_flatNtuple_LbTk_preSelection_noKinematicCut.root'
outLog = 'log_calcSideband_eventExcludeSignalRegion'

# Mass window excluded from the sideband (GeV, from the variable ranges below).
signalRegion = [5.54, 5.70]
# pT bins: [low edge, high edge, label].
ptRegion = [
    [20, 30, '20To30'],
    [30, 33, '30To33'],
    [33, 38, '33To38'],
    [38, 45, '38To45'],
    [45, 500, '45Tinf'],
]

# Workspace holding the observables (second arg False: do not open a file).
space = RooWorkspace('space', False)
space.factory('lbl0Mass[5.4,5.9]')
space.factory('lbtkMass[5.4,5.9]')
space.factory('lbtkbarMass[5.4,5.9]')
space.factory('lbl0Pt[20.,500.]')
space.factory('lbtkPt[20.,500.]')
space.factory('tktkMass[0.5,2.0]')

# CSV header columns (value/error per pT bin).
# NOTE(review): list is truncated at this chunk boundary.
writeColumn = [
    'channel',
    '20To30Val', '20To30Err',
    '30To33Val', '30To33Err',
    '33To38Val', '33To38Err',
    '38To45Val',
#!/usr/bin/env python from ROOT import TFile, TCanvas from ROOT import RooDataSet, RooWorkspace, RooArgSet tf = TFile.Open('AnalysisOut.root') tree = tf.Get('AnalysisTree') ws = RooWorkspace("w","w") observables = RooArgSet() ws.defineSet("observables",observables) ws.factory("mass[5050,6000]") getattr(ws,'set')("observables").add(ws.var("mass")) ws.factory("gamgams_pt[0,40e3]") getattr(ws,'set')("observables").add(ws.var("gamgams_pt")) mc = RooDataSet('mc','',getattr(ws,'set')('observables')) data = RooDataSet('data','',getattr(ws,'set')('observables')) for ev in range(tree.GetEntries()): tree.GetEntry(ev) if tree.itype != -88 and tree.itype!=72 and tree.itype!=82: continue if tree.bdtoutput<0.2: continue if tree.B0_MM < ws.var("mass").getMin() or tree.B0_MM > ws.var("mass").getMax(): continue ws.var("mass").setVal(tree.B0_MM) ws.var("gamgams_pt").setVal(tree.gamgams_PT) if tree.itype == -88:
def ControlDataFit(free=True, sim=True):
    """Build and fit the control-channel data model.

    Loads the six control samples (B0/Bs/charged, gamma/pi0) into a
    workspace, clones MC shapes from the B0 channel for the control
    channels, builds per-sample SUM pdfs with yield ratios shared across
    channels, then optionally runs per-sample free fits (free=True) and
    a simultaneous fit (sim=True). Results are saved as snapshots and
    the workspace is written to files/w_ctrl.root.
    Relies on module-level helpers: path, plot, ImportMCShapes,
    CreateSimPdf, CreateSimData.
    """
    w = RooWorkspace('w_ctrl', 'w_ctrl')
    samples = ['b0g', 'bsg', 'cg', 'b0pi0', 'bspi0', 'cpi0']
    # create the category
    w.factory('cat[%s]' % (','.join(samples)))
    # Input files per sample.
    files = {
        'b0g': path + "../New/Tuples/Data/9_B2DstKpi_Dst2DgammTuple_BestCut.root",
        'b0pi0': path + "../New/Tuples/Data/9_B2Dstpi0Tuple_BestCut.root",
        'bsg': path + "../New/Tuples/Data/9_Bs2DstKpi_Dst2DgammaTuple_BestCut.root",
        'bspi0': path + "../New/Tuples/Data/9_Bs2Dstpi0Tuple_BestCut.root",
        'cg': path + "../New/Tuples/Data/9_B2Dstpipi_Dst2DgammTuple_BestCut.root",
        'cpi0': path + "../New/Tuples/Data/9_B2Dstpipi_Dst2Dpi0Tuple_No16_BestCut.root"
    }
    # Make the dsets
    w.factory("B_DTFDict_D0_B_M[5100,5900]")
    w.var("B_DTFDict_D0_B_M").setBins(80)
    for samp in samples:
        assert (os.path.exists(files[samp]))
        tf = TFile(files[samp])
        t = tf.Get('DecayTree')
        # Only read the mass branch for speed.
        t.SetBranchStatus("*", 0)
        t.SetBranchStatus("B_DTFDict_D0_B_M", 1)
        dset = RooDataSet("data_%s" % (samp), "", t,
                          RooArgSet(w.var("B_DTFDict_D0_B_M")))
        getattr(w, 'import')(dset)
        tf.Close()
    # Draw the raw datasets on a 3x2 canvas.
    c = TCanvas('ctrl', 'ctrl', 2100, 1200)
    c.Divide(3, 2)
    for i, samp in enumerate(samples):
        pdfname = 'data_pdf_%s' % (samp)
        dsetname = 'data_%s' % (samp)
        plot(c.cd(i + 1), w, pdfname, dsetname)
    c.Update()
    c.Modified()
    c.Print("plots/ctrl.pdf")
    # Make the total pdf
    # First try and merge the different bits from the different workspaces
    ImportMCShapes(w)
    # now want to make the ctrl pdfs
    # signal and misrec are the same as b0
    sig_cg = w.pdf('sig_mc_pdf_b0g').Clone('sig_mc_pdf_cg')
    getattr(w, 'import')(sig_cg)
    sig_cpi0 = w.pdf('sig_mc_pdf_b0pi0').Clone('sig_mc_pdf_cpi0')
    getattr(w, 'import')(sig_cpi0)
    misrec_cg = w.pdf('misrec_mc_pdf_b0g').Clone('misrec_mc_pdf_cg')
    getattr(w, 'import')(misrec_cg)
    misrec_cpi0 = w.pdf('misrec_mc_pdf_b0pi0').Clone('misrec_mc_pdf_cpi0')
    getattr(w, 'import')(misrec_cpi0)
    # ignore the lambdas for now
    # there will be some Bs0 -> D*0 piK stuff with a misID'd K (I think this
    # is the big bit which appears below the peak) - do 1CB for this
    w.factory('dm_ctrl_misidk2pi[-150,-450,-50]')
    w.factory('sum::bdstkp_cg_mean( b0g_mean, dm_ctrl_misidk2pi)')
    w.factory('sum::bdstkp_cpi0_mean( b0pi0_mean, dm_ctrl_misidk2pi)')
    w.factory('bdstkp_cg_sigma[40,5,200]')
    w.factory('bdstkp_cpi0_sigma[40,5,200]')
    w.factory('bdstkp_c_alpha1[-2.1,-4,0]')
    w.factory('bdstkp_c_n1[3]')
    w.factory(
        'CBShape::bdstkp_mc_pdf_cg( B_DTFDict_D0_B_M, bdstkp_cg_mean, bdstkp_cg_sigma, bdstkp_c_alpha1, bdstkp_c_n1 )'
    )
    w.factory(
        'CBShape::bdstkp_mc_pdf_cpi0( B_DTFDict_D0_B_M, bdstkp_cpi0_mean, bdstkp_cpi0_sigma, bdstkp_c_alpha1, bdstkp_c_n1 )'
    )
    # the stuff above the peak is probably mostly B- -> D*0 pi- with another
    # random pi so could get this shape and shift it
    w.factory('dm_ctrl_dstpi[100,10,300]')
    w.factory('sum::bdsth_cg_mean( bdsth_b0g_mean, dm_ctrl_dstpi )')
    w.factory('sum::bdsth_cpi0_mean( bdsth_b0pi0_mean, dm_ctrl_dstpi )')
    w.factory(
        "CBShape::bdsth_cg_cb1( B_DTFDict_D0_B_M, bdsth_cg_mean, bdsth_b0g_sigma, bdsth_alpha1, bdsth_n1 )"
    )
    w.factory(
        "CBShape::bdsth_cg_cb2( B_DTFDict_D0_B_M, bdsth_cg_mean, bdsth_b0g_sigma, bdsth_alpha2, bdsth_n2 )"
    )
    w.factory("SUM::bdsth_mc_pdf_cg( bdsth_f1*bdsth_cg_cb1, bdsth_cg_cb2 )")
    w.factory(
        "CBShape::bdsth_cpi0_cb1( B_DTFDict_D0_B_M, bdsth_cpi0_mean, bdsth_b0pi0_sigma, bdsth_alpha1, bdsth_n1 )"
    )
    w.factory(
        "CBShape::bdsth_cpi0_cb2( B_DTFDict_D0_B_M, bdsth_cpi0_mean, bdsth_b0pi0_sigma, bdsth_alpha2, bdsth_n2 )"
    )
    w.factory(
        "SUM::bdsth_mc_pdf_cpi0( bdsth_f1*bdsth_cpi0_cb1, bdsth_cpi0_cb2 )")
    # Then make combinatorial shape in each cateogry
    # let these be independent for now
    for samp in samples:
        w.factory("comb_mc_%s_p0[-0.001,-0.1,0.]" % samp)
        w.factory(
            "Exponential::comb_mc_pdf_%s( B_DTFDict_D0_B_M, comb_mc_%s_p0 )"
            % (samp, samp))
        w.factory("%s_comb_y[3000,0,12000]" % samp)
    # Now need to figure out what yields to restrict
    # sig yield first (require b0 / bs ratio consistent between g and pi0)
    w.factory("b0g_sig_y[3000,0,12000]")
    w.factory("b0pi0_sig_y[800,0,4000]")
    w.factory("bs2b0_rat[2.5,1.,4.]")
    w.factory("prod::bsg_sig_y(b0g_sig_y, bs2b0_rat)")
    w.factory("prod::bspi0_sig_y(b0pi0_sig_y, bs2b0_rat)")
    w.factory("cg_sig_y[6000,0,32000]")
    w.factory("cpi0_sig_y[1000,0,8000]")
    # now mis rec yield (ratio of this to sig should be the same for b0 and
    # bs but will be different for g vs pi0)
    w.factory("misrec_to_sig_rat_g[0.2,0.001,0.6]")
    w.factory("misrec_to_sig_rat_pi0[0.2,0.001,0.6]")
    w.factory("prod::b0g_misrec_y( misrec_to_sig_rat_g, b0g_sig_y )")
    w.factory("prod::b0pi0_misrec_y( misrec_to_sig_rat_pi0, b0pi0_sig_y )")
    w.factory("prod::bsg_misrec_y( misrec_to_sig_rat_g, bsg_sig_y )")
    w.factory("prod::bspi0_misrec_y( misrec_to_sig_rat_pi0, bspi0_sig_y )")
    w.factory("prod::cg_misrec_y( misrec_to_sig_rat_g, cg_sig_y )")
    w.factory("prod::cpi0_misrec_y( misrec_to_sig_rat_pi0, cpi0_sig_y )")
    # the cases of B->D*pipi, B->D*KK, Lb->D*ph all involve a misID so will
    # be different for B0 and Bs (as they differ with a K or pi misID) however
    # for all of these the ratio of g -> pi0 should be the same
    # there is also Bs->D*KK which should scale the same for g and pi0 modes
    w.factory("misid_g2pi0_rat[0.1,0.0001,10.]")
    w.factory("b0g_bdstpp_y[1000,0,12000]")
    w.factory("bsg_bdstpp_y[1000,0,12000]")
    w.factory("prod::b0pi0_bdstpp_y( misid_g2pi0_rat, b0g_bdstpp_y )")
    w.factory("prod::bspi0_bdstpp_y( misid_g2pi0_rat, bsg_bdstpp_y )")
    w.factory("b0g_bdstkk_y[1000,0,12000]")
    w.factory("bsg_bdstkk_y[1000,0,12000]")
    w.factory("prod::b0pi0_bdstkk_y( misid_g2pi0_rat, b0g_bdstkk_y )")
    w.factory("prod::bspi0_bdstkk_y( misid_g2pi0_rat, bsg_bdstkk_y )")
    w.factory("b0g_lbdstph_y[1000,0,12000]")
    w.factory("bsg_lbdstph_y[1000,0,12000]")
    w.factory("prod::b0pi0_lbdstph_y( misid_g2pi0_rat, b0g_lbdstph_y )")
    w.factory("prod::bspi0_lbdstph_y( misid_g2pi0_rat, bsg_lbdstph_y )")
    w.factory("bsdstkk_to_bdstkk_rat[1.,0.1,2.]")
    w.factory("prod::b0g_bsdstkk_y( bsdstkk_to_bdstkk_rat, b0g_bdstkk_y )")
    w.factory("prod::b0pi0_bsdstkk_y( bsdstkk_to_bdstkk_rat, b0pi0_bdstkk_y )")
    w.factory("prod::bsg_bsdstkk_y( bsdstkk_to_bdstkk_rat, bsg_bdstkk_y )")
    w.factory("prod::bspi0_bsdstkk_y( bsdstkk_to_bdstkk_rat, bspi0_bdstkk_y )")
    # B -> DKpi same logic as misrec
    w.factory("bdkp_to_sig_rat_g[0.2,0.001,0.6]")
    w.factory("bdkp_to_sig_rat_pi0[0.2,0.001,0.6]")
    w.factory("prod::b0g_bdkp_y( bdkp_to_sig_rat_g, b0g_sig_y )")
    w.factory("prod::b0pi0_bdkp_y( bdkp_to_sig_rat_pi0, b0pi0_sig_y )")
    w.factory("prod::bsg_bdkp_y( bdkp_to_sig_rat_g, bsg_sig_y )")
    w.factory("prod::bspi0_bdkp_y( bdkp_to_sig_rat_pi0, bspi0_sig_y )")
    # infact can be much more sophisitcated with efficiencies etc here
    # i.e. if the PID cut distinguishing B -> D0 Kpi from B -> D0 pipi is
    # binary one knows the exact yield of the cross feed in each
    # the B -> DKp cross feed yield (float for now)
    w.factory("cg_bdstkp_y[3000,0,12000]")
    w.factory("cpi0_bdstkp_y[800,0,4000]")
    # B -> D* K / B -> D* pi (adding random pi- to B0 and random K- to Bs0)
    # so ratio to signal should be same for both g and pi0 modes but
    # different for B0 -> Bs
    w.factory("bdsth_to_sig_rat_addpi[0.2,0.001,0.6]")
    w.factory("bdsth_to_sig_rat_addk[0.2,0.001,0.6]")
    w.factory("prod::b0g_bdsth_y( bdsth_to_sig_rat_addpi, b0g_sig_y )")
    w.factory("prod::b0pi0_bdsth_y( bdsth_to_sig_rat_addpi, b0pi0_sig_y )")
    w.factory("prod::bsg_bdsth_y( bdsth_to_sig_rat_addk, bsg_sig_y )")
    w.factory("prod::bspi0_bdsth_y( bdsth_to_sig_rat_addk, bspi0_sig_y )")
    # the B- -> D* pi- (can probably constrain this better as well)
    w.factory("cg_bdsth_y[3000,0,12000]")
    w.factory("cpi0_bdsth_y[800,0,4000]")
    # Lb -> Dph (mid-ID k for p and pi for p and add random g or pi0) so will
    # be different for all 4 really
    # express this ratio to the Lb -> D*ph one (they should be similar in
    # magnitude?)
    w.factory("lbdph_to_lbdstph_b0g[1.,0.5,2.]")
    w.factory("lbdph_to_lbdstph_b0pi0[1.,0.5,2.]")
    w.factory("lbdph_to_lbdstph_bsg[1.,0.5,2.]")
    w.factory("lbdph_to_lbdstph_bspi0[1.,0.5,2.]")
    w.factory("prod::b0g_lbdph_y( lbdph_to_lbdstph_b0g, b0g_lbdstph_y )")
    w.factory("prod::b0pi0_lbdph_y( lbdph_to_lbdstph_b0pi0, b0pi0_lbdstph_y )")
    w.factory("prod::bsg_lbdph_y( lbdph_to_lbdstph_bsg, bsg_lbdstph_y )")
    w.factory("prod::bspi0_lbdph_y( lbdph_to_lbdstph_bspi0, bspi0_lbdstph_y )")
    # Part reco shape should have same Bs / B0 ratio
    w.factory("partrec_to_sig_rat_g[0.2,0.001,0.6]")
    w.factory("partrec_to_sig_rat_pi0[0.2,0.001,0.6]")
    w.factory("prod::b0g_partrec_y( partrec_to_sig_rat_g, b0g_sig_y )")
    w.factory("prod::b0pi0_partrec_y( partrec_to_sig_rat_pi0, b0pi0_sig_y )")
    w.factory("prod::bsg_partrec_y( partrec_to_sig_rat_g, bsg_sig_y )")
    w.factory("prod::bspi0_partrec_y( partrec_to_sig_rat_pi0, bspi0_sig_y )")
    # make the yields (different for ctrl and b0 / bs)
    b_components = [
        'sig', 'misrec', 'bdstpp', 'bdstkk', 'bsdstkk', 'bdkp', 'bdsth',
        'partrec', 'lbdph', 'lbdstph', 'comb'
    ]
    for samp in ['b0g', 'b0pi0', 'bsg', 'bspi0']:
        fact_str = "SUM::data_pdf_%s(" % samp
        for comp in b_components:
            fact_str += "%s_%s_y*%s_mc_pdf_%s," % (samp, comp, comp, samp)
        fact_str = fact_str[:-1] + ")"
        w.factory(fact_str)
        w.pdf('data_pdf_%s' % samp).Print('v')
    ctrl_components = ['sig', 'misrec', 'bdstkp', 'bdsth', 'comb']
    for samp in ['cg', 'cpi0']:
        fact_str = "SUM::data_pdf_%s(" % samp
        for comp in ctrl_components:
            fact_str += "%s_%s_y*%s_mc_pdf_%s," % (samp, comp, comp, samp)
        fact_str = fact_str[:-1] + ")"
        w.factory(fact_str)
        w.pdf('data_pdf_%s' % samp).Print('v')
    CreateSimPdf(w, 'data')
    CreateSimData(w, 'data')
    # Now fix appropriate parameters
    # To start with we'll fix all shape parameters from MC and just float the
    # yields (and exponential slope)
    for comp in b_components:
        if comp == 'comb':
            continue  # no pre-defined shape for combinatorial
        if comp == 'bsdstkk':
            continue  # this params for this piece are covered by bdstkk
        w.set('%s_mc_sim_pdf_pars' % comp).setAttribAll("Constant")
    # Now relax the constraints on a few important params
    w.var("b0g_mean").setConstant(False)
    w.var("b0g_sigma").setConstant(False)
    #w.var("dm_b02bs").setConstant(False)
    #w.var("dm_g2pi0").setConstant(False)
    #w.var("ssig_b02bs").setConstant(False)
    #w.var("ssig_g2pi0").setConstant(False)
    #w.var("b0g_misrec_mean").setConstant(False)
    #w.var("b0g_misrec_sigma").setConstant(False)
    #w.var("dm_missg2addg").setConstant(False)
    #w.var("ssig_missg2addg").setConstant(False)
    w.Print('v')
    w.pdf('data_sim_pdf').Print('v')
    w.data('data_sim_data').Print('v')
    # free fit first
    if free:
        for i, samp in enumerate(samples):
            pdfname = 'data_pdf_%s' % (samp)
            dsetname = 'data_%s' % (samp)
            w.pdf(pdfname).fitTo(
                w.data(dsetname))  # nothing to fit in this case
            pars = w.pdf(pdfname).getParameters(
                RooArgSet(w.var("B_DTFDict_D0_B_M")))
            w.saveSnapshot('data_free_fit_%s' % samp, pars)
    if sim:
        pdfname = 'data_sim_pdf'
        dsetname = 'data_sim_data'
        w.pdf(pdfname).fitTo(w.data(dsetname))
        pars = w.pdf(pdfname).getParameters(
            RooArgSet(w.var("B_DTFDict_D0_B_M")))
        w.saveSnapshot('data_sim_fit', pars)
    w.writeToFile('files/w_ctrl.root')
# Fill the systematic-variation normalizations (bin 2) relative to each
# nominal template's integral: scale_j up/down and per-process stat up/down.
histograms["ZjLF_scale_jUp"].Fill(2, histograms["ZjLF"].Integral() * 1.00)
histograms["ZjHF_scale_jUp"].Fill(2, histograms["ZjHF"].Integral() * 1.00)
histograms["TT_scale_jUp"].Fill(2, histograms["TT"].Integral() * 1.00)
histograms["ZH_scale_jDown"].Fill(2, histograms["ZH"].Integral() * 1.10)
histograms["ZjLF_scale_jDown"].Fill(2, histograms["ZjLF"].Integral() * 1.00)
histograms["ZjHF_scale_jDown"].Fill(2, histograms["ZjHF"].Integral() * 1.00)
histograms["TT_scale_jDown"].Fill(2, histograms["TT"].Integral() * 1.00)
# Statistical variations: +/-50% of the nominal integral.
histograms["ZH_statZHUp"].Fill(2, histograms["ZH"].Integral() * 1.50)
histograms["ZjLF_statZjLFUp"].Fill(2, histograms["ZjLF"].Integral() * 1.50)
histograms["ZjHF_statZjHFUp"].Fill(2, histograms["ZjHF"].Integral() * 1.50)
histograms["TT_statTTUp"].Fill(2, histograms["TT"].Integral() * 1.50)
histograms["ZH_statZHDown"].Fill(2, histograms["ZH"].Integral() * 0.50)
histograms["ZjLF_statZjLFDown"].Fill(2, histograms["ZjLF"].Integral() * 0.50)
histograms["ZjHF_statZjHFDown"].Fill(2, histograms["ZjHF"].Integral() * 0.50)
histograms["TT_statTTDown"].Fill(2, histograms["TT"].Integral() * 0.50)
# histograms["ZH_statZHDown"].Fill(2, histograms["ZH"].Integral()/1.50)
# histograms["ZjLF_statZjLFDown"].Fill(2, histograms["ZjLF"].Integral()/1.50)
# histograms["ZjHF_statZjHFDown"].Fill(2, histograms["ZjHF"].Integral()/1.50)
# histograms["TT_statTTDown"].Fill(2, histograms["TT"].Integral()/1.50)

# Export every histogram as a RooDataHist into a workspace file.
outname = "datacards/simple/simple-shape-experiment.root"
# outfile = TFile.Open(outname, "RECREATE")
ws = RooWorkspace(channel, channel)
ws.factory(var + "[0,5]")
realvar = ws.var(var)
obs = RooArgList(realvar)
# NOTE: iteritems() is Python 2 only.
for n, h in histograms.iteritems():
    datahist = RooDataHist(n, "", obs, h)
    getattr(ws, "import")(datahist)
ws.writeToFile(outname)
#canv.GetPad(2).SetLogy() canv.SetFillColor(4000) canv.SetFillStyle(4000) #canv.GetPad(1).SetFillStyle(4000) #canv.GetPad(1).SetFillColor(4000) #canv.GetPad(2).SetFillStyle(4000) #canv.GetPad(2).SetFillColor(4000) return canv canv=NewCanvas() canv.SaveAs(outFig+'[') TGaxis.SetMaxDigits(3) space=RooWorkspace('space',False) space.factory('lbtkMass[5.1,8.]') space.factory('lbtkbarMass[5.1,8.]') space.factory('bdMass[4.0,7.]') space.factory('bdbarMass[4.0,7.]') #space.factory('bsMass[4.5,7.0]') space.factory('bsMass[5.0,7.5]') space.factory('tk1Pt[0.,100.]') space.factory('tk2Pt[0.,100.]') tk1Pt=space.var('tk1Pt') tk2Pt=space.var('tk2Pt') # ########## load workspace #################### # workspaceFile=TFile.Open('fitResTo2016Data__LbL0.root') # loadSpace=workspaceFile.Get('space') # loadSpace.SetName('loadSpace')
getattr(ws, 'import')(biasData) # if being executed run bias study if __name__ == '__main__': ntoys = int(sys.argv[1]) category = int(sys.argv[2]) mass = float(sys.argv[3]) channel = sys.argv[4] order = int(sys.argv[5]) turnon = sys.argv[6] #fitted turn on type!!! truth = sys.argv[7] #truth model type!!! bs = RooWorkspace('bias_study') bs.factory("procWeight[0]") bs.factory("puWeight[0]") bs.factory("weight[0]") bs.factory("Mzg[100,180]") bs.var("Mzg").setRange("ROI", mass - 1.5, mass + 1.5) bs.var("Mzg").setBins(40000, "cache") bs.factory("Mz[0]") #bs.factory("dMzg[0,25]") #bs.factory("dMz[0,25]") bs.factory("r94cat[cat1=1,cat2=2,cat3=3,cat4=4]") bs.defineSet("observables", "Mzg,Mz,r94cat,procWeight,puWeight") bs.defineSet("observables_weight", "Mzg,Mz,r94cat,procWeight,puWeight,weight") prepare_truth_models(bs, category, mass, channel, turnon, truth)
def setup_workspace():
    """Configure ROOT style/logging and build the D0/DelM fit workspace.

    Creates observables D0_M [1800,1930] MeV and Del_M [139,155] MeV,
    defines named sub-ranges (signal/high/low sidebands in each observable,
    and their combinations), and builds the signal, combinatorial, misID
    and prompt pdfs plus the extended sum Final_PDF.
    Returns the populated RooWorkspace.

    BUGFIX: the d_side branches previously called Del_M.setRange with
    1800-1930 MeV values, which lie outside Del_M's [139,155] domain;
    those are D0 mass windows and are now set on D0_M.
    """
    import ROOT
    from ROOT import RooWorkspace, gROOT, gStyle, RooAbsReal, RooMsgService, RooFit
    #from ROOT import RooFit, gROOT, gDirectory, gStyle, gPad, TTree, RooCmdArg,RooBinning
    #from ROOT import RooRealVar, RooMappedCategory, RooCategory, RooFormulaVar, RooAbsData
    #from ROOT import RooBMixDecay, RooMCStudy, RooAddModel, RooEffProd, RooMsgService
    #from ROOT import RooWorkspace, TCanvas, TFile, kFALSE, kTRUE, RooDataSet, TStopwatch
    #from ROOT import RooArgSet, RooArgList, RooRandom, RooMinuit, RooAbsReal, RooDataHist
    #from ROOT import TBrowser, TH2F, TF1, TH1F, RooGenericPdf, RooLinkedList

    # Global plotting style.
    gROOT.SetStyle("Plain")
    gStyle.SetPalette(1)
    gStyle.SetOptStat(0)
    gStyle.SetOptFit(0)
    gStyle.SetOptStat(1111)
    gStyle.SetOptFit(10111)
    gStyle.SetOptTitle(1)

    #RooAbsReal.defaultIntegratorConfig().Print()
    #RooAbsReal.defaultIntegratorConfig().setEpsAbs(1e-10)
    #RooAbsReal.defaultIntegratorConfig().setEpsRel(1e-10)
    RooAbsReal.defaultIntegratorConfig().Print()
    print("Numeric integration set up")  #TODO: is the integration acceptable?

    ##This controls the logging output from RooFit
    #RooMsgService.instance().addStream(RooFit.DEBUG,RooFit.Topic(RooFit.Fitting))
    RooMsgService.instance().deleteStream(1)
    #RooMsgService.instance().addStream(RooFit.INFO,RooFit.Topic(RooFit.Generation + RooFit.Minization + RooFit.Plotting + RooFit.Fitting + RooFit.Integration + RooFit.LinkStateMgmt + RooFit.Eval + RooFit.Caching + RooFit.Optimization + RooFit.ObjectHandling + RooFit.InputArguments + RooFit.Tracing + RooFit.Contents + RooFit.DataHandling + RooFit.NumericIntegration))
    RooMsgService.instance().addStream(RooFit.INFO,RooFit.Topic(RooFit.LinkStateMgmt + RooFit.Caching + RooFit.ObjectHandling + RooFit.InputArguments + RooFit.Tracing))
    RooMsgService.instance().Print()
    print("Message service set up")

    w = RooWorkspace("w", False)
    # Uniform random variable used for pre-cut bookkeeping.
    w.factory("RAND[0,1]")
    D0_M = w.factory("D0_M[1800,1930]")  #TODO: define mass ranges slightly better
    Del_M = w.factory("Del_M[139,155]")
    D0_M.setUnit("MeV")
    Del_M.setUnit("MeV")

    # Build every combination of D* and D0 sub-regions; the empty string
    # leaves that observable unrestricted in the combined range name.
    for dst_side in ["", "dstsig", "dsthigh", "dstlow"]:
        for d_side in ["", "dsig", "dhigh", "dlow"]:
            name = dst_side + d_side
            if dst_side == "dsthigh":
                Del_M.setRange(name, 148., 155.)
            elif dst_side == "dstsig":
                Del_M.setRange(name, 143., 148.)
            elif dst_side == "dstlow":
                Del_M.setRange(name, 139., 143.)
            # BUGFIX: these are D0 mass windows — set them on D0_M, not Del_M.
            if d_side == "dhigh":
                D0_M.setRange(name, 1885., 1930.)
            elif d_side == "dsig":
                D0_M.setRange(name, 1835., 1885.)
            elif d_side == "dlow":
                D0_M.setRange(name, 1800., 1835.)

    w.defineSet("args", "D0_M,Del_M")
    w.defineSet("argsPreCut", "D0_M,Del_M,RAND")

    # D0 mass: double Gaussian signal, shared-mean misID, linear background.
    w.factory("RooGaussian::D0M_Sig_Gaus1(D0_M,D0M_Sig_Gaus_Mean[1865,1850,1880],D0M_Sig_Gaus_Sigma1[10,1,30])")
    w.factory("RooGaussian::D0M_Sig_Gaus2(D0_M,D0M_Sig_Gaus_Mean,D0M_Sig_Gaus_Sigma2[3,1,30])")
    w.factory("SUM::D0M_Sig_Gaus(D0M_Sig_Gaus1_Frac[0.8,0,1]*D0M_Sig_Gaus1,D0M_Sig_Gaus2)")
    w.factory("RooGaussian::D0M_MisId_Gaus1(D0_M,D0M_MisId_Gaus_Mean[1800,1740,1820],D0M_Sig_Gaus_Sigma1)")
    w.factory("RooGaussian::D0M_MisId_Gaus2(D0_M,D0M_MisId_Gaus_Mean,D0M_Sig_Gaus_Sigma2)")
    w.factory("SUM::D0M_MisId_Gaus(D0M_Sig_Gaus1_Frac*D0M_MisId_Gaus1,D0M_MisId_Gaus2)")
    w.factory("RooChebychev::D0M_Bkg_Poly(D0_M,{D0M_Bkg_Poly_a1[0,-1,1]})")

    # Delta mass: double Gaussian signal and threshold (RooDstD0BG) background.
    w.factory("RooGaussian::DelM_Sig_Gaus1(Del_M,DelM_Sig_Gaus_Mean[145.5,143,148],DelM_Sig_Gaus_Sigma1[1,0,5] )")
    w.factory("RooGaussian::DelM_Sig_Gaus2(Del_M,DelM_Sig_Gaus_Mean,DelM_Sig_Gaus_Sigma2[.1,0,2] )")
    w.factory("SUM::DelM_Sig_Gaus(DelM_Sig_Gaus1_Frac[0.8,0,1]*DelM_Sig_Gaus1,DelM_Sig_Gaus2)")
    w.factory("RooDstD0BG::DelM_Bkg(Del_M,DelM_Bkg_m0[139.5,134,144],DelM_Bkg_c[80,0,1000],DelM_Bkg_a[-1,-100,10],DelM_Bkg_b[0.2,-0.2,10])")

    # 2D products and the extended sum of all four components.
    w.factory("PROD::Sig(DelM_Sig_Gaus,D0M_Sig_Gaus)")
    w.factory("PROD::Comb(DelM_Bkg,D0M_Bkg_Poly)")
    w.factory("PROD::MisId(DelM_Sig_Gaus,D0M_MisId_Gaus)")
    w.factory("PROD::Prompt(DelM_Bkg,D0M_Sig_Gaus)")
    w.factory("SUM::Final_PDF(N_Sig[10000,0,50000]*Sig,N_Prompt[5000,0,20000]*Prompt,N_Comb[10000,0,50000]*Comb,N_MisId[500,0,5000]*MisId)")
    return w
from ROOT import RooWorkspace

# Build the counting-experiment workspace for the 2-or-more-btags
# electron channel and save it to a ROOT file.
workspace = RooWorkspace("electron_channel_2orMoreBtags")
workspace.factory('lepton_AbsoluteEta[0]')
workspace.factory('lumi[0]')
workspace.factory('n_signal[2200,0,10000]')
workspace.factory('n_VPlusJets[200,0,10000]')
workspace.factory('n_QCD[10,0,10000]')
workspace.factory('sum::yield(n_signal,n_VPlusJets,n_QCD)')
# NOTE(review): 'n' (the observed count) is never declared in this script,
# so this factory call cannot resolve it — confirm where 'n' is defined.
workspace.factory("Poisson::model_core(n,yield)")
# BUGFIX: removed a second factory("lumi[0]") call — 'lumi' already exists
# above and re-declaring it only produced a factory error.
# cross section - parameter of interest
workspace.factory("xsec[0,0,0.1]")
# selection efficiency * acceptance
workspace.factory("efficiency[0]")
# signal yield
workspace.factory("prod::nsig(lumi,xsec,efficiency)")
workspace.factory("Uniform::prior(xsec)")
workspace.Print()
workspace.SaveAs('electron_channel_2orMoreBtags.root')
## Here starts the meat. nentries = -1 ## Pairs of photon scale and extra smearing. sTest = [-2, 0.5] rTest = [1, 0.5] phoPtRange = (12,15) chains = getChains('v11') mcTree = chains['z'] w = RooWorkspace('w') ## Define variables mmgMass = w.factory('mmgMass[40, 180]') mmgGenMass = w.factory('mmgGenMass[0, 300]') mmgMassPhoGenE = w.factory('mmgMassPhoGenE[0, 300]') phoERes = w.factory('phoERes[-1,10]') mmMass = w.factory('mmMass[10, 180]') weight = w.factory('weight[1]') phoScale = w.factory('phoScale[0,-50,50]') weight.SetTitle('pileup.weight') ## Photon scaling fraction, dlog(m_uuy)/dlog(E_y) phoF = w.factory('phoF[0.15 * 91.2, 0, 100]') phoFFunc = w.factory('''FormulaVar::phoFFunc( "mmgMass * (0.5 - 0.5 * mmMass^2 / mmgMass^2)", {mmMass, mmgMass} )''')
from ROOT import RooFit, RooWorkspace, RooDataSet, kDashed, TBrowser w = RooWorkspace("w", True) w.factory("Gaussian::gauss(mes[5.20,5.30],mean[5.28,5.2,5.3],width[0.0027,0.001,1])") w.factory("ArgusBG::argus(mes,5.291,argpar[-20,-100,-1])") w.factory("SUM::sum(nsig[200,0,10000]*gauss,nbkg[800,0,10000]*argus)") #--- Generate a toyMC sample from composite PDF --- data = w.function('sum').generate(w.argSet('mes'), 2000) #--- Perform extended ML fit of composite PDF to toy data --- w.function('sum').fitTo(data) # --- Plot toy data and composite PDF overlaid --- mesframe = w.var('mes').frame() data.plotOn(mesframe) w.function('sum').plotOn(mesframe) w.function('sum').plotOn(mesframe, RooFit.Components('argus'), RooFit.LineStyle(kDashed)) mesframe.Draw() mesframe.Browse(TBrowser()) print 'nsig:',w.var('nsig').getValV(), '+-', w.var('nsig').getError() print 'nbkg:', w.var('nbkg').getValV(), '+-', w.var('nbkg').getError() print 'mes:', w.var('mes').getValV(), '+-', w.var('mes').getError() print 'mean:', w.var('mean').getValV(), '+-', w.var('mean').getError() print 'width:', w.var('width').getValV(), '+-', w.var('width').getError() print 'argpar:', w.var('argpar').getValV(), '+-', w.var('argpar').getError() from time import sleep sleep(5)
from ROOT import gPad, RooWorkspace

# Build a workspace holding the two fit observables defined by the
# Wjj 2D fitter parameter object.
params = Wjj2DFitterPars()
utils = Wjj2DFitterUtils(params)
# dataHist = utils.File2Hist(params.MCDirectory + \
#                            'RD_mu_HWWMH250_CMSSW525_private.root',
#                            'dataHist')
# dataHist.Print()
# dataHist.Draw('colz')
# gPad.Update()
# gPad.WaitPrimitive()

theWS = RooWorkspace()
# varRanges[var] indices 1 and 2 hold the low/high edges — TODO confirm
# against the Wjj2DFitterPars definition.
theWS.factory('%s[%f,%f]' % (params.var[0],
                             params.varRanges[params.var[0]][1],
                             params.varRanges[params.var[0]][2]))
theWS.factory('%s[%f,%f]' % (params.var[1],
                             params.varRanges[params.var[1]][1],
                             params.varRanges[params.var[1]][2]))
theWS.defineSet('obsSet', ','.join(params.var))
#theWS.Print()
# utils.Hist2Pdf(dataHist, "H250SignalHist", theWS)
#theWS.Print()
# dataset = utils.File2Dataset(params.MCDirectory + \
#                              'RD_mu_HWWMH250_CMSSW525_private.root',
#                              "H250SignalData", theWS)
# dataset.Print()
def plotStuff(plotList, plotstring, cutstring, plotfile, plotname, xlabel,
              ylabel, unitnorm):
    """Fit tri+wab templates to data and draw a stacked comparison plot.

    plotList   -- list of [tree, name, normalization, color, legend-label]
                  entries; names 'tri', 'wab' and 'data' are expected among
                  them (the fit and the stack rely on those keys).
    plotstring -- draw expression template, formatted with the entry name
    cutstring  -- selection applied when drawing
    plotfile   -- output file for the canvas pages
    plotname   -- stack title
    xlabel, ylabel -- axis titles
    unitnorm   -- if true, normalize each histogram to unit area; otherwise
                  scale by 1/normalization from plotList
    Uses module-level globals: c (canvas), legendright, gDirectory.
    """
    isFirst = True
    w = RooWorkspace("w")
    w.factory("x[-100,100]")
    # Draw each tree into a named histogram and import it as a RooDataHist.
    for dataset in plotList:
        dataset[0].Draw(plotstring.format(dataset[1]), cutstring, "goff")
        hist = gDirectory.Get(dataset[1])
        data = RooDataHist(dataset[1], dataset[1], RooArgList(w.var("x")), hist)
        getattr(w, 'import')(data)
    # Template pdfs scaled by their total sums; extended fit to 'data'.
    w.factory("HistPdf::triPdf(x,tri)")
    w.factory("HistPdf::wabPdf(x,wab)")
    w.factory("prod::triscale(a[0.3,0,10],{0})".format(w.data("tri").sum(False)))
    w.factory("prod::wabscale(b[0.1,0,10],{0})".format(w.data("wab").sum(False)))
    w.factory("SUM::sumModel(triscale*triPdf,wabscale*wabPdf)")
    w.pdf("sumModel").fitTo(w.data("data"), RooFit.SumW2Error(True),
                            RooFit.Extended(True), RooFit.Verbose(False),
                            RooFit.PrintLevel(-1))
    #w.pdf("sumModel").fitTo(w.data("data"),RooFit.Extended(True))
    #w.pdf("sumModel").fitTo(w.data("data"))
    frame = w.var("x").frame()
    w.data("data").plotOn(frame)
    #w.pdf("triPdf").plotOn(frame)
    #w.pdf("wabPdf").plotOn(frame)
    w.pdf("sumModel").plotOn(frame)
    w.pdf("sumModel").paramOn(frame)
    frame.SetTitle(gDirectory.Get("data").GetTitle())
    frame.Draw()
    c.Print(plotfile)
    c.Clear()
    # Second page: stacked, rescaled histograms with an MC-sum overlay.
    dataHist = gDirectory.Get("data")
    triHist = gDirectory.Get("tri")
    wabHist = gDirectory.Get("wab")
    if legendright:
        leg = TLegend(0.7, 0.75, 0.9, 0.9)
    else:
        leg = TLegend(0.1, 0.75, 0.3, 0.9)
    hs = THStack("hs", plotname);
    for dataset in plotList:
        hist = gDirectory.Get(dataset[1])
        #hist.Sumw2()
        if unitnorm:
            hist.Scale(1.0 / hist.Integral())
        else:
            hist.Scale(1.0 / dataset[2])
        print "{0} {1} {2}".format(plotname, dataset[4], hist.Integral())
        hist.SetLineColor(dataset[3])
        leg.AddEntry(hist, dataset[4])
        hs.Add(hist)
        #hist.GetXaxis().SetTitle(xlabel)
        hist.GetYaxis().SetTitle(ylabel)
        #if isFirst:
        #hist.GetXaxis().SetTitle(xlabel)
        #hist.GetYaxis().SetTitle(ylabel)
        #hist.Draw()
        #else:
        #hist.Draw("same")
        isFirst = False
    # MC sum = tri + wab (after the per-histogram rescaling above).
    sumHist = triHist.Clone("sum")
    sumHist.Add(wabHist)
    if unitnorm:
        sumHist.Scale(1.0 / sumHist.Integral())
    sumHist.SetLineColor(6)
    leg.AddEntry(sumHist, "MC sum")
    hs.Add(sumHist)
    hs.Draw("nostack")
    hs.GetXaxis().SetTitle(xlabel)
    hs.GetYaxis().SetTitle(ylabel)
    leg.Draw()
    c.Print(plotfile)
# Compile helper macro and alias the keyword-clashing import method.
gROOT.LoadMacro("tools.C+")
setattr(RooWorkspace, "Import", getattr(RooWorkspace, "import"))

## Here starts the meat.
nentries = -1  # -1: process all entries
sRange = (-10, 10)  # photon scale scan range in percent
phoPtRange = (12, 15)

chains = getChains('v11')
mcTree = chains['z']

w = RooWorkspace('w')
mmgMass = w.factory('mmgMass[40, 140]')
mmMass = w.factory('mmMass[10, 140]')
weight = w.factory('weight[1]')
# Title doubles as the draw expression when importing from the tree.
weight.SetTitle('pileup.weight')

## List the cuts with looser window on mmgMass to allow for scale changes
fRange = fMin, fMax = 1. + sRange[0] / 100., 1. + sRange[1] / 100.
lo = 'scaledMmgMass3(%f, mmgMass, mmMass)' % fMin
hi = 'scaledMmgMass3(%f, mmgMass, mmMass)' % fMax
# NOTE(review): list is truncated at this chunk boundary.
cuts = ['%f < %s & %s < %f' % (mmgMass.getMin(), lo, hi, mmgMass.getMax()),
        '%f < mmMass & mmMass < %f' % (mmMass.getMin(), mmMass.getMax()),
        #'%f < m1gMass & m1gMass < %f' % (m1gMass.getMin(), m1gMass.getMax()),
        #'%f < m2gMass & m2gMass < %f' % (m2gMass.getMin(), m2gMass.getMax()),
        #'12 < phoPt & phoPt < 15',
        'phoIsEB',
# Accumulators for the zcut scan results (one entry per mass point).
zcutmasses = array.array('d')
zerobackgroundzcut = array.array('d')
num_tridents = array.array('d')
num_ap = array.array('d')
num_ap_pastzcut = array.array('d')
#h1mass=TH1I(
#yieldhist=TH2D("yield","yield",totalH.GetNbinsX(),totalH.GetXaxis().GetXmin(),totalH.GetXaxis().GetXmax(),30,-10,-7)

# Yield map: x = A' mass (GeV), y = log10 coupling in [-10,-7].
n_massbins = 20
minmass = 0.015
maxmass = 0.06
yieldhist = TH2D("yield", "yield", n_massbins, minmass, maxmass, 30, -10, -7)

# Workspace holding all ntuple variables used by the cuts.
w = RooWorkspace("w")
w.factory("uncM[0,0.1]")
w.factory("uncVZ[-100,100]")
w.factory("uncP[0,10]")
w.factory("bscChisq[-100,100]")
w.factory("minIso[-100,100]")
w.factory("eleFirstHitX[-100,100]")
w.factory("posFirstHitX[-100,100]")
w.factory("eleP[-100,100]")
w.factory("posP[-100,100]")
w.factory("bscPY[-100,100]")
w.factory("bscPX[-100,100]")
w.factory("cut[0,1]")
#uncM = RooRealVar("uncM","uncM",0,0.1)
#uncVZ = RooRealVar("uncVZ","uncVZ",-100,100)
w.defineSet("allVars", "uncM,uncVZ,uncP,bscChisq,minIso,eleFirstHitX,posFirstHitX,eleP,posP,bscPY,bscPX")
def rooFit502(): print ">>> create workspace..." workspace = RooWorkspace("workspace", "workspace") print ">>> create typedef (shorthands)..." workspace.factory("$Typedef(Gaussian,Gaus)") workspace.factory("$Typedef(Chebychev,Cheby)") print ">>> operator pdf examples..." print ">>> SUM (coef1*pdf1,pdf2) - pdf addition" workspace.factory( "SUM::summodel( f[0,1]*Gaussian::gx(x[-10,10],m[0],1.0), Chebychev::ch(x,{0.1,0.2,-0.3}) )" ) print ">>> SUM (yield1*pdf1,yield2*pdf2) - extended pdf addition" workspace.factory("SUM::extsummodel( Nsig[0,1000]*gx, Nbkg[0,1000]*ch )") print ">>> PROD ( pdf1, pdf2 ) - pdf multiplication" # PDF multiplication is done with PROD ( pdf1, pdf2 ) workspace.factory("PROD::gxz( gx, Gaussian::gz(z[-10,10],0,1) )") print ">>> PROD ( pdf1|obs, pdf2 ) - conditional p.d.f multiplication" workspace.factory("Gaussian::gy( y[-10,10], x, 1.0 )") workspace.factory("PROD::gxycond( gy|x, gx )") print ">>> NCONV (obs,pdf1,pdf2) - numeric convolution" print ">>> FCONV (obs,pdf1,pdf2) - fft convolution" workspace.factory( "FCONV::lxg( x, Gaussian::g(x,mg[0],1), Landau::lc(x,0,1) )") print ">>> SIMUL( index, state1=pdf1, state2=pdf2,...) - simultaneous pdfs are constructed" workspace.factory( "SIMUL::smodel( c[A=0,B=1], A=Gaussian::gs(x,m,s[1]), B=Landau::ls(x,0,1) )" ) print ">>> operator function examples..." print ">>> prod (func1, func2,...) - function multiplication" workspace.factory("prod::uv(u[10],v[10])") print ">>> sum (func1, func2,...) - function addition" workspace.factory("sum::uv2(u,v)") print ">>> interpreted and compiled expression based pdfs..." print ">>> create a RooGenericPdf interpreted pdf by using single quotes to pass the expression string argument" workspace.factory("EXPR::G('x*x+1',x)") # Create a custom compiled p.d.f similar to the above interpreted p.d.f. # The code required to make this p.d.f. 
is automatically embedded in the workspace workspace.factory("CEXPR::GC('x*x+a',{x,a[1]})") # Compiled and interpreted functions (rather than pdfs) can be made with the lower case # 'expr' and 'cexpr' types print ">>> print workspace contents:" workspace.Print() print "\n>>> save workspace in memory..." gDirectory.Add(workspace)
tPDF['content'].plotOn(plotframeForPull,RooFit.Name('totModel'),RooFit.Normalization(kwargs['absNumNormalize'],RooAbsReal.NumEvent)) #plotframeForPull.Draw() plotframeForPull.Write(label+'ForPull') print '------SaveResult End {0}'.format(label) return None inF=TFile.Open(dataFile) canv=TCanvas('c1','c1',1000,1000) canv.SetFillColor(4000) canv.SetFillStyle(4000) canv.SaveAs('tmpLbL0.pdf'+'[') TGaxis.SetMaxDigits(3) space=RooWorkspace('space',False) space.factory('lbl0Mass[5.4,5.9]') space.factory('tktkMass[0.5,2.0]') space.factory('lbl0Pt[0.,200.]') space.factory('data_MC_factor[0.87]') # fit lbl0Mass in LbL0 MC {{{ if fitToLbMass_LbL0MC: fitLabel='lbl0Dist_lbl0MC' mass=space.var('lbl0Mass') mass.setRange(fitLabel,5.5,5.75) inN=inF.Get('pLbL0/LbL0') dataset=RooDataSet('dataset','dataset',inN,RooArgSet(mass))
from ROOT import RooFit, RooWorkspace, RooDataSet, kDashed, TBrowser w = RooWorkspace("w", True) w.factory( "Gaussian::gauss(mes[5.20,5.30],mean[5.28,5.2,5.3],width[0.0027,0.001,1])") w.factory("ArgusBG::argus(mes,5.291,argpar[-20,-100,-1])") w.factory("SUM::sum(nsig[200,0,10000]*gauss,nbkg[800,0,10000]*argus)") #--- Generate a toyMC sample from composite PDF --- data = w.function('sum').generate(w.argSet('mes'), 2000) #--- Perform extended ML fit of composite PDF to toy data --- w.function('sum').fitTo(data) # --- Plot toy data and composite PDF overlaid --- mesframe = w.var('mes').frame() data.plotOn(mesframe) w.function('sum').plotOn(mesframe) w.function('sum').plotOn(mesframe, RooFit.Components('argus'), RooFit.LineStyle(kDashed)) mesframe.Draw() mesframe.Browse(TBrowser()) print 'nsig:', w.var('nsig').getValV(), '+-', w.var('nsig').getError() print 'nbkg:', w.var('nbkg').getValV(), '+-', w.var('nbkg').getError() print 'mes:', w.var('mes').getValV(), '+-', w.var('mes').getError() print 'mean:', w.var('mean').getValV(), '+-', w.var('mean').getError() print 'width:', w.var('width').getValV(), '+-', w.var('width').getError() print 'argpar:', w.var('argpar').getValV(), '+-', w.var('argpar').getError() from time import sleep sleep(5)
def makeFit(dataSet, x_var):
    """Fit the phi(1020) peak in `dataSet`.

    Model: relativistic Breit-Wigner (true lineshape) convolved with a
    Gaussian resolution, plus a first-order polynomial background in
    (m - 1010).  Returns the canvas with data, components and the total
    model drawn, annotated with the frame chi2.
    """
    gStyle.SetOptFit(1111)

    # Observable: the KK invariant mass.
    w = RooWorkspace('w')
    x = w.factory(x_var + '[1008,1032]')
    x.setUnit('MeV')
    x.SetTitle('m_{kk}')
    x.setBins(200)

    # True lineshape T and resolution R, combined by FFT convolution.
    w.factory('''RooRelBreitWigner::T(''' + x_var + ''',
                 #mu[1020,1010,1025],
                 #Gamma[4,0.1,10],
                 J[1], radius[0.003],
                 m_K[493.677],m_K
                 )''')
    w.factory('''RooGaussian::R(''' + x_var + ''',
                 m_r[0],
                 #sigma[2,0.1,10]
                 )''')
    x.setBins(10000, 'cache')  # fine binning for the FFT convolution cache
    signal = w.factory('FCONV::signal(' + x_var + ',T,R)')

    # Background: linear polynomial in (x - 1010).
    a1 = RooRealVar('a1', 'a1', 0.4, 0., 1.)
    esp = RooRealVar('esp', 'esp', 0., -0.5, 0.5)  # parameter of an alternative exponential model
    xm = RooFormulaVar('xm', '@0-1010', RooArgList(x))
    background = RooPolynomial('background', 'Background', xm, RooArgList(a1))
    getattr(w, 'import')(background)

    # Composite model: signal fraction ratio_SB floats in the fit.
    w.factory('''SUM::modelPdf( ratio_SB[0.8, 0, 1] * signal, background)''')
    modelPdf = w.pdf('modelPdf')

    # Fit in a restricted window around the peak.
    fit_region = x.setRange("fit_region", 1011, 1027)
    result = modelPdf.fitTo(dataSet, RooFit.Save(), RooFit.Range("fit_region"))

    # Draw data, each component (dashed), then the total model.
    title = ' #Phi mass ' + ('DTF' if x_var == 'Tau_DTF_Phi_M' else '')
    frame = x.frame(RooFit.Title(title))
    dataSet.plotOn(frame)
    modelPdf.paramOn(frame, RooFit.Layout(0.1, 0.44, 0.9))
    for comp, color in ((signal, ROOT.kGreen + 2), (background, ROOT.kBlack)):
        modelPdf.plotOn(frame, RooFit.Components(RooArgSet(comp)),
                        RooFit.LineColor(color),
                        RooFit.LineStyle(2),
                        RooFit.LineWidth(1))
    modelPdf.plotOn(frame, RooFit.LineWidth(2))

    # Annotate the frame with its chi2.
    chi2 = round(frame.chiSquare(), 2)
    leg = TLegend(0.3, 0, .10, .10)
    leg.SetBorderSize(0)
    leg.SetFillStyle(0)
    leg.AddEntry(0, '#chi^{2} =' + str(chi2), '')
    frame.addObject(leg)

    c1 = TCanvas('c1', 'c1')
    frame.Draw()
    c1.Update()
    return c1
from ROOT import gPad, RooWorkspace

## Configuration and helper objects for the 2D W+jj fit.
params = Wjj2DFitterPars()
utils = Wjj2DFitterUtils(params)

## Declare the two fit observables with their configured ranges and
## collect them into the 'obsSet' named set.
theWS = RooWorkspace()
for obsName in params.var[:2]:
    theWS.factory('%s[%f,%f]' % (obsName,
                                 params.varRanges[obsName][1],
                                 params.varRanges[obsName][2]))
theWS.defineSet('obsSet', ','.join(params.var))
#theWS.Print()
## Here starts the meat. nentries = 50000 ## Pairs of photon scale and extra smearing. phoPtRange = (5,100) chains = getChains('v11') mcTree = chains['z'] w = RooWorkspace('w') massShift = 90 + 1.03506 ## Define variables phoPt = w.factory('phoPt[0,100]') mmgMass = w.factory('mmgMass[50,130]') mmgGeom = w.factory('mmgGeom[0,10]') mmgGeom.SetTitle('(mmgMass^2 - mmMass^2)/mmMass/phoPt') weight = w.factory('weight[1]') weight.SetTitle('pileup.weight') cuts = ['%f < phoPt & phoPt < %f' % phoPtRange, 'phoIsEB', 'phoR9 < 0.94', 'mmMass + mmgMass < 190', 'isFSR', ] ## Add an optional cut on number of entries if nentries > 0:
class Wjj2DFitter:
    """Driver for the W+jj fit: owns a RooWorkspace, builds the component
    pdfs (histogram templates, convolutions, template-morphing or analytic
    shapes), assembles the total extended model, applies yield/shape
    constraints and runs the fit.  Also provides plotting helpers."""

    def __init__ (self, pars):
        # pars: configuration object (variables, ranges, components, files ...)
        self.pars = pars
        self.ws = RooWorkspace('wjj2dfitter')
        self.utils = Wjj2DFitterUtils(self.pars)
        self.useImportPars = False
        self.rangeString = None  # sideband range string when a region is excluded
        obs = []
        for v in self.pars.var:
            # Allow an optional rename of the observable via pars.varNames.
            try:
                vName = self.pars.varNames[v]
            except AttributeError:
                vName = v
            obs.append(vName)
            var1 = self.ws.factory('%s[%f,%f]' % (vName,
                                                  self.pars.varRanges[v][1],
                                                  self.pars.varRanges[v][2])
                                   )
            var1.setUnit('GeV')
            try:
                var1.SetTitle(self.pars.varTitles[v])
            except AttributeError:
                var1.SetTitle('m_{jj}')
            var1.setPlotLabel(var1.GetTitle())
            # Custom bin edges if more than one edge is configured,
            # otherwise a uniform binning with the configured bin count.
            if len(self.pars.varRanges[v][3]) > 1:
                vbinning = RooBinning(len(self.pars.varRanges[v][3]) - 1,
                                      array('d', self.pars.varRanges[v][3]),
                                      '%sBinning' % vName)
                var1.setBinning(vbinning)
            else:
                var1.setBins(self.pars.varRanges[v][0])
            var1.Print()
            # Define signal region and sidebands when this variable is blinded.
            if v in self.pars.exclude:
                var1.setRange('signalRegion', self.pars.exclude[v][0],
                              self.pars.exclude[v][1])
                var1.setRange('lowSideband', var1.getMin(),
                              self.pars.exclude[v][0])
                var1.setRange('highSideband', self.pars.exclude[v][1],
                              var1.getMax())
                self.rangeString = 'lowSideband,highSideband'
        self.ws.defineSet('obsSet', ','.join(obs))

    def loadDataFromWorkspace(self, other, cut = None):
        """Copy the dataset from another workspace, optionally reduced by
        `cut`, binning it first when pars.binData is set."""
        #pull unbinned data from other workspace
        unbinnedData = other.data('data_unbinned')
        if not unbinnedData:
            unbinnedData = other.data('data_obs')
        if cut:
            unbinnedData = unbinnedData.reduce(cut)
        unbinnedData.Print()
        if self.pars.binData:
            #bin and import data
            unbinnedData.SetName('data_unbinned')
            getattr(self.ws, 'import')(unbinnedData)
            data = RooDataHist('data_obs', 'data_obs', other.set('obsSet'),
                               unbinnedData)
            getattr(self.ws, 'import')(data)
        else:
            #just import data
            unbinnedData.SetName('data_obs')
            getattr(self.ws, 'import')(unbinnedData)

    def loadHistogramsFromWorkspace(self, other):
        """Import every RooHistPdf found in `other` into this workspace."""
        #pull RooHist pdfs from other workspace
        pdfs = other.allPdfs()
        pdfIter = pdfs.createIterator()
        pdf = pdfIter.Next()
        while pdf:
            if pdf.IsA().InheritsFrom('RooHistPdf'):
                print 'importing',pdf.GetName(),'from old workspace'
                getattr(self.ws, 'import')(pdf)
            pdf = pdfIter.Next()

    def loadWorkspaceFromFile(self, filename, wsname = 'w',
                              getFloatPars = True):
        """Open `filename`, fetch workspace `wsname`, and pull in its data,
        histogram pdfs and (optionally) the floating-parameter snapshot."""
        print 'loading data workspace %s from file %s' % (wsname, filename)
        fin = TFile.Open(filename)
        if not fin:
            # Diagnostics, then retry with an absolute path before asserting.
            print 'failed to open the file',filename
            import os
            print 'cwd:',os.getcwd()
            print 'access of',filename,os.access(filename, os.R_OK)
            print 'list of root files in cwd'
            for f in os.listdir(os.getcwd()):
                if f[-5:] == '.root':
                    print f,len(f),len(filename)
            fin = TFile.Open(os.getcwd() + '/' + filename)
            assert(fin)
        other = fin.Get(wsname)
        #pull unbinned data from other workspace
        self.loadDataFromWorkspace(other)
        #pull in histogram pdfs to save time
        self.loadHistogramsFromWorkspace(other)
        if getFloatPars and other.loadSnapshot('fitPars'):
            self.useImportPars = True
            self.ws.saveSnapshot('importParams', other.set('floatingParams'),
                                 True)
        # self.ws.Print()

    # put together a fitting model and return the pdf
    def makeFitter(self, useAlternateModels = False):
        """Build (or return the cached) total extended pdf: one extended
        component pdf per background plus, optionally, the signals scaled
        by the common signal-strength r_signal."""
        if self.ws.pdf('total'):
            return self.ws.pdf('total')

        compPdfs = []
        for component in self.pars.backgrounds:
            # print 'getting compModels'
            compModels = getattr(self.pars, '%sModels' % component)
            if hasattr(self.pars, '%sConvModels' % component):
                convModels = getattr(self.pars, '%sConvModels' % component)
            else:
                convModels = None
            if useAlternateModels:
                print 'loading Alternate Models'
                compModels = getattr(self.pars, '%sModelsAlt' % component)
                convModels = getattr(self.pars, '%sConvModelsAlt' % component)
            # print 'compModels = %s' % compModels
            compFiles = getattr(self.pars, '%sFiles' % component)
            compPdf = self.makeComponentPdf(component, compFiles, compModels,
                                            useAlternateModels, convModels)
            # Yield = n_<comp> (fixed expectation) * <comp>_nrm (floating
            # normalization factor, constrained elsewhere).
            norm = self.ws.factory('prod::f_%s_norm' % component + \
                                       '(n_%s[0.,1e6],' % component + \
                                       '%s_nrm[1.,-0.5,5.])' % component)
            self.ws.var('n_%s' % component).setConstant(True)
            if hasattr(self, '%sExpected' % component):
                self.ws.var('n_%s' % component).setVal(
                    getattr(self, '%sExpected' % component))
            compPdfs.append(
                self.ws.factory('RooExtendPdf::%s_extended(%s,%s)' % \
                                    (compPdf.GetName(),
                                     compPdf.GetName(),
                                     norm.GetName())
                                )
                )

        self.ws.factory('r_signal[0., -200., 200.]')
        self.ws.var('r_signal').setConstant(False)

        try:
            obs = [ self.pars.varNames[x] for x in self.pars.var ]
        except AttributeError:
            obs = self.pars.var

        for component in self.pars.signals:
            # NOTE(review): assigns compFile but passes compFiles below, so
            # the signal pdf is built from the last background's file list —
            # looks like a typo; confirm intended behavior.
            compFile = getattr(self.pars, '%sFiles' % component)
            compModels = getattr(self.pars, '%sModels' % component)
            if hasattr(self.pars, '%sConvModels' % component):
                convModels = getattr(self.pars, '%sConvModels' % component)
            else:
                convModels = None
            compPdf = self.makeComponentPdf(component, compFiles, compModels,
                                            useAlternateModels, convModels)
            # Signal yield scales with the common signal strength r_signal.
            norm = self.ws.factory(
                "prod::f_%s_norm(n_%s[0., 1e6],r_signal)" % \
                    (component, component)
                )
            self.ws.var('n_%s' % component).setConstant(True)
            if hasattr(self, '%sExpected' % component):
                self.ws.var('n_%s' % component).setVal(
                    getattr(self, '%sExpected' % component))
            pdf = self.ws.factory('RooExtendPdf::%s_extended(%s,%s)' % \
                                      (compPdf.GetName(),
                                       compPdf.GetName(),
                                       norm.GetName())
                                  )
            # Clone the pdf under interference-shifted names when requested.
            if (hasattr(self.pars, '%sInterference' % component)) and \
                    getattr(self.pars, '%sInterference' % component):
                getattr(self.ws, 'import') \
                    (pdf,
                     RooFit.RenameAllNodes('interf_%sUp' % component),
                     RooFit.RenameAllVariablesExcept('interf_%sUp' % component,
                                                     ','.join(obs)),
                     RooFit.Silence()
                     )
                getattr(self.ws, 'import') \
                    (pdf,
                     RooFit.RenameAllNodes('interf_%sDown' % component),
                     RooFit.RenameAllVariablesExcept('interf_%sDown'%component,
                                                     ','.join(obs)),
                     RooFit.Silence()
                     )
            if self.pars.includeSignal:
                compPdfs.append(pdf)

        #print compPdfs
        prodList = [ '%s' % (pdf.GetName()) \
                         for (idx, pdf) in enumerate(compPdfs) ]
        comps = RooArgList(self.ws.argSet(','.join(prodList)))
        getattr(self.ws, 'import')(RooAddPdf('total', 'total', comps))

        return self.ws.pdf('total')

    # define the constraints on the yields, etc that will be part of the fit.
    def makeConstraints(self):
        """Build (or return the cached) Gaussian constraint pdfs on the
        configured yields and, optionally, on floating shape parameters."""
        if self.ws.set('constraintSet'):
            return self.ws.set('constraintSet')
        constraints = []
        constrainedParameters = []
        for constraint in self.pars.yieldConstraints:
            theYield = self.ws.var('%s_nrm' % constraint)
            if not theYield.isConstant():
                # Unit-mean Gaussian on the normalization factor with the
                # configured fractional width.
                self.ws.factory('RooGaussian::%s_const(%s, 1.0, %f)' % \
                                    (constraint, theYield.GetName(),
                                     self.pars.yieldConstraints[constraint])
                                )
                constraints.append('%s_const' % constraint)
                constrainedParameters.append(theYield.GetName())
        if hasattr(self.pars, 'constrainShapes'):
            for component in self.pars.constrainShapes:
                # Constrain every floating parameter of the component pdf to
                # its current value within its current error.
                pc = self.ws.pdf(component).getParameters(self.ws.set('obsSet'))
                parIter = pc.createIterator()
                par = parIter.Next()
                while par:
                    if not par.isConstant():
                        theConst = self.ws.factory(
                            'RooGaussian::%s_const' % \
                                (par.GetName()) + \
                                '(%s, %f, %f)' % \
                                (par.GetName(), par.getVal(),
                                 par.getError())
                            )
                        constraints.append(theConst.GetName())
                        constrainedParameters.append(par.GetName())
                    par = parIter.Next()
                pc.IsA().Destructor(pc)
        self.ws.defineSet('constraintSet', ','.join(constraints))
        self.ws.defineSet('constrainedSet', ','.join(constrainedParameters))
        return self.ws.set('constraintSet')

    # fit the data using the pdf
    def fit(self, keepParameterValues = False):
        """Assemble model, data and constraints, then run the extended ML
        fit (restricted to sidebands when an exclusion region is active).
        Returns the RooFitResult."""
        print 'construct fit pdf ...'
        fitter = self.makeFitter()
        print 'load data ...'
        data = self.loadData()
        self.resetYields()
        constraintSet = self.makeConstraints()
        if not keepParameterValues:
            self.readParametersFromFile()
            self.resetYields()
        # print constraints, self.pars.yieldConstraints
        print '\nfit constraints'
        constIter = constraintSet.createIterator()
        constraint = constIter.Next()
        constraints = []
        while constraint:
            constraint.Print()
            constraints.append(constraint.GetName())
            constraint = constIter.Next()

        constraintCmd = RooCmdArg.none()
        if constraintSet.getSize() > 0:
            # Multiply the model by its constraint pdfs and fit that product.
            constraints.append(fitter.GetName())
            fitter = self.ws.pdf('totalFit_const')
            if not fitter:
                fitter = self.ws.factory('PROD::totalFit_const(%s)' % \
                                             (','.join(constraints))
                                         )
            constraintCmd = RooFit.Constrained()
            # constraintCmd = RooFit.ExternalConstraints(self.ws.set('constraintSet'))

        if self.useImportPars:
            self.ws.loadSnapshot('importParams')
        self.ws.Print()

        # for constraint in pars.constraints:
        #     self.ws.pdf(constraint).Print()
        # print

        rangeCmd = RooCmdArg.none()
        if self.rangeString and self.pars.doExclude:
            rangeCmd = RooFit.Range(self.rangeString)

        print 'fitting ...'
        fr = fitter.fitTo(data, RooFit.Save(True),
                          RooFit.Extended(True),
                          RooFit.Minos(False),
                          RooFit.PrintEvalErrors(-1),
                          RooFit.Warnings(False),
                          constraintCmd, rangeCmd)
        fr.Print()
        return fr

    # determine the fitting model for each component and return them
    def makeComponentPdf(self, component, files, models, useAlternateModels,
                         convModels):
        """Dispatch on the model code: convolution, histogram template (-1),
        template morphing (-2), or analytic shape (anything else)."""
        print 'making ComponentPdf %s' % component
        # print 'models = %s' % models
        # print 'files = %s' % files
        if convModels and not (convModels[0] == -1):
            thePdf = self.makeConvolvedPdf(component, files, models,
                                           useAlternateModels, convModels)
        elif (models[0] == -1):
            thePdf = self.makeComponentHistPdf(component, files)
        elif (models[0] == -2):
            thePdf = self.makeMorphingPdf(component, useAlternateModels,
                                          convModels)
        elif (models[0] == -3):
            # NOTE(review): this branch leaves thePdf unbound, so the return
            # below raises NameError if models[0] == -3 — confirm intent.
            pass
        else:
            thePdf = self.makeComponentAnalyticPdf(component, models,
                                                   useAlternateModels)
        return thePdf

    #create a simple 2D histogram pdf
    def makeComponentHistPdf(self, component, files):
        """Sum the per-file histograms (scaled by lumi*xsec/Ngen), record
        the expected event count, and convert the sum into a pdf."""
        if self.ws.pdf(component):
            return self.ws.pdf(component)

        compHist = self.utils.newEmptyHist('hist%s' % component)
        sumYields = 0.
        sumxsec = 0.
        sumExpected = 0.
        # Each fset is (filename, Ngenerated, cross-section, ...).
        for (idx,fset) in enumerate(files):
            if hasattr(self.pars, '%scuts' % component):
                cutOverride = getattr(self.pars, '%scuts' % component)
            else:
                cutOverride = None
            filename = fset[0]
            tmpHist = self.utils.File2Hist(filename,
                                           'hist%s_%i' % (component, idx),
                                           False,cutOverride,False,True,0)
            sumYields += tmpHist.Integral()
            sumxsec += fset[2]
            compHist.Add(tmpHist, self.pars.integratedLumi*fset[2]/fset[1])
            sumExpected += tmpHist.Integral()*fset[2]* \
                self.pars.integratedLumi/fset[1]
            print filename,'acc x eff: %.3g' % (tmpHist.Integral()/fset[1])
            print filename,'N_expected: %.1f' % \
                (tmpHist.Integral()*fset[2]*self.pars.integratedLumi/fset[1])
            #tmpHist.Print()

        #compHist.Print()
        print '%s acc x eff: %.3g' % \
            (component, sumExpected/sumxsec/self.pars.integratedLumi)
        print 'Number of expected %s events: %.1f' % (component, sumExpected)
        setattr(self, '%sExpected' % component, sumExpected)

        return self.utils.Hist2Pdf(compHist, component,
                                   self.ws, self.pars.order)

    #create a pdf which is a convolution of any two pdf
    def makeConvolvedPdf(self, component, files, models, useAlternateModels,
                         convModels):
        """FFT-convolve the component's base pdf with the shared 'Global'
        resolution pdf (delegates to morphing when models[0] == -2)."""
        if self.ws.pdf(component):
            return self.ws.pdf(component)

        #If a morphing model is selected, then convolve each individual component first and then morph
        if (models[0] == -2):
            return self.makeMorphingPdf(component, useAlternateModels,
                                        convModels)

        basePdf = self.makeComponentPdf('%s_base' % component, files, models,
                                        useAlternateModels, [-1])
        convComponent = 'Global' ##Overwrite to use the same convolution model for all Pdfs
        convModel = getattr(self.pars, '%sConvModels' % convComponent)
        if useAlternateModels:
            convModel = getattr(self.pars, '%sConvModelsAlt' % convComponent)
        convPdf = self.makeComponentPdf('%s_conv' % convComponent, files,
                                        convModel, useAlternateModels, [-1])
        var = self.pars.var[0]
        try:
            vName = self.pars.varNames[var]
        except AttributeError:
            vName = var
        self.ws.factory('RooFFTConvPdf::%s(%s,%s,%s)' % \
                            (component, vName, basePdf.GetName(),
                             convPdf.GetName()))
        return self.ws.pdf(component)

    # create a pdf using the "template morphing" technique
    def makeMorphingPdf(self, component, useAlternateModels, convModels):
        """Combine Nom/MU/MD/SU/SD sub-pdfs with morphing-fraction formulas
        driven by the two parameters fMU_<comp> and fSU_<comp>."""
        if self.ws.pdf(component):
            return self.ws.pdf(component)

        filesNom = getattr(self.pars, '%s_NomFiles' % component)
        modelsNom = getattr(self.pars, '%s_NomModels' % component)
        filesMU = getattr(self.pars, '%s_MUFiles' % component)
        modelsMU = getattr(self.pars, '%s_MUModels' % component)
        filesMD = getattr(self.pars, '%s_MDFiles' % component)
        modelsMD = getattr(self.pars, '%s_MDModels' % component)
        filesSU = getattr(self.pars, '%s_SUFiles' % component)
        modelsSU = getattr(self.pars, '%s_SUModels' % component)
        filesSD = getattr(self.pars, '%s_SDFiles' % component)
        modelsSD = getattr(self.pars, '%s_SDModels' % component)
        if useAlternateModels:
            modelsNom = getattr(self.pars, '%s_NomModelsAlt' % component)
            modelsMU = getattr(self.pars, '%s_MUModelsAlt' % component)
            modelsMD = getattr(self.pars, '%s_MDModelsAlt' % component)
            modelsSU = getattr(self.pars, '%s_SUModelsAlt' % component)
            modelsSD = getattr(self.pars, '%s_SDModelsAlt' % component)

        # Adds five (sub)components for the component with suffixes Nom, MU, MD, SU, SD
        NomPdf = self.makeComponentPdf('%s_Nom' % component, filesNom,
                                       modelsNom, False, convModels)
        if hasattr(self, '%s_NomExpected' % component):
            setattr(self, '%sExpected' % component,
                    getattr(self, '%s_NomExpected' % component))
        MUPdf = self.makeComponentPdf('%s_MU' % component, filesMU,
                                      modelsMU, False, convModels)
        MDPdf = self.makeComponentPdf('%s_MD' % component, filesMD,
                                      modelsMD, False, convModels)
        SUPdf = self.makeComponentPdf('%s_SU' % component, filesSU,
                                      modelsSU, False, convModels)
        SDPdf = self.makeComponentPdf('%s_SD' % component, filesSD,
                                      modelsSD, False, convModels)

        # One-sided fractions: positive fMU picks MU, negative picks MD;
        # likewise for fSU/SD.  fNom absorbs the remainder.
        fMU_comp = self.ws.factory("fMU_%s[0., -1., 1.]" % component)
        fSU_comp = self.ws.factory("fSU_%s[0., -1., 1.]" % component)
        fMU = RooFormulaVar("f_fMU_%s" % component, "1.0*@0*(@0 >= 0.)",
                            RooArgList( fMU_comp ) )
        fMD = RooFormulaVar("f_fMD_%s" % component, "-1.0*@0*(@0 < 0.)",
                            RooArgList( fMU_comp ) )
        fSU = RooFormulaVar("f_fSU_%s" % component, "@0*(@0 >= 0.)",
                            RooArgList( fSU_comp ) )
        fSD = RooFormulaVar("f_fSD_%s" % component, "@0*(-1)*(@0 < 0.)",
                            RooArgList( fSU_comp ) )
        fNom = RooFormulaVar("f_fNom_%s" % component, "(1.-abs(@0)-abs(@1))",
                             RooArgList(fMU_comp,fSU_comp) )
        morphPdf = RooAddPdf(component,component,
                             RooArgList(MUPdf,MDPdf,SUPdf,SDPdf,NomPdf),
                             RooArgList(fMU, fMD, fSU, fSD, fNom))
        morphPdf.SetName(component)
        getattr(self.ws, 'import')(morphPdf)
        return self.ws.pdf(component)

    # create a pdf using an analytic function.
    def makeComponentAnalyticPdf(self, component, models,
                                 useAlternateModels):
        """Build one analytic pdf per observable and, for 2D, multiply
        them into a PROD pdf named after the component."""
        if self.ws.pdf(component):
            return self.ws.pdf(component)

        pdfList = []
        for (idx,model) in enumerate(models):
            var = self.pars.var[idx]
            try:
                vName = self.pars.varNames[var]
            except AttributeError:
                vName = var
            auxModel = None
            if useAlternateModels:
                if hasattr(self.pars, '%sAuxModelsAlt' % component):
                    auxModel = getattr(self.pars,
                                       '%sAuxModelsAlt' % component)[idx]
            else:
                if hasattr(self.pars, '%sAuxModels' % component):
                    auxModel = getattr(self.pars,
                                       '%sAuxModels' % component)[idx]
            pdfList.append(self.utils.analyticPdf(self.ws, vName, model,
                                                  '%s_%s'%(component,vName),
                                                  '%s_%s'%(component,vName),
                                                  auxModel
                                                  )
                           )

        pdfListNames = [ pdf.GetName() for pdf in pdfList ]
        if len(pdfList) > 1:
            self.ws.factory('PROD::%s(%s)' % \
                                (component, ','.join(pdfListNames)))
        else:
            pdfList[0].SetName(component)
        return self.ws.pdf(component)

    def loadData(self, weight = False):
        """Load (and cache in the workspace) the dataset from pars.DataFile,
        binning it first when pars.binData is set."""
        if self.ws.data('data_obs'):
            return self.ws.data('data_obs')

        unbinnedName = 'data_obs'
        if self.pars.binData:
            unbinnedName = 'data_unbinned'
        data = self.utils.File2Dataset(self.pars.DataFile, unbinnedName,
                                       self.ws, weighted = weight)
        if self.pars.binData:
            data = RooDataHist('data_obs', 'data_obs', self.ws.set('obsSet'),
                               data)
            getattr(self.ws, 'import')(data)
            data = self.ws.data('data_obs')

        return data

    def stackedPlot(self, var, logy = False, pdfName = None, Silent = False):
        """Draw a stacked plot of all components plus data for observable
        `var`, blinding the signal region when configured.  Returns the
        RooPlot frame."""
        if not pdfName:
            pdfName = 'total'

        xvar = self.ws.var(var)
        nbins = xvar.getBins()
        # Optional dedicated plotting range/binning; restored at the end.
        if hasattr(self.pars, 'plotRanges'):
            xvar.setRange('plotRange', self.pars.plotRanges[var][1],
                          self.pars.plotRanges[var][2])
            xvar.setBins(self.pars.plotRanges[var][0], 'plotBins')
        else:
            xvar.setRange('plotRange', xvar.getMin(), xvar.getMax())
            xvar.setBins(nbins, 'plotBins')

        sframe = xvar.frame()
        sframe.SetName("%s_stacked" % var)
        pdf = self.ws.pdf(pdfName)

        if isinstance(pdf, RooAddPdf):
            compList = RooArgList(pdf.pdfList())
        else:
            compList = None

        data = self.ws.data('data_obs')
        nexp = pdf.expectedEvents(self.ws.set('obsSet'))

        if not Silent:
            print pdf.GetName(),'expected: %.0f' % (nexp)
            print 'data events: %.0f' % (data.sumEntries())

        if nexp < 1:
            nexp = data.sumEntries()
        theComponents = []
        if self.pars.includeSignal:
            theComponents += self.pars.signals
        theComponents += self.pars.backgrounds
        # Invisible first pass fixes the frame normalization to the data.
        data.plotOn(sframe, RooFit.Invisible(),
                    RooFit.Binning('plotBins'))
        # dataHist = RooAbsData.createHistogram(data,'dataHist_%s' % var, xvar,
        #                                       RooFit.Binning('%sBinning' % var))
        # #dataHist.Scale(1., 'width')
        # invData = RooHist(dataHist, 1., 1, RooAbsData.SumW2, 1.0, False)
        # #invData.Print('v')
        # sframe.addPlotable(invData, 'pe', True, True)
        # Plot components largest-stack-first by progressively removing
        # each component from the remaining set.
        for (idx,component) in enumerate(theComponents):
            if not Silent:
                print 'plotting',component,'...',
            if hasattr(self.pars, '%sPlotting' % (component)):
                plotCharacteristics = getattr(self.pars, '%sPlotting' % \
                                                  (component))
            else:
                plotCharacteristics = {'color' : colorwheel[idx%6],
                                       'title' : component }

            compCmd = RooCmdArg.none()
            if compList:
                compSet = RooArgSet(compList)
                if compSet.getSize() > 0:
                    compCmd = RooFit.Components(compSet)
                removals = compList.selectByName('%s*' % component)
                compList.remove(removals)

            if not Silent:
                print 'events', self.ws.function('f_%s_norm' % component).getVal()
                sys.stdout.flush()
            if abs(self.ws.function('f_%s_norm' % component).getVal()) >= 1.:
                pdf.plotOn(sframe, #RooFit.ProjWData(data),
                           RooFit.DrawOption('LF'),
                           RooFit.FillStyle(1001),
                           RooFit.FillColor(plotCharacteristics['color']),
                           RooFit.LineColor(plotCharacteristics['color']),
                           RooFit.VLines(),
                           RooFit.Range('plotRange'),
                           RooFit.NormRange('plotRange'),
                           RooFit.Normalization(nexp, RooAbsReal.NumEvent),
                           compCmd
                           )
                tmpCurve = sframe.getCurve()
                tmpCurve.SetName(component)
                tmpCurve.SetTitle(plotCharacteristics['title'])
                if 'visible' in plotCharacteristics:
                    sframe.setInvisible(component,
                                        plotCharacteristics['visible'])

        data.plotOn(sframe, RooFit.Name('theData'),
                    RooFit.Binning('plotBins'))
        sframe.getHist('theData').SetTitle('data')
        # theData = RooHist(dataHist, 1., 1, RooAbsData.SumW2, 1.0, True)
        # theData.SetName('theData')
        # theData.SetTitle('data')
        # sframe.addPlotable(theData, 'pe')

        if (logy):
            sframe.SetMinimum(0.01)
            sframe.SetMaximum(1.0e6)
        else:
            sframe.SetMaximum(sframe.GetMaximum()*1.35)
            pass

        excluded = (var in self.pars.exclude)
        bname = var
        if not excluded:
            # The exclusion may be keyed on the pre-rename variable name.
            for v in self.pars.exclude:
                if hasattr(self.pars, 'varNames') and \
                        (self.pars.varNames[v] == var):
                    excluded = True
                    bname = v
        if excluded:
            # Cover the signal region with a box (filled when blinded).
            blinder = TBox(self.pars.exclude[bname][0], sframe.GetMinimum(),
                           self.pars.exclude[bname][1], sframe.GetMaximum())
            # blinder.SetName('blinder')
            # blinder.SetTitle('signal region')
            blinder.SetFillColor(kBlack)
            if self.pars.blind:
                blinder.SetFillStyle(1001)
            else:
                blinder.SetFillStyle(0)
            blinder.SetLineStyle(2)
            sframe.addObject(blinder)
        elif self.pars.blind:
            if not Silent:
                print "blind but can't find exclusion region for", var
                print 'excluded',excluded,self.pars.exclude
                print 'hiding data points'
            sframe.setInvisible('theData', True)

        #sframe.GetYaxis().SetTitle('Events / GeV')
        # dataHist.IsA().Destructor(dataHist)
        if not Silent:
            print
        # Restore the original binning.
        xvar.setBins(nbins)
        return sframe

    def readParametersFromFile(self, fname=None):
        """Read workspace variable values from one or more text files."""
        if (not fname):
            fname = self.pars.initialParametersFile
        if isinstance(fname, str):
            flist = [ fname ]
        else:
            flist = fname
        for tmpName in flist:
            if len(tmpName) > 0:
                print 'loading parameters from file',tmpName
                self.ws.allVars().readFromFile(tmpName)

    def expectedFromPars(self):
        """Seed <comp>Expected attributes from the current n_<comp> values."""
        components = self.pars.signals + self.pars.backgrounds
        for component in components:
            theYield = self.ws.var('n_%s' % component)
            setattr(self, '%sExpected' % component, theYield.getVal())

    def initFromExplicitVals(self,opts):
        """Seed <comp>Initial attributes from command-line ext<comp> options
        (only positive values are taken)."""
        #,init_diboson= -1.0,init_WpJ=-1.0,init_top=-1.0,init_ZpJ=-1.0,init_QCD=-1.0
        components = ['diboson', 'top', 'WpJ', 'ZpJ', 'QCD', 'WHbb']
        for component in components:
            #double init
            init = getattr(opts, 'ext%s' % component)
            #init = -2.0
            #setattr(self,init, 'init_%s' % component)
            #init = init_%s % component
            #print "init=", init
            #init = self.ws.var('init_%s' % component)
            #init.setVal(100.0)
            #init.setVal('init_%s' % component)
            #init = theYield.getVal()
            if (init>0.):
                print 'setting initial value for ',component,' to ',init
                setattr(self, '%sInitial' % component, init)

    def resetYields(self):
        """Reset every component yield to its explicit initial value, its
        configured data fraction, its MC expectation, or an even split of
        the data, and set the matching errors."""
        if self.ws.data('data_obs'):
            Ndata = self.ws.data('data_obs').sumEntries()
        else:
            Ndata = 10000.
        print 'resetting yields...'
        components = self.pars.signals + self.pars.backgrounds
        for component in components:
            theYield = self.ws.var('n_%s' % component)
            theNorm = self.ws.var('%s_nrm' % component)
            if hasattr(self, '%sInitial' % component):
                print 'explicitly setting initial value for ',component
                theYield.setVal(getattr(self, '%sInitial' % component))
                theNorm.setVal(1.0)
                theNorm.setConstant()
            else:
                fracofdata = -1.
                if hasattr(self.pars, '%sFracOfData' % component):
                    fracofdata = getattr(self.pars, '%sFracOfData' % component)
                if (fracofdata >= 0.):
                    print 'explicitly setting ', component,' yield to be', \
                        fracofdata,' of data'
                    theYield.setVal(fracofdata*Ndata)
                elif hasattr(self, '%sExpected' % component):
                    theYield.setVal(getattr(self, '%sExpected' % component))
                else:
                    print 'no expected value for',component
                    theYield.setVal(Ndata/len(components))
                if theNorm and not theNorm.isConstant():
                    theNorm.setVal(1.0)
                if component in self.pars.yieldConstraints:
                    theYield.setError(theYield.getVal() * \
                                          self.pars.yieldConstraints[component])
                    if theNorm:
                        theNorm.setError(self.pars.yieldConstraints[component])
                else:
                    theYield.setError(sqrt(theYield.getVal()))
            theYield.Print()

    def generateToyMCSet(self,var,inputPdf,outFileName,NEvts):
        """Generate NEvts toy events for `var` from `inputPdf` and write the
        resulting tree to a new ROOT file."""
        fMC = TFile(outFileName, "RECREATE");
        # thevar = self.ws.var(var);
        print 'thevar='
        print var
        # print thevar
        print '...'
        # varList = RooArgList()
        # varList.add(self.ws.var(var))
        toymc = inputPdf.generate(RooArgSet(self.ws.var(var)),NEvts);
        tMC = toymc.tree();
        fMC.cd();
        tMC.Write();
        fMC.Close();

    def legend4Plot(plot, left = False):
        """Build a TLegend (left or right side) from the visible, named
        items on a RooPlot, sizing it to the entry count."""
        if left:
            theLeg = TLegend(0.2, 0.62, 0.55, 0.92, "", "NDC")
        else:
            theLeg = TLegend(0.60, 0.62, 0.92, 0.92, "", "NDC")
        theLeg.SetName('theLegend')
        theLeg.SetBorderSize(0)
        theLeg.SetLineColor(0)
        theLeg.SetFillColor(0)
        theLeg.SetFillStyle(0)
        theLeg.SetLineWidth(0)
        theLeg.SetLineStyle(0)
        theLeg.SetTextFont(42)
        theLeg.SetTextSize(.045)
        entryCnt = 0
        for obj in range(0, int(plot.numItems())):
            objName = plot.nameOf(obj)
            if (not plot.getInvisible(objName)):
                theObj = plot.getObject(obj)
                objTitle = theObj.GetTitle()
                if len(objTitle) < 1:
                    objTitle = objName
                dopts = plot.getDrawOptions(objName).Data()
                # print 'obj:',theObj,'title:',objTitle,'opts:',dopts,'type:',type(dopts)
                if theObj.IsA().InheritsFrom('TNamed'):
                    theLeg.AddEntry(theObj, objTitle, dopts)
                    entryCnt += 1
        # Grow the legend downward to fit the number of entries.
        theLeg.SetY1NDC(0.9 - 0.05*entryCnt - 0.005)
        theLeg.SetY1(theLeg.GetY1NDC())
        return theLeg
    legend4Plot = staticmethod(legend4Plot)
from JPsi.MuMu.common.energyScaleChains import getChains
from JPsi.MuMu.datadrivenbinning import DataDrivenBinning

# Load the compiled analysis library and helper macros into the ROOT session.
gSystem.Load('libJPsiMuMu')
gROOT.LoadMacro("tools.C+")

# Expose RooWorkspace.import under a legal Python name ('import' is a keyword).
# NOTE(review): the except branch retries the identical statement — presumably a
# workaround for a transient PyROOT binding issue; confirm it is still needed.
try:
    setattr(Workspace, "Import", getattr(Workspace, "import"))
except NameError:
    ## Try again :-)
    setattr(Workspace, "Import", getattr(Workspace, "import"))

w = Workspace('w', 'w')

## 1. Build a gaussian model g(x|m,s)
g = w.factory('Gaussian::g(x[-10,10],gm[0,-3,3],gs[1,0.1,3])')
x = w.var('x')

## 2. Generate unbinned data from g 1 and 2 with (s,m) = (s1,m1) and (m2,s2)
# data1 is drawn at the factory defaults (gm=0, gs=1); the mean/width are then
# moved to (-3, 2) for data2 and restored afterwards.
data1 = g.generate(ArgSet(x), 10000)
w.var('gm').setVal(-3.)
w.var('gs').setVal(2)
data2 = g.generate(ArgSet(x), 10000)
w.var('gm').setVal(0.)
w.var('gs').setVal(1.)

## 3. Use data 1 to build Keys PDF k1(x|s1,m1)
# Kernel-estimation PDF from the unbinned data1 sample; no mirroring,
# bandwidth scale factor 2.
k = KeysPdf('k', 'k', x, data1, KeysPdf.NoMirror, 2)
# Mass-window scan configuration.
# NOTE(review): units are not visible from this chunk — presumably GeV; confirm
# against the caller.
massVmin = 0.01
massVmax = 1.0
nPiMass = 1
massPimin = 0.01
massPimax = 1.0

# Gaussian core with a one-sided exponential tail: gaussian for (x-mean) below
# the breakpoint [3], exponential decay with length [4] beyond it.
fitfunc = TF1("fitfunc","[0]*exp( ((x-[1])<[3])*(-0.5*(x-[1])^2/[2]^2) + ((x-[1])>=[3])*(-0.5*[3]^2/[2]^2-(x-[1]-[3])/[4]))",-50,50)
fitfunc.SetParName(0,"Amplitude")
fitfunc.SetParName(1,"Mean")
fitfunc.SetParName(2,"Sigma")
fitfunc.SetParName(3,"Tail Z")
fitfunc.SetParName(4,"Tail length")

# Workspace observables: uncM (mass), uncVZ (vertex z), uncP (momentum), cut flag.
w = RooWorkspace("w")
w.factory("{0}[0,0.1]".format("uncM"))
w.factory("uncVZ[-100,100]")
w.factory("uncP[0,10]")
w.factory("cut[0,1]")
w.defineSet("myVars","{0},uncVZ".format("uncM"))

# NOTE(review): infile and radfile are opened elsewhere in this script — the
# trees are assumed to exist; verify before running standalone.
events = infile.Get("cut")
eventsrad = radfile.Get("ntuple")
dataset = RooDataSet("data","data",events,w.set("myVars"),"")

# Plain gaussian vertex model, plus the RooFit translation of fitfunc above
# (gaussian core + exponential tail) as an EXPR pdf in uncVZ.
w.factory("Gaussian::vtx_model(uncVZ,mean[-50,50],sigma[0,50])")
gauss_pdf = w.pdf("vtx_model")
w.factory("EXPR::gaussExp('exp( ((@0-@1)<@3)*(-0.5*(@0-@1)^2/@2^2) + ((@0-@1)>=@3)*(-0.5*@3^2/@2^2-(@0-@1-@3)/@4))',uncVZ,gauss_mean[-5,-20,20],gauss_sigma[5,1,50],exp_breakpoint[10,0,50],exp_length[3,0.5,20])")
gaussexp_pdf = w.pdf("gaussExp")
w.defineSet("obs_1d","uncVZ")
def DataFit(free=True, sim=True):
    """Build and fit the B -> D* K pi data model in four samples.

    Creates a workspace with a dataset per sample (b0g, b0pi0, bsg, bspi0),
    assembles a SUM pdf of signal + background components per sample with
    yields linked by physics ratios, then optionally runs an independent
    ('free') fit per sample and/or a simultaneous fit, saving snapshots and
    writing the workspace to files/w_data.root.

    free -- run the per-sample independent fits and save a snapshot each.
    sim  -- run the simultaneous fit over all samples and save a snapshot.

    Relies on module-level helpers: path (input directory), ImportMCShapes,
    CreateSimPdf, CreateSimData.
    """
    w = RooWorkspace('w_data', 'w_data')
    samples = ['b0g', 'b0pi0', 'bsg', 'bspi0']
    # create the category
    w.factory('cat[%s]' % (','.join(samples)))
    files = {
        'b0g': path + "B0gamma/9_B2DstKpi_Dst2DgammTuple_BestCut.root",
        'b0pi0': path + "B0pi0/9_B2Dstpi0Tuple_BestCut.root",
        'bsg': path + "Bsgamma/9_Bs2DstKpi_Dst2DgammaTuple_BestCut.root",
        'bspi0': path + "Bspi0/9_Bs2Dstpi0Tuple_BestCut.root"
    }
    # Make the dsets
    w.factory("B_DTFDict_D0_B_M[5100,5900]")
    w.var("B_DTFDict_D0_B_M").setBins(80)
    for samp in samples:
        # NOTE(review): assert used for input validation — stripped under
        # python -O; consider raising IOError explicitly.
        assert (os.path.exists(files[samp]))
        tf = TFile(files[samp])
        t = tf.Get('DecayTree')
        # Only the fit variable branch is read; everything else is disabled
        # to speed up the RooDataSet import.
        t.SetBranchStatus("*", 0)
        t.SetBranchStatus("B_DTFDict_D0_B_M", 1)
        dset = RooDataSet("data_%s" % (samp), "", t,
                          RooArgSet(w.var("B_DTFDict_D0_B_M")))
        getattr(w, 'import')(dset)
        tf.Close()
    # Make the total pdf
    # First try and merge the different bits from the different workspaces
    ImportMCShapes(w)
    # Then make combinatorial shape in each cateogry
    # let these be independent for now
    for samp in samples:
        w.factory("comb_mc_%s_p0[-0.001,-0.1,0.]" % samp)
        w.factory(
            "Exponential::comb_mc_pdf_%s( B_DTFDict_D0_B_M, comb_mc_%s_p0 )"
            % (samp, samp))
        w.factory("%s_comb_y[3000,0,12000]" % samp)
    # Now need to figure out what yields to restrict
    # sig yield first (require b0 / bs ratio consistent between g and pi0)
    w.factory("b0g_sig_y[3000,0,12000]")
    w.factory("b0pi0_sig_y[800,0,4000]")
    w.factory("bs2b0_rat[2.5,1.,4.]")
    w.factory("prod::bsg_sig_y(b0g_sig_y, bs2b0_rat)")
    w.factory("prod::bspi0_sig_y(b0pi0_sig_y, bs2b0_rat)")
    # now mis rec yield (ratio of this to sig should be the same for b0 and bs but will be different for g vs pi0)
    w.factory("misrec_to_sig_rat_g[0.2,0.001,0.6]")
    w.factory("misrec_to_sig_rat_pi0[0.2,0.001,0.6]")
    w.factory("prod::b0g_misrec_y( misrec_to_sig_rat_g, b0g_sig_y )")
    w.factory("prod::b0pi0_misrec_y( misrec_to_sig_rat_pi0, b0pi0_sig_y )")
    w.factory("prod::bsg_misrec_y( misrec_to_sig_rat_g, bsg_sig_y )")
    w.factory("prod::bspi0_misrec_y( misrec_to_sig_rat_pi0, bspi0_sig_y )")
    # the cases of B->D*pipi, B->D*KK, Lb->D*ph all involve a misID so will
    # be different for B0 and Bs (as they differ with a K or pi misID) however
    # for all of these the ratio of g -> pi0 should be the same
    # there is also Bs->D*KK which should scale the same for g and pi0 modes
    w.factory("misid_g2pi0_rat[0.1,0.0001,10.]")
    w.factory("b0g_bdstpp_y[1000,0,12000]")
    w.factory("bsg_bdstpp_y[1000,0,12000]")
    w.factory("prod::b0pi0_bdstpp_y( misid_g2pi0_rat, b0g_bdstpp_y )")
    w.factory("prod::bspi0_bdstpp_y( misid_g2pi0_rat, bsg_bdstpp_y )")
    w.factory("b0g_bdstkk_y[1000,0,12000]")
    w.factory("bsg_bdstkk_y[1000,0,12000]")
    w.factory("prod::b0pi0_bdstkk_y( misid_g2pi0_rat, b0g_bdstkk_y )")
    w.factory("prod::bspi0_bdstkk_y( misid_g2pi0_rat, bsg_bdstkk_y )")
    w.factory("b0g_lbdstph_y[1000,0,12000]")
    w.factory("bsg_lbdstph_y[1000,0,12000]")
    w.factory("prod::b0pi0_lbdstph_y( misid_g2pi0_rat, b0g_lbdstph_y )")
    w.factory("prod::bspi0_lbdstph_y( misid_g2pi0_rat, bsg_lbdstph_y )")
    w.factory("bsdstkk_to_bdstkk_rat[1.,0.1,2.]")
    w.factory("prod::b0g_bsdstkk_y( bsdstkk_to_bdstkk_rat, b0g_bdstkk_y )")
    w.factory("prod::b0pi0_bsdstkk_y( bsdstkk_to_bdstkk_rat, b0pi0_bdstkk_y )")
    w.factory("prod::bsg_bsdstkk_y( bsdstkk_to_bdstkk_rat, bsg_bdstkk_y )")
    w.factory("prod::bspi0_bsdstkk_y( bsdstkk_to_bdstkk_rat, bspi0_bdstkk_y )")
    # B -> DKpi same logic as misrec
    w.factory("bdkp_to_sig_rat_g[0.2,0.001,0.6]")
    w.factory("bdkp_to_sig_rat_pi0[0.2,0.001,0.6]")
    w.factory("prod::b0g_bdkp_y( bdkp_to_sig_rat_g, b0g_sig_y )")
    w.factory("prod::b0pi0_bdkp_y( bdkp_to_sig_rat_pi0, b0pi0_sig_y )")
    w.factory("prod::bsg_bdkp_y( bdkp_to_sig_rat_g, bsg_sig_y )")
    w.factory("prod::bspi0_bdkp_y( bdkp_to_sig_rat_pi0, bspi0_sig_y )")
    # B -> D* K / B -> D* pi (adding random pi- to B0 and random K- to Bs0)
    # so ratio to signal should be same for both g and pi0 modes but
    # different for B0 -> Bs
    w.factory("bdsth_to_sig_rat_addpi[0.2,0.001,0.6]")
    w.factory("bdsth_to_sig_rat_addk[0.2,0.001,0.6]")
    w.factory("prod::b0g_bdsth_y( bdsth_to_sig_rat_addpi, b0g_sig_y )")
    w.factory("prod::b0pi0_bdsth_y( bdsth_to_sig_rat_addpi, b0pi0_sig_y )")
    w.factory("prod::bsg_bdsth_y( bdsth_to_sig_rat_addk, bsg_sig_y )")
    w.factory("prod::bspi0_bdsth_y( bdsth_to_sig_rat_addk, bspi0_sig_y )")
    # Lb -> Dph (mid-ID k for p and pi for p and add random g or pi0) so will be different for all 4 really
    # express this ratio to the Lb -> D*ph one (they should be similar in magnitude?)
    w.factory("lbdph_to_lbdstph_b0g[1.,0.5,2.]")
    w.factory("lbdph_to_lbdstph_b0pi0[1.,0.5,2.]")
    w.factory("lbdph_to_lbdstph_bsg[1.,0.5,2.]")
    w.factory("lbdph_to_lbdstph_bspi0[1.,0.5,2.]")
    w.factory("prod::b0g_lbdph_y( lbdph_to_lbdstph_b0g, b0g_lbdstph_y )")
    w.factory("prod::b0pi0_lbdph_y( lbdph_to_lbdstph_b0pi0, b0pi0_lbdstph_y )")
    w.factory("prod::bsg_lbdph_y( lbdph_to_lbdstph_bsg, bsg_lbdstph_y )")
    w.factory("prod::bspi0_lbdph_y( lbdph_to_lbdstph_bspi0, bspi0_lbdstph_y )")
    # Part reco shape should have same Bs / B0 ratio
    w.factory("partrec_to_sig_rat_g[0.2,0.001,0.6]")
    w.factory("partrec_to_sig_rat_pi0[0.2,0.001,0.6]")
    w.factory("prod::b0g_partrec_y( partrec_to_sig_rat_g, b0g_sig_y )")
    w.factory("prod::b0pi0_partrec_y( partrec_to_sig_rat_pi0, b0pi0_sig_y )")
    w.factory("prod::bsg_partrec_y( partrec_to_sig_rat_g, bsg_sig_y )")
    w.factory("prod::bspi0_partrec_y( partrec_to_sig_rat_pi0, bspi0_sig_y )")
    components = [
        'sig', 'misrec', 'bdstpp', 'bdstkk', 'bsdstkk', 'bdkp', 'bdsth',
        'partrec', 'lbdph', 'lbdstph', 'comb'
    ]
    # Build the extended SUM pdf per sample: yield * shape for each component.
    for samp in samples:
        fact_str = "SUM::data_pdf_%s(" % samp
        for comp in components:
            fact_str += "%s_%s_y*%s_mc_pdf_%s," % (samp, comp, comp, samp)
        fact_str = fact_str[:-1] + ")"  # drop trailing comma
        w.factory(fact_str)
        w.pdf('data_pdf_%s' % samp).Print('v')
    CreateSimPdf(w, 'data')
    CreateSimData(w, 'data')
    # Now fix appropriate parameters
    # To start with we'll fix all shape parameters from MC and just float the yields (and exponential slope)
    for comp in components:
        if comp == 'comb':
            continue  # no pre-defined shape for combinatorial
        if comp == 'bsdstkk':
            continue  # this params for this piece are covered by bdstkk
        w.set('%s_mc_sim_pdf_pars' % comp).setAttribAll("Constant")
    # Now relax the constraints on a few important params
    w.var("b0g_mean").setConstant(False)
    w.var("b0g_sigma").setConstant(False)
    #w.var("dm_b02bs").setConstant(False)
    #w.var("dm_g2pi0").setConstant(False)
    #w.var("ssig_b02bs").setConstant(False)
    #w.var("ssig_g2pi0").setConstant(False)
    #w.var("b0g_misrec_mean").setConstant(False)
    #w.var("b0g_misrec_sigma").setConstant(False)
    #w.var("dm_missg2addg").setConstant(False)
    #w.var("ssig_missg2addg").setConstant(False)
    w.Print('v')
    w.pdf('data_sim_pdf').Print('v')
    w.data('data_sim_data').Print('v')
    # free fit first
    if free:
        for i, samp in enumerate(samples):
            pdfname = 'data_pdf_%s' % (samp)
            dsetname = 'data_%s' % (samp)
            w.pdf(pdfname).fitTo(
                w.data(dsetname))  # nothing to fit in this case
            pars = w.pdf(pdfname).getParameters(
                RooArgSet(w.var("B_DTFDict_D0_B_M")))
            w.saveSnapshot('data_free_fit_%s' % samp, pars)
    if sim:
        pdfname = 'data_sim_pdf'
        dsetname = 'data_sim_data'
        w.pdf(pdfname).fitTo(w.data(dsetname))
        pars = w.pdf(pdfname).getParameters(
            RooArgSet(w.var("B_DTFDict_D0_B_M")))
        w.saveSnapshot('data_sim_fit', pars)
    w.writeToFile('files/w_data.root')
from ROOT import RooWorkspace

# Build a minimal counting-experiment workspace for the electron channel
# (>= 2 b-tags): observed-yield sum, Poisson core model, and a flat prior on
# the cross section (parameter of interest).
workspace = RooWorkspace("electron_channel_2orMoreBtags")
workspace.factory('lepton_AbsoluteEta[0]')
workspace.factory('lumi[0]')
workspace.factory('n_signal[2200,0,10000]')
workspace.factory('n_VPlusJets[200,0,10000]')
workspace.factory('n_QCD[10,0,10000]')
workspace.factory('sum::yield(n_signal,n_VPlusJets,n_QCD)')
# NOTE(review): 'n' is never declared in this workspace before being used as
# the Poisson observable — RooFactoryWSTool will report a missing object;
# confirm whether 'n' is meant to be created here (e.g. "n[0,20000]").
workspace.factory( "Poisson::model_core(n,yield)" )
# Fix: removed a second factory('lumi[0]') call — 'lumi' is already defined
# above, and re-declaring an existing variable makes the factory print an
# error and return null without creating anything.
# cross section - parameter of interest
workspace.factory( "xsec[0,0,0.1]" );
# selection efficiency * acceptance
workspace.factory( "efficiency[0]" );
# signal yield
workspace.factory( "prod::nsig(lumi,xsec,efficiency)" );
workspace.factory( "Uniform::prior(xsec)" )
workspace.Print()
workspace.SaveAs('electron_channel_2orMoreBtags.root')
class Wjj2DFitter: def __init__ (self, pars): self.pars = pars self.ws = RooWorkspace('wjj2dfitter') self.utils = Wjj2DFitterUtils(self.pars) self.useImportPars = False self.rangeString = None obs = [] for v in self.pars.var: try: vName = self.pars.varNames[v] except AttributeError: vName = v obs.append(vName) var1 = self.ws.factory('%s[%f,%f]' % (vName, self.pars.varRanges[v][1], self.pars.varRanges[v][2]) ) var1.setUnit('GeV') try: var1.SetTitle(self.pars.varTitles[v]) except AttributeError: var1.SetTitle('m_{jj}') var1.setPlotLabel(var1.GetTitle()) if len(self.pars.varRanges[v][3]) > 1: vbinning = RooBinning(len(self.pars.varRanges[v][3]) - 1, array('d', self.pars.varRanges[v][3]), '%sBinning' % vName) var1.setBinning(vbinning) else: var1.setBins(self.pars.varRanges[v][0]) var1.Print() if v in self.pars.exclude: var1.setRange('signalRegion', self.pars.exclude[v][0], self.pars.exclude[v][1]) var1.setRange('lowSideband', var1.getMin(), self.pars.exclude[v][0]) var1.setRange('highSideband', self.pars.exclude[v][1], var1.getMax()) self.rangeString = 'lowSideband,highSideband' if hasattr(self.pars, 'plotRanges'): var1.setRange('plotRange', self.pars.plotRanges[v][1], self.pars.plotRanges[v][2]) var1.setBins(self.pars.plotRanges[v][0], 'plotBins') else: var1.setRange('plotRange', var1.getMin(), var1.getMax()) var1.setBins(var1.getBins(), 'plotBins') self.ws.defineSet('obsSet', ','.join(obs)) def loadDataFromWorkspace(self, other, cut = None): #pull unbinned data from other workspace unbinnedData = other.data('data_unbinned') if not unbinnedData: unbinnedData = other.data('data_obs') if cut: unbinnedData = unbinnedData.reduce(cut) unbinnedData.Print() if self.pars.binData: #bin and import data unbinnedData.SetName('data_unbinned') getattr(self.ws, 'import')(unbinnedData) data = RooDataHist('data_obs', 'data_obs', other.set('obsSet'), unbinnedData) getattr(self.ws, 'import')(data) else: #just import data unbinnedData.SetName('data_obs') getattr(self.ws, 
'import')(unbinnedData) def loadHistogramsFromWorkspace(self, other): #pull RooHist pdfs from other workspace pdfs = other.allPdfs() pdfIter = pdfs.createIterator() pdf = pdfIter.Next() while pdf: if pdf.IsA().InheritsFrom('RooHistPdf'): print 'importing',pdf.GetName(),'from old workspace' getattr(self.ws, 'import')(pdf) pdf = pdfIter.Next() def loadWorkspaceFromFile(self, filename, wsname = 'w', getFloatPars = True): print 'loading data workspace %s from file %s' % (wsname, filename) fin = TFile.Open(filename) if not fin: print 'failed to open the file',filename import os print 'cwd:',os.getcwd() print 'access of',filename,os.access(filename, os.R_OK) print 'list of root files in cwd' for f in os.listdir(os.getcwd()): if f[-5:] == '.root': print f,len(f),len(filename) fin = TFile.Open(os.getcwd() + '/' + filename) assert(fin) other = fin.Get(wsname) #pull unbinned data from other workspace self.loadDataFromWorkspace(other) #pull in histogram pdfs to save time self.loadHistogramsFromWorkspace(other) if getFloatPars and other.loadSnapshot('fitPars'): self.useImportPars = True self.ws.saveSnapshot('importParams', other.set('floatingParams'), True) # self.ws.Print() # put together a fitting model and return the pdf def makeFitter(self, useAlternateModels = False): if self.ws.pdf('total'): return self.ws.pdf('total') compPdfs = [] for component in self.pars.backgrounds: # print 'getting compModels' compModels = getattr(self.pars, '%sModels' % component) if hasattr(self.pars, '%sConvModels' % component): convModels = getattr(self.pars, '%sConvModels' % component) else: convModels = None if useAlternateModels: print 'loading Alternate Models' compModels = getattr(self.pars, '%sModelsAlt' % component) convModels = getattr(self.pars, '%sConvModelsAlt' % component) # print 'compModels = %s' % compModels compFiles = getattr(self.pars, '%sFiles' % component) compPdf = self.makeComponentPdf(component, compFiles, compModels, useAlternateModels, convModels) norm = 
self.ws.factory('prod::f_%s_norm' % component + \ '(n_%s[0.,1e6],' % component + \ '%s_nrm[1.,-0.5,5.])' % component) self.ws.var('n_%s' % component).setConstant(True) if hasattr(self, '%sExpected' % component): self.ws.var('n_%s' % component).setVal( getattr(self, '%sExpected' % component)) compPdfs.append( self.ws.factory('RooExtendPdf::%s_extended(%s,%s)' % \ (compPdf.GetName(), compPdf.GetName(), norm.GetName()) ) ) self.ws.factory('r_signal[0., -200., 200.]') self.ws.var('r_signal').setConstant(False) try: obs = [ self.pars.varNames[x] for x in self.pars.var ] except AttributeError: obs = self.pars.var for component in self.pars.signals: compFile = getattr(self.pars, '%sFiles' % component) compModels = getattr(self.pars, '%sModels' % component) if hasattr(self.pars, '%sConvModels' % component): convModels = getattr(self.pars, '%sConvModels' % component) else: convModels = None compPdf = self.makeComponentPdf(component, compFiles, compModels, useAlternateModels, convModels) norm = self.ws.factory( "prod::f_%s_norm(n_%s[0., 1e6],r_signal)" % \ (component, component) ) self.ws.var('n_%s' % component).setConstant(True) if hasattr(self, '%sExpected' % component): self.ws.var('n_%s' % component).setVal( getattr(self, '%sExpected' % component)) pdf = self.ws.factory('RooExtendPdf::%s_extended(%s,%s)' % \ (compPdf.GetName(), compPdf.GetName(), norm.GetName()) ) if (hasattr(self.pars, '%sInterference' % component)) and \ getattr(self.pars, '%sInterference' % component): getattr(self.ws, 'import') \ (pdf, RooFit.RenameAllNodes('interf_%sUp' % component), RooFit.RenameAllVariablesExcept('interf_%sUp' % component, ','.join(obs)), RooFit.Silence() ) getattr(self.ws, 'import') \ (pdf, RooFit.RenameAllNodes('interf_%sDown' % component), RooFit.RenameAllVariablesExcept('interf_%sDown'%component, ','.join(obs)), RooFit.Silence() ) if self.pars.includeSignal: compPdfs.append(pdf) #print compPdfs prodList = [ '%s' % (pdf.GetName()) \ for (idx, pdf) in enumerate(compPdfs) ] comps 
= RooArgList(self.ws.argSet(','.join(prodList))) getattr(self.ws, 'import')(RooAddPdf('total', 'total', comps)) return self.ws.pdf('total') # define the constraints on the yields, etc that will be part of the fit. def makeConstraints(self): if self.ws.set('constraintSet'): return self.ws.set('constraintSet') constraints = [] constrainedParameters = [] for constraint in self.pars.yieldConstraints: theYield = self.ws.var('%s_nrm' % constraint) if not theYield.isConstant(): self.ws.factory('RooGaussian::%s_const(%s, 1.0, %f)' % \ (constraint, theYield.GetName(), self.pars.yieldConstraints[constraint]) ) constraints.append('%s_const' % constraint) constrainedParameters.append(theYield.GetName()) if hasattr(self.pars, 'constrainShapes'): for component in self.pars.constrainShapes: pc = self.ws.pdf(component).getParameters(self.ws.set('obsSet')) parIter = pc.createIterator() par = parIter.Next() while par: if not par.isConstant(): theConst = self.ws.factory('RooGaussian::%s_const' % \ (par.GetName()) + \ '(%s, %f, %f)' % \ (par.GetName(), par.getVal(), par.getError()) ) constraints.append(theConst.GetName()) constrainedParameters.append(par.GetName()) par = parIter.Next() pc.IsA().Destructor(pc) self.ws.defineSet('constraintSet', ','.join(constraints)) self.ws.defineSet('constrainedSet', ','.join(constrainedParameters)) return self.ws.set('constraintSet') # make the constrained fitter def makeConstrainedFitter(self): if self.ws.pdf('totalFit_const'): return self.ws.pdf('totalFit_const') constraintSet = self.makeConstraints() fitter = self.makeFitter() print '\nfit constraints' constIter = constraintSet.createIterator() constraint = constIter.Next() constraints = [] while constraint: constraint.Print() constraints.append(constraint.GetName()) constraint = constIter.Next() if constraintSet.getSize() > 0: constraints.append(fitter.GetName()) fitter = self.ws.factory('PROD::totalFit_const(%s)' % \ (','.join(constraints)) ) return fitter # fit the data using the pdf def 
fit(self, keepParameterValues = False, overrideRangeCmd = False): print 'construct fit pdf ...' fitter = self.makeFitter() print 'load data ...' data = self.loadData() self.resetYields() constraintSet = self.makeConstraints() if not keepParameterValues: self.readParametersFromFile() self.resetYields() # print constraints, self.pars.yieldConstraints constraintCmd = RooCmdArg.none() if constraintSet.getSize() > 0: fitter = self.makeConstrainedFitter() constraintCmd = RooFit.Constrained() # constraintCmd = RooFit.ExternalConstraints(self.ws.set('constraintSet')) if self.useImportPars: self.ws.loadSnapshot('importParams') self.ws.Print() # for constraint in pars.constraints: # self.ws.pdf(constraint).Print() # print rangeCmd = RooCmdArg.none() if self.rangeString and self.pars.doExclude and not overrideRangeCmd: rangeCmd = RooFit.Range(self.rangeString) # print 'scanning parameter values...' # fitter.fitTo(data, RooFit.Minos(False), # RooFit.PrintEvalErrors(-1), # RooFit.Warnings(False), # RooFit.Minimizer("Minuit2", "scan"), # RooFit.PrintLevel(0), # constraintCmd, # rangeCmd) print 'fitting ...' 
fr = fitter.fitTo(data, RooFit.Save(True), # RooFit.Extended(True), RooFit.Minos(False), RooFit.PrintEvalErrors(-1), RooFit.Warnings(False), RooFit.Minimizer("Minuit2", "minimize"), constraintCmd, rangeCmd ) fr.Print('v') return fr # determine the fitting model for each component and return them def makeComponentPdf(self, component, files, models, useAlternateModels, convModels): print 'making ComponentPdf %s' % component # print 'models = %s' % models # print 'files = %s' % files if convModels and not (convModels[0] == -1): thePdf = self.makeConvolvedPdf(component, files, models, useAlternateModels, convModels) elif (models[0] == -1): thePdf = self.makeComponentHistPdf(component, files) elif (models[0] == -2): thePdf = self.makeMorphingPdf(component, useAlternateModels, convModels) elif (models[0] == -3): pass else: thePdf = self.makeComponentAnalyticPdf(component, models, useAlternateModels) return thePdf #create a simple 2D histogram pdf def makeComponentHistPdf(self, component, files): if self.ws.pdf(component): return self.ws.pdf(component) compHist = self.utils.newEmptyHist('hist%s' % component) sumYields = 0. sumxsec = 0. sumExpected = 0. 
for (idx,fset) in enumerate(files): if hasattr(self.pars, '%scuts' % component): cutOverride = getattr(self.pars, '%scuts' % component) else: cutOverride = None filename = fset[0] tmpHist = self.utils.File2Hist(filename, 'hist%s_%i' % (component, idx), False,cutOverride,False,True,0) sumYields += tmpHist.Integral() sumxsec += fset[2] compHist.Add(tmpHist, self.pars.integratedLumi*fset[2]/fset[1]) sumExpected += tmpHist.Integral()*fset[2]* \ self.pars.integratedLumi/fset[1] print filename,'acc x eff: %.3g' % (tmpHist.Integral()/fset[1]) print filename,'N_expected: %.1f' % \ (tmpHist.Integral()*fset[2]*self.pars.integratedLumi/fset[1]) #tmpHist.Print() #compHist.Print() print '%s acc x eff: %.3g' % \ (component, sumExpected/sumxsec/self.pars.integratedLumi) print 'Number of expected %s events: %.1f' % (component, sumExpected) setattr(self, '%sExpected' % component, sumExpected) return self.utils.Hist2Pdf(compHist, component, self.ws, self.pars.order) #create a pdf which is a convolution of any two pdf def makeConvolvedPdf(self, component, files, models, useAlternateModels, convModels): if self.ws.pdf(component): return self.ws.pdf(component) #If a morphing model is selected, then convolve each individual component first and then morph if (models[0] == -2): return self.makeMorphingPdf(component, useAlternateModels, convModels) basePdf = self.makeComponentPdf('%s_base' % component, files, models, useAlternateModels, [-1]) convComponent = 'Global' ##Overwrite to use the same convolution model for all Pdfs convModel = getattr(self.pars, '%sConvModels' % convComponent) if useAlternateModels: convModel = getattr(self.pars, '%sConvModelsAlt' % convComponent) convPdf = self.makeComponentPdf('%s_conv' % convComponent, files, convModel, useAlternateModels, [-1]) var = self.pars.var[0] try: vName = self.pars.varNames[var] except AttributeError: vName = var self.ws.factory('RooFFTConvPdf::%s(%s,%s,%s)' % \ (component, vName, basePdf.GetName(), convPdf.GetName())) return 
self.ws.pdf(component) # create a pdf using the "template morphing" technique def makeMorphingPdf(self, component, useAlternateModels, convModels): if self.ws.pdf(component): return self.ws.pdf(component) filesNom = getattr(self.pars, '%s_NomFiles' % component) modelsNom = getattr(self.pars, '%s_NomModels' % component) filesMU = getattr(self.pars, '%s_MUFiles' % component) modelsMU = getattr(self.pars, '%s_MUModels' % component) filesMD = getattr(self.pars, '%s_MDFiles' % component) modelsMD = getattr(self.pars, '%s_MDModels' % component) filesSU = getattr(self.pars, '%s_SUFiles' % component) modelsSU = getattr(self.pars, '%s_SUModels' % component) filesSD = getattr(self.pars, '%s_SDFiles' % component) modelsSD = getattr(self.pars, '%s_SDModels' % component) if useAlternateModels: modelsNom = getattr(self.pars, '%s_NomModelsAlt' % component) modelsMU = getattr(self.pars, '%s_MUModelsAlt' % component) modelsMD = getattr(self.pars, '%s_MDModelsAlt' % component) modelsSU = getattr(self.pars, '%s_SUModelsAlt' % component) modelsSD = getattr(self.pars, '%s_SDModelsAlt' % component) # Adds five (sub)components for the component with suffixes Nom, MU, MD, SU, SD NomPdf = self.makeComponentPdf('%s_Nom' % component, filesNom, modelsNom, False, convModels) if hasattr(self, '%s_NomExpected' % component): setattr(self, '%sExpected' % component, getattr(self, '%s_NomExpected' % component)) MUPdf = self.makeComponentPdf('%s_MU' % component, filesMU, modelsMU, False, convModels) MDPdf = self.makeComponentPdf('%s_MD' % component, filesMD, modelsMD, False, convModels) SUPdf = self.makeComponentPdf('%s_SU' % component, filesSU, modelsSU, False, convModels) SDPdf = self.makeComponentPdf('%s_SD' % component, filesSD, modelsSD, False, convModels) fMU_comp = self.ws.factory("fMU_%s[0., -1., 1.]" % component) fSU_comp = self.ws.factory("fSU_%s[0., -1., 1.]" % component) fMU = RooFormulaVar("f_fMU_%s" % component, "1.0*@0*(@0 >= 0.)", RooArgList( fMU_comp ) ) fMD = 
RooFormulaVar("f_fMD_%s" % component, "-1.0*@0*(@0 < 0.)", RooArgList( fMU_comp ) ) fSU = RooFormulaVar("f_fSU_%s" % component, "@0*(@0 >= 0.)", RooArgList( fSU_comp ) ) fSD = RooFormulaVar("f_fSD_%s" % component, "@0*(-1)*(@0 < 0.)", RooArgList( fSU_comp ) ) fNom = RooFormulaVar("f_fNom_%s" % component, "(1.-abs(@0)-abs(@1))", RooArgList(fMU_comp,fSU_comp) ) morphPdf = RooAddPdf(component,component, RooArgList(MUPdf,MDPdf,SUPdf,SDPdf,NomPdf), RooArgList(fMU, fMD, fSU, fSD, fNom)) morphPdf.SetName(component) getattr(self.ws, 'import')(morphPdf) return self.ws.pdf(component) # create a pdf using an analytic function. def makeComponentAnalyticPdf(self, component, models, useAlternateModels): if self.ws.pdf(component): return self.ws.pdf(component) pdfList = [] systMult = None if ( hasattr(self.pars, '%sInterference' % component) and \ getattr(self.pars, '%sInterference' % component) and \ hasattr(self.pars, "%sdoSystMult" % component) and \ getattr(self.pars, "%sdoSystMult" % component) ): systMult = getattr(self.pars, "%sSystMult" % component) for (idx,model) in enumerate(models): var = self.pars.var[idx] try: vName = self.pars.varNames[var] except AttributeError: vName = var auxModel = None if useAlternateModels: if hasattr(self.pars, '%sAuxModelsAlt' % component): auxModel = getattr(self.pars, '%sAuxModelsAlt' % component)[idx] else: if hasattr(self.pars, '%sAuxModels' % component): auxModel = getattr(self.pars, '%sAuxModels' % component)[idx] pdfList.append(self.utils.analyticPdf(self.ws, vName, model, '%s_%s'%(component,vName), '%s_%s'%(component,vName), auxModel, systMult ) ) pdfListNames = [ pdf.GetName() for pdf in pdfList ] if len(pdfList) > 1: self.ws.factory('PROD::%s(%s)' % \ (component, ','.join(pdfListNames))) else: pdfList[0].SetName(component) return self.ws.pdf(component) def loadData(self, weight = False): if self.ws.data('data_obs'): return self.ws.data('data_obs') unbinnedName = 'data_obs' if self.pars.binData: unbinnedName = 'data_unbinned' data 
= self.utils.File2Dataset(self.pars.DataFile, unbinnedName, self.ws, weighted = weight) if self.pars.binData: data = RooDataHist('data_obs', 'data_obs', self.ws.set('obsSet'), data) getattr(self.ws, 'import')(data) data = self.ws.data('data_obs') return data def stackedPlot(self, var, logy = False, pdfName = None, Silent = False): if not pdfName: pdfName = 'total' xvar = self.ws.var(var) nbins = xvar.getBins() # if hasattr(self.pars, 'plotRanges') and not xvar.hasRange('plotRange'): # xvar.setRange('plotRange', self.pars.plotRanges[var][1], # self.pars.plotRanges[var][2]) # xvar.setBins(self.pars.plotRanges[var][0], 'plotBins') # elif not xvar.hasRange('plotRange'): # xvar.setRange('plotRange', xvar.getMin(), xvar.getMax()) # xvar.setBins(nbins, 'plotBins') sframe = xvar.frame(RooFit.Range('plotRange'), RooFit.Bins(xvar.getBins('plotBins'))) sframe.SetName("%s_stacked" % var) pdf = self.ws.pdf(pdfName) if isinstance(pdf, RooAddPdf): compList = RooArgList(pdf.pdfList()) else: compList = None data = self.ws.data('data_obs') nexp = pdf.expectedEvents(self.ws.set('obsSet')) if not Silent: print pdf.GetName(),'expected: %.0f' % (nexp) print 'data events: %.0f' % (data.sumEntries()) if nexp < 1: nexp = data.sumEntries() theComponents = [] if self.pars.includeSignal: theComponents += self.pars.signals theComponents += self.pars.backgrounds data.plotOn(sframe, RooFit.Invisible(), RooFit.Binning('plotBins')) # dataHist = RooAbsData.createHistogram(data,'dataHist_%s' % var, xvar, # RooFit.Binning('%sBinning' % var)) # #dataHist.Scale(1., 'width') # invData = RooHist(dataHist, 1., 1, RooAbsData.SumW2, 1.0, False) # #invData.Print('v') # sframe.addPlotable(invData, 'pe', True, True) for (idx,component) in enumerate(theComponents): if not Silent: print 'plotting',component,'...', if hasattr(self.pars, '%sPlotting' % (component)): plotCharacteristics = getattr(self.pars, '%sPlotting' % \ (component)) else: plotCharacteristics = {'color' : colorwheel[idx%6], 'title' : component } 
compCmd = RooCmdArg.none() if compList: compSet = RooArgSet(compList) if compSet.getSize() > 0: compCmd = RooFit.Components(compSet) removals = compList.selectByName('%s*' % component) compList.remove(removals) if not Silent: print 'events', self.ws.function('f_%s_norm' % component).getVal() sys.stdout.flush() if abs(self.ws.function('f_%s_norm' % component).getVal()) >= 1.: pdf.plotOn(sframe, #RooFit.ProjWData(data), RooFit.DrawOption('LF'), RooFit.FillStyle(1001), RooFit.FillColor(plotCharacteristics['color']), RooFit.LineColor(plotCharacteristics['color']), RooFit.VLines(), RooFit.Range('plotRange'), RooFit.NormRange('plotRange'), RooFit.Normalization(nexp, RooAbsReal.NumEvent), compCmd ) tmpCurve = sframe.getCurve() tmpCurve.SetName(component) tmpCurve.SetTitle(plotCharacteristics['title']) if 'visible' in plotCharacteristics: sframe.setInvisible(component, plotCharacteristics['visible']) data.plotOn(sframe, RooFit.Name('theData'), RooFit.Binning('plotBins')) sframe.getHist('theData').SetTitle('data') # theData = RooHist(dataHist, 1., 1, RooAbsData.SumW2, 1.0, True) # theData.SetName('theData') # theData.SetTitle('data') # sframe.addPlotable(theData, 'pe') if (logy): sframe.SetMinimum(0.01) sframe.SetMaximum(1.0e6) else: sframe.SetMaximum(sframe.GetMaximum()*1.35) pass excluded = (var in self.pars.exclude) bname = var if not excluded: for v in self.pars.exclude: if hasattr(self.pars, 'varNames') and \ (self.pars.varNames[v] == var): excluded = True bname = v if excluded: blinder = TBox(self.pars.exclude[bname][0], sframe.GetMinimum(), self.pars.exclude[bname][1], sframe.GetMaximum()) # blinder.SetName('blinder') # blinder.SetTitle('signal region') blinder.SetFillColor(kBlack) if self.pars.blind: blinder.SetFillStyle(1001) else: blinder.SetFillStyle(0) blinder.SetLineStyle(2) sframe.addObject(blinder) elif self.pars.blind: if not Silent: print "blind but can't find exclusion region for", var print 'excluded',excluded,self.pars.exclude print 'hiding data points' 
sframe.setInvisible('theData', True) else: sframe.setInvisible('theData', False) #sframe.GetYaxis().SetTitle('Events / GeV') # dataHist.IsA().Destructor(dataHist) if not Silent: print xvar.setBins(nbins) return sframe def readParametersFromFile(self, fname=None): if (not fname): fname = self.pars.initialParametersFile if isinstance(fname, str): flist = [ fname ] else: flist = fname for tmpName in flist: if len(tmpName) > 0: print 'loading parameters from file',tmpName self.ws.allVars().readFromFile(tmpName) def expectedFromPars(self): components = self.pars.signals + self.pars.backgrounds for component in components: theYield = self.ws.var('n_%s' % component) setattr(self, '%sExpected' % component, theYield.getVal()) def initFromExplicitVals(self,opts): #,init_diboson= -1.0,init_WpJ=-1.0,init_top=-1.0,init_ZpJ=-1.0,init_QCD=-1.0 components = ['diboson', 'top', 'WpJ', 'ZpJ', 'QCD', 'WHbb'] for component in components: #double init init = getattr(opts, 'ext%s' % component) #init = -2.0 #setattr(self,init, 'init_%s' % component) #init = init_%s % component #print "init=", init #init = self.ws.var('init_%s' % component) #init.setVal(100.0) #init.setVal('init_%s' % component) #init = theYield.getVal() if (init>0.): print 'setting initial value for ',component,' to ',init setattr(self, '%sInitial' % component, init) def resetYields(self): if self.ws.data('data_obs'): Ndata = self.ws.data('data_obs').sumEntries() else: Ndata = 10000. print 'resetting yields...' components = self.pars.signals + self.pars.backgrounds for component in components: theYield = self.ws.var('n_%s' % component) theNorm = self.ws.var('%s_nrm' % component) if hasattr(self, '%sInitial' % component): print 'explicitly setting initial value for ',component theYield.setVal(getattr(self, '%sInitial' % component)) theNorm.setVal(1.0) theNorm.setConstant() else: fracofdata = -1. 
if hasattr(self.pars, '%sFracOfData' % component): fracofdata = getattr(self.pars, '%sFracOfData' % component) if (fracofdata >= 0.): print 'explicitly setting ', component,' yield to be', fracofdata,' of data' theYield.setVal(fracofdata*Ndata) elif hasattr(self, '%sExpected' % component): theYield.setVal(getattr(self, '%sExpected' % component)) else: print 'no expected value for',component theYield.setVal(Ndata/len(components)) if theNorm and not theNorm.isConstant(): theNorm.setVal(1.0) if component in self.pars.yieldConstraints: theYield.setError(theYield.getVal() * \ self.pars.yieldConstraints[component]) if theNorm: theNorm.setError(self.pars.yieldConstraints[component]) else: theYield.setError(sqrt(theYield.getVal())) theYield.Print() def generateToyMCSet(self,var,inputPdf,outFileName,NEvts): fMC = TFile(outFileName, "RECREATE"); # thevar = self.ws.var(var); print 'thevar=' print var # print thevar print '...' # varList = RooArgList() # varList.add(self.ws.var(var)) toymc = inputPdf.generate(RooArgSet(self.ws.var(var)),NEvts); tMC = toymc.tree(); fMC.cd(); tMC.Write(); fMC.Close(); def legend4Plot(plot, left = False): if left: theLeg = TLegend(0.2, 0.62, 0.55, 0.92, "", "NDC") else: theLeg = TLegend(0.60, 0.62, 0.92, 0.92, "", "NDC") theLeg.SetName('theLegend') theLeg.SetBorderSize(0) theLeg.SetLineColor(0) theLeg.SetFillColor(0) theLeg.SetFillStyle(0) theLeg.SetLineWidth(0) theLeg.SetLineStyle(0) theLeg.SetTextFont(42) theLeg.SetTextSize(.045) entryCnt = 0 for obj in range(0, int(plot.numItems())): objName = plot.nameOf(obj) if (not plot.getInvisible(objName)): theObj = plot.getObject(obj) objTitle = theObj.GetTitle() if len(objTitle) < 1: objTitle = objName dopts = plot.getDrawOptions(objName).Data() # print 'obj:',theObj,'title:',objTitle,'opts:',dopts,'type:',type(dopts) if theObj.IsA().InheritsFrom('TNamed'): theLeg.AddEntry(theObj, objTitle, dopts) entryCnt += 1 theLeg.SetY1NDC(0.9 - 0.05*entryCnt - 0.005) theLeg.SetY1(theLeg.GetY1NDC()) return theLeg 
# Expose legend4Plot as a staticmethod so it can be called on the class
# (or an instance) without an implicit self argument.
legend4Plot = staticmethod(legend4Plot)
latex.DrawLatex(0.19, 0.85, "#color[1]{FullSim}") latex.DrawLatex(0.42, 0.85, "#color[1]{#mu=%.3f, #sigma=%.3f}" % (h.GetMean(), h.GetRMS())) gPad.Print("plots/zmmtozbb_fit%i_%s.png" % (ifunc,h.GetName() if logy else h.GetName()+"_lin")) #gPad.Print("plots/zmmtozbb_fit%i_%s.pdf" % (ifunc,h.GetName() if logy else h.GetName()+"_lin")) continue if ifunc == 0: # Fit double gaussian #ws.factory("SUM::double_gaussian(Gaussian::gaus1(x,gm1[1.0,%.2f,%.2f],gs1[0.1,%.2f,%.2f]), " # "f[0.5,0,1]*Gaussian::gaus2(x,gm2[1.0,%.2f,%.2f],gs2[0.3,%.2f,%.2f]) )" # % (0.9,gmup, gslow/2,gsup/2, 0.8,gmup, gslow,gsup)) if ih_ == 1+7*2: ws.factory("Gaussian::gaus1(x,gm1[1.00,%.2f,%.2f],gs1[0.08,%.2f,%.2f])" % (0.95,1.05, 0.01,0.09)) gmlow, gmup = 0.90, 1.15 elif (ih_%7 == 0 or ih_%7 == 1): #ws.factory("Gaussian::gaus1(x,gm1[0.85,%.2f,%.2f],gs1[0.1,%.2f,%.2f])" % (0.8,1.05, 0.01,0.20)) ws.factory("Gaussian::gaus1(x,gm1[0.85,%.2f,%.2f],gs1[0.1,%.2f,%.2f])" % (0.8,1.05, 0.01,0.25)) #gmlow, gmup = 1.00, 1.15 gmlow, gmup = 0.90, 1.15 else: ws.factory("Gaussian::gaus1(x,gm1[1.0,%.2f,%.2f],gs1[0.1,%.2f,%.2f])" % (0.95,1.10, 0.01,0.16)) ws.factory("expr::gs2('gs1*gssf1',gs1,gssf1[1.05,1,6])") ws.factory("Gaussian::gaus2(x,gm2[1.0,%.2f,%.2f],gs2)" % (gmlow,gmup)) #ws.factory("Gaussian::gaus2(x,gm2[1.0,%.2f,%.2f],gs2)" % (gmlow,gmup)) ws.factory("SUM::double_gaussian(gaus1,f[0.5,0,1]*gaus2)") model = ws.pdf("double_gaussian")
getattr(ws,'import')(biasData) # if being executed run bias study if __name__ == '__main__': ntoys = int(sys.argv[1]) category = int(sys.argv[2]) mass = float(sys.argv[3]) channel = sys.argv[4] order = int(sys.argv[5]) turnon = sys.argv[6] #fitted turn on type!!! truth = sys.argv[7] #truth model type!!! bs = RooWorkspace('bias_study') bs.factory("procWeight[0]") bs.factory("puWeight[0]") bs.factory("weight[0]") bs.factory("Mzg[100,180]") bs.var("Mzg").setRange("ROI",mass-1.5,mass+1.5) bs.var("Mzg").setBins(40000,"cache") bs.factory("Mz[0]") #bs.factory("dMzg[0,25]") #bs.factory("dMz[0,25]") bs.factory("r94cat[cat1=1,cat2=2,cat3=3,cat4=4]") bs.defineSet("observables", "Mzg,Mz,r94cat,procWeight,puWeight") bs.defineSet("observables_weight", "Mzg,Mz,r94cat,procWeight,puWeight,weight") prepare_truth_models(bs,category,mass,channel,turnon,truth)
#!/usr/bin/env python
from ROOT import TFile, TH1F, TH1D, TCanvas, RooRealVar, RooDataHist, RooDataSet, RooArgSet, RooArgList
from ROOT import RooGaussian, RooAddPdf, RooPolynomial, RooExponential, RooCBShape, RooArgusBG, RooFit, RooWorkspace, RooGenericPdf

# Fit the lbtk combinatorial-background mass shape from the 2016 data file.
inF = TFile.Open('a.root')
space = RooWorkspace('space', False)

# fit lbtkMass in 2016Data {{{
space.factory('mass[5.100,7.]')
mass = space.var('mass')

# Wrap the input histogram as a binned RooFit dataset.
inH = inF.Get('lbtkCombBKG')
datahist = RooDataHist('histogram', 'histogram', RooArgList(mass), inH)

# Target PDF: difference of two exponentials sharing a common mass shift.
space.factory(
    'EXPR::cPDF( "( exp(-1.*(mass-mShift)/(cPar1+cPar2)) - exp(-1.*(mass-mShift)/cPar1) )", mass, mShift[4.8,0.1,10.0], cPar1[0.44,0.001,100.],cPar2[0.0025,0.0000001,10.] )'
)
myPDF = space.pdf('cPDF')

# Fit three times so each pass restarts from the previous optimum.
for _ in range(3):
    myPDF.fitTo(datahist)

myFrame = mass.frame()
datahist.plotOn(myFrame)
myPDF.plotOn(myFrame)

canv = TCanvas('c1', 'c1', 1600, 1000)
myFrame.Draw()
canv.SaveAs('store_fig/hout_simpleFit_lbDist_from2016Data.png')
# Build the mass and log-mass datasets from the MC and test trees.
nentries = 5000
chains = getChains('v1')
mcTree = chains['mc']
test1Tree = chains['test1']

w = RooWorkspace('w')
mass = w.factory('mass[60, 120]')

# t = log(m / mZ); its range follows from the mass window.
trange = (log(mass.getMin()/91.2), log(mass.getMax()/91.2))
t = w.factory('t[%f,%f]' % trange)
t.SetTitle('log(mass/91.2)')
weight = w.factory('weight[0, 999]')

# Restrict every dataset to the first `nentries` tree entries.
cuts = ['Entry$ < %d' % nentries]
mData = dataset.get(tree=mcTree, variable=mass, weight=weight, cuts=cuts,
                    name='mData')
m1Data = dataset.get(tree=test1Tree, variable=mass, weight=weight, cuts=cuts,
                     name='m1Data')
tData = dataset.get(tree=mcTree, variable=t, weight=weight, cuts=cuts,
                    name='tData')
w.Import(mData)
# Remaining coupling points for the scan.
alphadArr.append(0.01)
alphadArr.append(0.001)

# A' mass binning for the fit.
nApMass = 50
massApmin = 0.07
massApmax = 0.139

# Gaussian core switching to an exponential tail past the "Tail Z" breakpoint.
fitfunc = TF1("fitfunc","[0]*exp( ((x-[1])<[3])*(-0.5*(x-[1])^2/[2]^2) + ((x-[1])>=[3])*(-0.5*[3]^2/[2]^2-(x-[1]-[3])/[4]))",-50,50)
for ipar, pname in enumerate(("Amplitude", "Mean", "Sigma", "Tail Z", "Tail length")):
    fitfunc.SetParName(ipar, pname)

w = RooWorkspace("w")
w.factory("{0}[0,0.1]".format("uncM"))
w.factory("uncVZ[-100,100]")
w.factory("uncP[0,10]")
w.factory("cut[0,1]")
w.defineSet("myVars","{0},uncVZ".format("uncM"))

events = infile.Get("cut")
eventsprompt = promptfile.Get("cut")
dataset = RooDataSet("data","data",events,w.set("myVars"),"")

# The same Gaussian+exponential-tail shape, expressed as RooFit pdfs.
w.factory("Gaussian::vtx_model(uncVZ,mean[-50,50],sigma[0,50])")
gauss_pdf = w.pdf("vtx_model")
w.factory("EXPR::gaussExp('exp( ((@0-@1)<@3)*(-0.5*(@0-@1)^2/@2^2) + ((@0-@1)>=@3)*(-0.5*@3^2/@2^2-(@0-@1-@3)/@4))',uncVZ,gauss_mean[-5,-20,20],gauss_sigma[5,1,50],exp_breakpoint[10,0,50],exp_length[3,0.5,20])")
gaussexp_pdf = w.pdf("gaussExp")
yedges) candHist = TH1D("candidates", "candidates", n_massbins, xedges) fcLowerHist = TH2D("fcLowerLimit", "fcLowerLimit", n_massbins, xedges, n_epsbins, yedges) fcUpperHist = TH2D("fcUpperLimit", "fcUpperLimit", n_massbins, xedges, n_epsbins, yedges) plrPvalHist = TH2D("plrPval", "plrPval", n_massbins, xedges, n_epsbins, yedges) plrSigHist = TH2D("plrSig", "plrSig", n_massbins, xedges, n_epsbins, yedges) logplrHist = TH2D("logplr", "logplr", n_massbins, xedges, n_epsbins, yedges) candRescaledHist = TH1D("candidates_rescaled", "candidates_rescaled", 100, 0, 1.0) candRescaled2DHist = TH2D("candidates_rescaled_2d", "candidates_rescaled_2d", n_massbins, xedges, 100, 0, 1.0) w = RooWorkspace("w") w.factory("{0}[0,0.18]".format(massVar)) w.factory("{0}[-100,100]".format(vtxVar)) w.factory("uncP[0,10]") w.factory("cut[0,1]") w.defineSet("myVars", "{0},{1}".format(massVar, vtxVar)) dataset = RooDataSet("data", "data", events, w.set("myVars"), "") w.factory("Gaussian::vtx_model({0},mean[-50,50],sigma[0,50])".format(vtxVar)) gauss_pdf = w.pdf("vtx_model") w.factory( "EXPR::gaussExp('exp( ((@0-@1)<@3)*(-0.5*(@0-@1)^2/@2^2) + ((@0-@1)>=@3)*(-0.5*@3^2/@2^2-(@0-@1-@3)/@4))',{0},gauss_mean[0,-20,20],gauss_sigma[5,1,50],exp_breakpoint[10,0,50],exp_length[3,0.5,20])" .format(vtxVar)) gaussexp_pdf = w.pdf("gaussExp") w.defineSet("obs_1d", vtxVar)
#print ' content[i] = {0}'.format(content[i]) #print ' recList[i] = {0}'.format(recList[i]) #print ' content[i][values] = {0}'.format(content[i]['values']) aaa = content[i] #recList[i]=content[i]['values'][idx] recList[i] = aaa['values'][idx] # used for find flat cut values end # def functions end }}} recTime('def funcs') space = RooWorkspace('space', False) # new parameters space.factory('bsMass[5.25,5.50]') space.factory('bdMass[5.10,5.50]') space.factory('bdbarMass[5.10,5.50]') space.factory('lbtkMass [5.4,5.9]') space.factory('lbtkbarMass[5.4,5.9]') space.factory('kkMass[0.8,10.0]') space.factory('kpiMass[0.8,10.0]') space.factory('kpibarMass[0.8,10.0]') ########## load workspace #################### workspaceFile1 = TFile.Open('store_root/workspace_1stStep_MCShape.root') space1st = workspaceFile1.Get('space') space1st.SetName('space1st') workspaceFile2 = TFile.Open('store_root/workspace_2ndStep_dataFit.root') space2nd = workspaceFile2.Get('space')
def recTime(mesg):
    """Write the minutes elapsed since the previous checkpoint to the log file."""
    global currenttime
    previous = currenttime
    currenttime = time.time()
    txtRecFile.write('{0} in {1} minutes\n'.format(
        mesg, int((currenttime - previous) / 60)))
# def functions end }}}

recTime('def funcs')

space = RooWorkspace('space', False)

# New fit observables.
for expr in ('lbl0Mass[5.4,5.9]', 'tktkMass[0.5,2.0]', 'lbl0Pt[0.,200.]'):
    space.factory(expr)

########## load workspace ####################
workspaceFile1 = TFile.Open('store_root/workspace_0thStep_LbL0Shape.root')
space1st = workspaceFile1.Get('space')
space1st.SetName('space1st')
spaceExt = space1st

load2016Data = True
if load2016Data:
    toyCheck = False
    sysFitLb = True
    sysFitlB = True
class DatacardBuilder(object): """ Class for building datacards, both textual and workspace part EXAMPLE_____________________________________________________________ #*** HEADER *** imax 1 number of bins jmax 5 number of processes minus 1 kmax 14 number of nuisance parameters ---------------------------------------------------------------------------------------------------------------------------------- shapes * ch1 hzz4l_2e2muS_8TeV_xs_SM_125_mass4l_v3.Databin0.root w:$PROCESS ---------------------------------------------------------------------------------------------------------------------------------- bin ch1 observation 8.0 #***PER-PROCESS INFORMATION *** ---------------------------------------------------------------------------------------------------------------------------------- bin ch1 ch1 ch1 ch1 ch1 ch1 process trueH2e2muBin0_8 bkg_zjets_8 bkg_ggzz_8 bkg_qqzz_8 out_trueH_8 fakeH_8 process 0 1 2 3 4 5 rate 1.0000 1.0526 0.3174 5.7443 1.0000 0.5684 ---------------------------------------------------------------------------------------------------------------------------------- CMS_eff_e lnN 1.046 - 1.046 1.046 1.046 1.046 EXAMPLE_____________________________________________________________ """ def __init__(self, datacard_name, datacard_input): self.my_logger = Logger() self.log = self.my_logger.getLogger(self.__class__.__name__, 10) self.DEBUG = self.my_logger.is_debug() self.pp = pprint.PrettyPrinter(indent=4) # self.datacard_name = datacard_name self.d_input = datacard_input self.log.debug('Datacard: {0} Datacard input: {1}'.format(self.datacard_name, self.d_input)) #self.not_a_process = ['observation','functions_and_definitions', 'setup'] self.not_a_process = self.d_input['setup']['reserved_sections'] self.lumi_scaling = 1.0 #process lists self.signal_process_list = self._get_processes('signal') self.bkg_process_list = self._get_processes('background') self.process_list = self.signal_process_list+self.bkg_process_list self.log.debug('Processes: 
{0}'.format(self.process_list)) #self.n_systematics, self.systematics_lines = self._get_systematics_lines() self.card_header='' #set of information lines os a header of the card. def make_txt_card(self): """Make text part of the datacard and dump to a file. - loop on processes and fill in txt card lines """ self.process_lines = self._get_process_lines() self.n_systematics, self.systematics_lines = self._get_systematics_lines() txt_card = """ Datacard for event category: {cat} {card_header} --------------------------------------- imax 1 number of bins jmax {jmax} number of processes minus 1 kmax {kmax} number of nuisance parameters --------------------------------------- {shapes_line} --------------------------------------- bin cat_{cat} observation {n_observed} --------------------------------------- bin {process_cat} process {process_name} process {process_number} rate {process_rate} --------------------------------------- """.format(cat = self.datacard_name, jmax = (len(self.process_list)-1), kmax = self.n_systematics, shapes_line = self._get_shapes_line(), n_observed = self._get_observation(), process_cat = self.process_lines['bin'], process_name = self.process_lines['name'], process_number = self.process_lines['number'], process_rate = self.process_lines['rate'], #process_systematics = self.systematics_lines, card_header = self.card_header ) txt_card = textwrap.dedent(txt_card) txt_card+= textwrap.dedent(self.systematics_lines) print txt_card file_datacard_name = self.datacard_name+'.txt' if self.lumi_scaling != 1.0: file_datacard_name = file_datacard_name.replace('.txt', '.lumi_scale_{0:3.2f}.txt'.format(self.lumi_scaling)) with open(file_datacard_name, 'w') as file_datacard: file_datacard.write(textwrap.dedent(txt_card)) file_datacard.write(textwrap.dedent(self.systematics_lines)) self.log.info('Datacard saved: {0}'.format(file_datacard_name)) def _get_shapes_line(self): """ Gets the line with shape shapes * {cat} {cat}.root w:$PROCESS """ self.shapes_exist = 
False for p in self.process_list: self.log.debug('Checking for shape in {0}/{1}'.format(self.datacard_name, p)) try: self.d_input[p]['shape'] except KeyError: pass else: if self.d_input[p]['shape']: self.shapes_exist = True self.shapes_output_file = "{0}.input.root".format(self.datacard_name) if self.lumi_scaling != 1.0: self.shapes_output_file = self.shapes_output_file.replace('input','lumi_scale_{0:3.2f}.input'.format(self.lumi_scaling)) break if self.shapes_exist: return "shapes * cat_{cat} {shapes_output_file} w:$PROCESS".format(cat = self.datacard_name, shapes_output_file = self.shapes_output_file) else: return "#shapes are not used - counting experiment card" def _get_processes(self, process_type='signal,background'): """Read the input dictionary and count processes. """ sig_process_list = [] bkg_process_list = [] process_list=[] for p in self.d_input.keys(): if p not in self.not_a_process: if self.d_input[p]['is_signal']: sig_process_list.append(p) else: bkg_process_list.append(p) if 'signal' in process_type.lower(): process_list+=sorted(sig_process_list) if 'background' in process_type.lower(): process_list+=sorted(bkg_process_list) return process_list def _get_process_lines(self): """ Gets and formats lines coresponding to processes from the self.process_list """ process_lines = {'bin': '', 'name':'', 'number':'', 'rate':'','sys':''} #get enumerates from signal and background processes #signal_process_list = [] #bkg_process_list = [] #for p in self.process_list: #if self.d_input[p]['is_signal']: #signal_process_list.append(p) #else: #bkg_process_list.append(p) #self.signal_process_list = sorted(signal_process_list) #self.bkg_process_list = sorted(bkg_process_list) signal_process_dict = dict(enumerate(self.signal_process_list, start=-(len(self.signal_process_list)-1))) bkg_process_dict = dict(enumerate(self.bkg_process_list, start=1)) #constructing the lines is_first = True for p_number in sorted(signal_process_dict.keys()): #delimiter = '\t\t' delimiter = 
' ' if is_first: delimiter = '' is_first = False p_name = signal_process_dict[p_number] process_lines['bin'] += ( delimiter + 'cat_' + str(self.datacard_name) ) process_lines['name'] += ( delimiter + str(p_name) ) process_lines['number'] += ( delimiter + str(p_number) ) process_lines['rate'] += ( delimiter + str(float(self.d_input[p_name]['rate']) * self.lumi_scaling) ) process_lines['sys'] = "#systematics line: not implemented yet!!!" for p_number in sorted(bkg_process_dict.keys()): #delimiter = '\t\t' delimiter = ' ' if is_first: delimiter = '' is_first = False p_name = bkg_process_dict[p_number] process_lines['bin'] += ( delimiter + 'cat_' + str(self.datacard_name) ) process_lines['name'] += ( delimiter + str(p_name) ) process_lines['number'] += ( delimiter + str(p_number) ) process_lines['rate'] += ( delimiter + str(float(self.d_input[p_name]['rate']) * self.lumi_scaling) ) process_lines['sys'] = "#systematics line: not implemented yet!!!" return process_lines def _get_observation(self): """ Read the data from trees and applies a cut. So far, we only get rate directly as a number. """ return self.d_input['observation']['rate'] def _get_systematics_lines(self): """ Find systematics and construct a table/dict """ systematics_lines_list = [] sys_dict = self.d_input['systematics'] #loop on keys, i.e. 
sys names and append value if process found, otherwise, append '-' for sys_id in sys_dict.keys(): values = [] for sig_id in self.signal_process_list: try: value = sys_dict[sys_id][sig_id] except KeyError: value = '-' values.append(str(value)) for bkg_id in self.bkg_process_list: try: value = sys_dict[sys_id][bkg_id] except KeyError: value = '-' values.append(str(value)) if sys_dict[sys_id]['type'].startswith('param'): values=[] systematics_lines_list.append('{0} {1} {2}'.format(sys_id, sys_dict[sys_id]['type'],string.join(values,' ') )) self.log.debug('Systematic line: {0} '.format(systematics_lines_list[-1])) #show the last one systematics_lines = '' n_systematics = 0 for line in systematics_lines_list: systematics_lines += line systematics_lines += '\n' n_systematics += 1 return (n_systematics, systematics_lines) def make_workspace(self): """Make RooWorkspace and dump to a file""" gSystem.AddIncludePath("-I$CMSSW_BASE/src/ "); gSystem.Load("$CMSSW_BASE/lib/slc5_amd64_gcc472/libHiggsAnalysisCombinedLimit.so"); gSystem.AddIncludePath("-I$ROOFITSYS/include"); self.w = RooWorkspace('w') #run all functions_and_definitions: for factory_statement in self.d_input['functions_and_definitions']: self.w.factory(factory_statement) for p in self.process_list: self.log.debug('Checking for shape in {0}/{1}'.format(self.datacard_name, p)) try: self.d_input[p]['shape'] except KeyError: pass else: if self.d_input[p]['shape']: self.shapes_exist = True self.w.factory(self.d_input[p]['shape']) self.log.debug('Printing workspace...') self.data_obs = self.w.pdf('ggH').generate(RooArgSet(self.w.var('mass4l')), self._get_observation()) self.data_obs.SetNameTitle('data_obs','data_obs') getattr(self.w,'import')(self.data_obs) if self.DEBUG: print 20*"----" self.w.Print() print 20*"----" self.w.writeToFile(self.shapes_output_file) self.log.debug('Datacard workspace written to {0}'.format(self.shapes_output_file)) def scale_lumi_by(self, lumi_scaling): """ Scales luminosity in datacards by a 
fixed factor. This can be used to get exclusion limits projections with higher luminosities. """ self.lumi_scaling = lumi_scaling if self.lumi_scaling != 1.0: self.card_header+='Rates in datacard are scaled by a factor of {0}'.format(self.lumi_scaling) self.log.debug('Rates in datacards will be scaled by a factor of {0}'.format(self.lumi_scaling))
# Open up the signal strength for the mWW fit.
r_sig = fitter_mWW.ws.var('r_signal')
r_sig.setRange(-3., 9.)
r_sig.setConstant(False)
params_mWW.Print("v")
fitter_mWW.ws.defineSet("params", params_mWW)

var_name = pars_mWW.var[0]
other_var = pars_mWW.var[1]

compPdfs = []
for comp in components:
    # Copy each component pdf into the combined workspace and rename it
    # so it refers specifically to this fit variable.
    getattr(combinedWS, 'import')(fitter_mWW.ws.pdf(comp))
    newPdf = combinedWS.pdf(comp)
    newPdf.SetName('%s_%s' % (comp, var_name))
    norm = combinedWS.function('f_%s_norm' % comp)
    # 2D pdf: (shape in the other variable) x (shape in this variable),
    # extended with the component normalisation.
    combinedWS.factory('PROD::%s(%s_%s, %s)' % (comp, comp, other_var,
                                                newPdf.GetName()))
    compPdfs.append(
        combinedWS.factory('RooExtendPdf::%s_extended(%s, %s)' %
                           (comp, comp, norm.GetName())))

combinedWS.defineSet('obsSet', '%s,%s' % (other_var, var_name))

# Optionally inject signal at the requested strength.
if opts.sigInject:
    r_comb = combinedWS.var('r_signal')
    r_comb.setVal(opts.sigInject)
    r_comb.setError(0.1)
    r_comb.setRange(-3., 9.)
    r_comb.setConstant(False)

compNames = [c.GetName() for c in compPdfs]
compList = RooArgList(combinedWS.argSet(','.join(compNames)))
getattr(combinedWS, 'import')(RooAddPdf('total', 'total', compList))
combinedPdf = combinedWS.pdf('total')
#!/usr/bin/env python from ROOT import TFile, TCanvas from ROOT import RooDataSet, RooWorkspace, RooArgSet tf = TFile.Open('AnalysisOut.root') tree = tf.Get('AnalysisTree') ws = RooWorkspace("w", "w") observables = RooArgSet() ws.defineSet("observables", observables) ws.factory("mass[5050,6000]") getattr(ws, 'set')("observables").add(ws.var("mass")) ws.factory("gamgams_pt[0,40e3]") getattr(ws, 'set')("observables").add(ws.var("gamgams_pt")) mc = RooDataSet('mc', '', getattr(ws, 'set')('observables')) data = RooDataSet('data', '', getattr(ws, 'set')('observables')) for ev in range(tree.GetEntries()): tree.GetEntry(ev) if tree.itype != -88 and tree.itype != 72 and tree.itype != 82: continue if tree.bdtoutput < 0.2: continue if tree.B0_MM < ws.var("mass").getMin() or tree.B0_MM > ws.var( "mass").getMax(): continue ws.var("mass").setVal(tree.B0_MM) ws.var("gamgams_pt").setVal(tree.gamgams_PT)
def rooFit511(compact=kFALSE): workspace = RooWorkspace("workspace") print ">>> creating and adding basic pdfs..." # Remake example pdf of tutorial rf502_wspacewrite.C: # # Basic pdf construction: ClassName::ObjectName(constructor arguments) # Variable construction: VarName[x,xlo,xhi], VarName[xlo,xhi], VarName[x] # pdf addition: SUM::ObjectName(coef1*pdf1,...coefM*pdfM,pdfN) if not compact: # Use object factory to build pdf of tutorial rooFit502_wspacewrite workspace.factory("Gaussian::sig1(x[-10,10],mean[5,0,10],0.5)") workspace.factory("Gaussian::sig2(x,mean,1)") workspace.factory("Chebychev::bkg(x,{a0[0.5,0.,1],a1[-0.2,0.,1.]})") workspace.factory("SUM::sig(sig1frac[0.8,0.,1.]*sig1,sig2)") workspace.factory("SUM::model(bkgfrac[0.5,0.,1.]*bkg,sig)") else: # Use object factory to build pdf of tutorial rf502_wspacewrite but # - Contracted to a single line recursive expression, # - Omitting explicit names for components that are not referred to explicitly later workspace.factory("SUM::model(bkgfrac[0.5,0.,1.]*Chebychev::bkg(x[-10,10],{a0[0.5,0.,1],a1[-0.2,0.,1.]}),"+\ "SUM(sig1frac[0.8,0.,1.]*Gaussian(x,mean[5,0,10],0.5),Gaussian(x,mean,1)))") print ">>> advanced pdf constructor arguments..." # pdf constructor arguments may by any type of RooAbsArg, but also the follow conversion are made: # Double_t --> RooConst(...) # {a,b,c} --> RooArgSet() or RooArgList() depending on required ctor arg # dataset name --> RooAbsData reference for any dataset residing in the workspace # enum --> any enum label that belongs to an enum defined in the (base) class print ">>> generate a dummy dataset from 'model' pdf and import it in the workspace..." data = workspace.pdf("model").generate(RooArgSet(workspace.var("x")), 1000) # RooDataSet getattr(workspace, "import")(data, Rename("data")) print ">>> construct keys pdf..." 
# Construct a KEYS pdf passing a dataset name and an enum type defining the # mirroring strategy workspace.factory("KeysPdf::k(x,data,NoMirror,0.2)") print ">>> workspace contents:" workspace.Print() print ">>> save workspace in memory (gDirectory)..." gDirectory.Add(workspace)
#!/usr/bin/env python
# Apply the selection and fit the sideband only; use the data-driven
# method to check the background composition.
from ROOT import TLegend
from ROOT import TFile, TH1F, TH1D, TCanvas, RooRealVar, RooDataSet, RooDataHist, RooArgSet, RooArgList, TGaxis
from ROOT import RooGaussian, RooAddPdf, RooPolynomial, RooExponential, RooCBShape, RooArgusBG, RooFit, RooWorkspace, RooGenericPdf, RooCategory, RooSimultaneous, RooMsgService, RooKeysPdf

outFileName = 'store_root/workspace_dataBrowse_dataAndComponentsStack.root'
outFig = 'store_fig/pdf_workspace_dataBrowse_dataAndComponentsStack.pdf'

space = RooWorkspace('space', False)
# Observables used by the browse/fit steps.
for observable in ('lbtkMass[5.3,6.0]',
                   'bdbarMass[2.5,8.]',
                   'bdMass[3.5,7.5]',
                   'bsMass[4.,8.]',
                   'kkMass[0.2,2.4]',
                   'kpiMass[0.5,2.2]',
                   'kpibarMass[0.5,2.2]',
                   'tk1Pt[0.,100.]',
                   'tk2Pt[0., 70.]'):
    space.factory(observable)

# Output workspace file and sub-directories.
outFile = TFile(outFileName, 'recreate')
figDir = outFile.mkdir('figs')
fitDir = outFile.mkdir('fitRes')

# Output canvas (transparent fill colour).
canv = TCanvas('c1', '', 1000, 1000)
canv.SetFillColor(4000)
## Pairs of photon scale and extra smearing.
sTest = [-2, 0.5]
rTest = [1, 0.5]
phoPtRange = (15,20)

chains = getChains('v11')
mcTree = chains['z']
dataTree = chains['data']

w = RooWorkspace('w')
massShift = 90 + 1.03506

## Define the workspace variables used throughout the fit.
mmgMass = w.factory('mmgMass[40, 180]')
mmgMassShifted = w.factory('mmgMassShifted[-50, 90]')
mmgGenMass = w.factory('mmgGenMass[0, 300]')
mmgMassPhoGenE = w.factory('mmgMassPhoGenE[0, 300]')
mmgMassShiftedPhoGenE = w.factory('mmgMassShiftedPhoGenE[-90, 210]')
mmgMassPhoSmear = w.factory('mmgMassPhoSmear[-30,30]')
mmgMassPhoSmear.SetTitle('mmgMass - mmgMassPhoGenE')
phoERes = w.factory('phoERes[-0.3,3]')
mmMass = w.factory('mmMass[10, 180]')
weight = w.factory('weight[1]')
phoScale = w.factory('phoScale[0,-50,50]')
weight.SetTitle('pileup.weight')

## Shift mmg mass to peak at zero so that the mass spectrum can be treated
## as the detector resolution in the FFT convolution.
mmgMassShifted.SetTitle('mmgMass - %g' % float(massShift))
def plotStuff(plotList, plotstring, cutstring, plotfile, plotname, xlabel, ylabel, unitnorm): isFirst = True w = RooWorkspace("w") w.factory("x[-100,100]") for dataset in plotList: dataset[0].Draw(plotstring.format(dataset[1]), cutstring, "goff") hist = gDirectory.Get(dataset[1]) data = RooDataHist(dataset[1], dataset[1], RooArgList(w.var("x")), hist) getattr(w, 'import')(data) w.factory("HistPdf::triPdf(x,tri)") w.factory("HistPdf::wabPdf(x,wab)") w.factory("prod::triscale(a[0.3,0,10],{0})".format( w.data("tri").sum(False))) w.factory("prod::wabscale(b[0.1,0,10],{0})".format( w.data("wab").sum(False))) w.factory("SUM::sumModel(triscale*triPdf,wabscale*wabPdf)") w.pdf("sumModel").fitTo(w.data("data"), RooFit.SumW2Error(True), RooFit.Extended(True), RooFit.Verbose(False), RooFit.PrintLevel(-1)) #w.pdf("sumModel").fitTo(w.data("data"),RooFit.Extended(True)) #w.pdf("sumModel").fitTo(w.data("data")) frame = w.var("x").frame() w.data("data").plotOn(frame) #w.pdf("triPdf").plotOn(frame) #w.pdf("wabPdf").plotOn(frame) w.pdf("sumModel").plotOn(frame) w.pdf("sumModel").paramOn(frame) frame.SetTitle(gDirectory.Get("data").GetTitle()) frame.Draw() c.Print(plotfile) c.Clear() dataHist = gDirectory.Get("data") triHist = gDirectory.Get("tri") wabHist = gDirectory.Get("wab") if legendright: leg = TLegend(0.7, 0.75, 0.9, 0.9) else: leg = TLegend(0.1, 0.75, 0.3, 0.9) hs = THStack("hs", plotname) for dataset in plotList: hist = gDirectory.Get(dataset[1]) #hist.Sumw2() if unitnorm: hist.Scale(1.0 / hist.Integral()) else: hist.Scale(1.0 / dataset[2]) print "{0} {1} {2}".format(plotname, dataset[4], hist.Integral()) hist.SetLineColor(dataset[3]) leg.AddEntry(hist, dataset[4]) hs.Add(hist) #hist.GetXaxis().SetTitle(xlabel) hist.GetYaxis().SetTitle(ylabel) #if isFirst: #hist.GetXaxis().SetTitle(xlabel) #hist.GetYaxis().SetTitle(ylabel) #hist.Draw() #else: #hist.Draw("same") isFirst = False sumHist = triHist.Clone("sum") sumHist.Add(wabHist) if unitnorm: sumHist.Scale(1.0 / 
sumHist.Integral()) sumHist.SetLineColor(6) leg.AddEntry(sumHist, "MC sum") hs.Add(sumHist) hs.Draw("nostack") hs.GetXaxis().SetTitle(xlabel) hs.GetYaxis().SetTitle(ylabel) leg.Draw() c.Print(plotfile)
def setup_workspace(config): import ROOT from ROOT import RooWorkspace, gROOT, gStyle, RooAbsReal, RooMsgService, RooFit #from ROOT import RooFit, gROOT, gDirectory, gStyle, gPad, TTree, RooCmdArg,RooBinning #from ROOT import RooRealVar, RooMappedCategory, RooCategory, RooFormulaVar, RooAbsData #from ROOT import RooBMixDecay, RooMCStudy, RooAddModel, RooEffProd, RooMsgService #from ROOT import RooWorkspace, TCanvas, TFile, kFALSE, kTRUE, RooDataSet, TStopwatch #from ROOT import RooArgSet, RooArgList, RooRandom, RooMinuit, RooAbsReal, RooDataHist #from ROOT import TBrowser, TH2F, TF1, TH1F, RooGenericPdf, RooLinkedList from math import sqrt gROOT.SetStyle("Plain") gStyle.SetPalette(1) gStyle.SetOptStat(0) gStyle.SetOptFit(0) gStyle.SetOptStat(1111) gStyle.SetOptFit(10111) gStyle.SetOptTitle(1) #gROOT.ProcessLine(".L RooGaussianTrunk.cxx+") #gROOT.ProcessLine(".L RooCBShapeTrunk.cxx+") #gROOT.ProcessLine(".L RooChebychevTrunk.cxx+") #from ROOT import RooGaussianTrunk, RooChebychevTrunk, RooCBShapeTrunk #RooAbsReal.defaultIntegratorConfig().Print() RooAbsReal.defaultIntegratorConfig().setEpsAbs(1e-8) RooAbsReal.defaultIntegratorConfig().setEpsRel(1e-8) #RooAbsReal.defaultIntegratorConfig().setEpsAbs(1e-6) #RooAbsReal.defaultIntegratorConfig().setEpsRel(1e-6) RooAbsReal.defaultIntegratorConfig().Print() print "Numeric integration set up" #TODO: is the integration acceptable? 
##This controls the logging output from RooFit #RooMsgService.instance().addStream(RooFit.DEBUG,RooFit.Topic(RooFit.Fitting)) RooMsgService.instance().deleteStream(1) #RooMsgService.instance().addStream(RooFit.INFO,RooFit.Topic(RooFit.Generation + RooFit.Minization + RooFit.Plotting + RooFit.Fitting + RooFit.Integration + RooFit.LinkStateMgmt + RooFit.Eval + RooFit.Caching + RooFit.Optimization + RooFit.ObjectHandling + RooFit.InputArguments + RooFit.Tracing + RooFit.Contents + RooFit.DataHandling + RooFit.NumericIntegration)) RooMsgService.instance().addStream(RooFit.INFO,RooFit.Topic(RooFit.LinkStateMgmt + RooFit.Caching + RooFit.ObjectHandling + RooFit.InputArguments + RooFit.Tracing)) RooMsgService.instance().Print() print "Message service set up" w = RooWorkspace("w",False) w.factory("RAND[0,1]") if "norm" not in config["mode"]: D0_Mass = w.factory("D0_Mass[1815,1915]") else: D0_Mass = w.factory("D0_Mass[1800,1930]") D0_Mass.setUnit("MeV") D0_Mass.setBins(60) Del_Mass = w.factory("Del_Mass[139,155]") Del_Mass.setUnit("MeV") Del_Mass.setBins(60) if "norm" not in config["mode"]: Dataset = w.factory("DataSet[BDT1,BDT2,BDT3]") else: Dataset = w.factory("DataSet[Norm]") w.factory("classID[Sig=0,Bkg=1]") w.factory("BDT_ada[-1,1]") w.factory("x1_PIDe[-2,20]") w.factory("x2_ProbNNmu[0,1]") #D0_Mass.setRange("blinded",1700.,1900.) if "norm" not in config["mode"]: dataCats = ["", "BDT1", "BDT2", "BDT3"] else: dataCats = ["", "Norm"] for data in dataCats: for dst_side in ["", "delsig", "delhigh", "dellow"]: for d_side in ["", "dsig", "dhigh", "dlow", "dhigh1", "dlow1", "dhigh2", "dlow2"]: name = data+dst_side+d_side if data == "BDT1": Dataset.setRange(name,"BDT1") elif data == "BDT2": Dataset.setRange(name,"BDT2") elif data == "BDT3": Dataset.setRange(name,"BDT3") elif data == "Norm": Dataset.setRange(name,"Norm") if dst_side == "delhigh": Del_Mass.setRange(name,148.,155.) elif dst_side == "delsig": Del_Mass.setRange(name,143.,148.) 
elif dst_side == "dellow": Del_Mass.setRange(name,139.,143.) if d_side == "dhigh2": D0_Mass.setRange(name,1910.,1930.) elif d_side == "dhigh1": D0_Mass.setRange(name,1890.,1910.) elif d_side == "dhigh": D0_Mass.setRange(name,1890.,1930.) elif d_side == "dsig": D0_Mass.setRange(name,1840.,1890.) elif d_side == "dlow": D0_Mass.setRange(name,1800.,1840.) elif d_side == "dlow1": D0_Mass.setRange(name,1820.,1840.) elif d_side == "dlow2": D0_Mass.setRange(name,1800.,1820.) w.defineSet("args","D0_Mass,Del_Mass,DataSet") w.defineSet("argsBasic","D0_Mass,Del_Mass") #w.defineSet("argsPreCut","D0_Mass,Del_Mass,RAND,classID,BDT_ada") w.defineSet("argsPreCut","D0_Mass,Del_Mass,RAND,classID,BDT_ada,x1_PIDe,x2_ProbNNmu") w.defineSet("argsPreCutPiPi","D0_Mass,Del_Mass,RAND") w.defineSet("argsPreCutKPi","D0_Mass,Del_Mass,RAND") # --- Norm --- if config['norm'] is "kpi": w.factory("{D0_Mass,Norm_D0M_Min[1815],Norm_D0M_Max[1915]}") else: w.factory("{D0_Mass,Norm_D0M_Min[1826],Norm_D0M_Max[1920]}") w.factory("RooGenericPdf::Norm_D0M_Range('(D0_Mass>Norm_D0M_Min&&D0_Mass<Norm_D0M_Max)',{D0_Mass,Norm_D0M_Min,Norm_D0M_Max})") w.factory("RooFormulaVar::Norm_D0M_Sig_Gaus2_Sigma('Norm_D0M_Sig_Gaus1_Sigma+Norm_D0M_Sig_Gaus2_Sigma_Diff',{Norm_D0M_Sig_Gaus1_Sigma[5,0,10],Norm_D0M_Sig_Gaus2_Sigma_Diff[5,0.,10.]})") w.factory("RooFormulaVar::Norm_D0M_Sig_Gaus3_Sigma('Norm_D0M_Sig_Gaus1_Sigma+Norm_D0M_Sig_Gaus2_Sigma_Diff+Norm_D0M_Sig_Gaus3_Sigma_Diff',{Norm_D0M_Sig_Gaus1_Sigma,Norm_D0M_Sig_Gaus2_Sigma_Diff,Norm_D0M_Sig_Gaus3_Sigma_Diff[2,0.,20.]})") w.factory("RooFormulaVar::Norm_D0M_Sig_Gaus1_Sigma_Scaled('Norm_D0M_Sig_Gaus1_Sigma*Norm_D0M_Sig_Gaus_Sigma_Scale',{Norm_D0M_Sig_Gaus1_Sigma,Norm_D0M_Sig_Gaus_Sigma_Scale[1]})") w.factory("RooFormulaVar::Norm_D0M_Sig_Gaus2_Sigma_Scaled('(Norm_D0M_Sig_Gaus1_Sigma+Norm_D0M_Sig_Gaus2_Sigma_Diff)*Norm_D0M_Sig_Gaus_Sigma_Scale',{Norm_D0M_Sig_Gaus1_Sigma,Norm_D0M_Sig_Gaus2_Sigma_Diff,Norm_D0M_Sig_Gaus_Sigma_Scale})") 
w.factory("RooFormulaVar::Norm_D0M_Sig_Gaus3_Sigma_Scaled('(Norm_D0M_Sig_Gaus1_Sigma+Norm_D0M_Sig_Gaus2_Sigma_Diff+Norm_D0M_Sig_Gaus3_Sigma_Diff)*Norm_D0M_Sig_Gaus_Sigma_Scale',{Norm_D0M_Sig_Gaus1_Sigma,Norm_D0M_Sig_Gaus2_Sigma_Diff,Norm_D0M_Sig_Gaus3_Sigma_Diff,Norm_D0M_Sig_Gaus_Sigma_Scale})") # D0_Mass Signal w.factory("RooCBShape::Norm_D0M_Sig_Gaus1(D0_Mass,Norm_D0M_Sig_Gaus_Mean[1867,1850,1880],Norm_D0M_Sig_Gaus1_Sigma_Scaled,Norm_D0M_Sig_Gaus1_alpha[1.5,0,6],Norm_D0M_Sig_Gaus1_n[2,0,20])") #w.factory("RooGaussian::Norm_D0M_Sig_Gaus1(D0_Mass,Norm_D0M_Sig_Gaus_Mean[1867,1850,1880],Norm_D0M_Sig_Gaus1_Sigma_Scaled)") w.factory("RooCBShape::Norm_D0M_Sig_Gaus2(D0_Mass,Norm_D0M_Sig_Gaus_Mean,Norm_D0M_Sig_Gaus2_Sigma_Scaled,Norm_D0M_Sig_Gaus2_alpha[1.5,0,6],Norm_D0M_Sig_Gaus2_n[2,0,20])") #w.factory("RooGaussian::Norm_D0M_Sig_Gaus2(D0_Mass,Norm_D0M_Sig_Gaus_Mean,Norm_D0M_Sig_Gaus2_Sigma_Scaled)") #w.factory("RooGaussian::Norm_D0M_Sig_Gaus3(D0_Mass,Norm_D0M_Sig_Gaus3_Mean[1867,1850,1880],Norm_D0M_Sig_Gaus3_Sigma_Scaled)") w.factory("RooGaussian::Norm_D0M_Sig_Gaus3(D0_Mass,Norm_D0M_Sig_Gaus_Mean,Norm_D0M_Sig_Gaus3_Sigma_Scaled)") #w.factory("RooCBShape::Norm_D0M_Sig_Gaus3(D0_Mass,Norm_D0M_Sig_Gaus_Mean,Norm_D0M_Sig_Gaus3_Sigma_Scaled,Norm_D0M_Sig_Gaus3_alpha[1.5,0,6],Norm_D0M_Sig_Gaus3_n[0.5,0,20])") #w.factory("SUM::Norm_D0M_Sig(Norm_D0M_Sig_Gaus1_Frac[0.4,0,1]*Norm_D0M_Sig_Gaus1,Norm_D0M_Sig_Gaus3_Frac[0.1,0,1]*Norm_D0M_Sig_Gaus3,Norm_D0M_Sig_Gaus2)") w.factory("SUM::Norm_D0M_Sig(Norm_D0M_Sig_Gaus1_Frac[0.4,0,1]*Norm_D0M_Sig_Gaus1,Norm_D0M_Sig_Gaus2)") #w.factory("PROD::Norm_D0M_Sig(Norm_D0M_Sig_Sum,Norm_D0M_Range)") # D0_Mass MisId #w.factory("RooGaussian::Norm_D0M_MisId_Gaus1(D0_Mass,Norm_D0M_MisId_Gaus_Mean[1790,1720,1820],Norm_D0M_Sig_Gaus1_Sigma)") #w.factory("RooGaussian::Norm_D0M_MisId_Gaus2(D0_Mass,Norm_D0M_MisId_Gaus_Mean,Norm_D0M_Sig_Gaus2_Sigma)") #w.factory("SUM::Norm_D0M_MisId(Norm_D0M_Sig_Gaus1_Frac*Norm_D0M_MisId_Gaus1,Norm_D0M_MisId_Gaus2)") 
##w.factory("PROD::Norm_D0M_MisId(Norm_D0M_MisId_Sum,Norm_D0M_Range)") ##w.factory("RooExponential::Norm_D0M_MisId_Exp(D0_Mass,Norm_D0M_MisId_Exp_Const[-0.15,-.3,-.1])") ##w.factory("PROD::Norm_D0M_MisId(Norm_D0M_MisId_Exp,Norm_D0M_Range)") # D0_Mass Combinatorical w.factory('{Norm_D0M_Bkg_Cheby_1[-0.5,-1,1]}') #w.factory("RooChebychev::Norm_D0M_Bkg_Poly(D0_Mass,{Norm_D0M_Bkg_Cheby_1[-0.25,-1.5,1]})") #w.factory("PROD::Norm_D0M_Bkg(Norm_D0M_Bkg_Poly,Norm_D0M_Range)") w.factory("RooExponential::Norm_D0M_Bkg(D0_Mass,Norm_D0M_Bkg_Exp_c[-0.0088,-0.05,-0.001])") #w.factory("RooChebychev::Norm_D0M_Bkg(D0_Mass,{Norm_D0M_Bkg_Cheby_1})") #w.factory("RooChebychev::Norm_D0M_Bkg(D0_Mass,{Norm_D0M_Bkg_Cheby_1,Norm_D0M_Bkg_Cheby_2[-0.1,-0.7,1]})") w.factory("RooFormulaVar::Norm_DelM_Sig_Gaus_Mean_Shifted('Norm_DelM_Sig_Gaus_Mean+Norm_DelM_Sig_Gaus_Mean_Shift',{Norm_DelM_Sig_Gaus_Mean[145.5,145,146],Norm_DelM_Sig_Gaus_Mean_Shift[0]})") w.factory("RooFormulaVar::Norm_DelM_Sig_Gaus3_Mean_Shifted('Norm_DelM_Sig_Gaus3_Mean+Norm_DelM_Sig_Gaus_Mean_Shift',{Norm_DelM_Sig_Gaus3_Mean[145.7,144,155],Norm_DelM_Sig_Gaus_Mean_Shift})") w.factory("RooFormulaVar::Norm_DelM_Sig_Gaus2_Sigma('Norm_DelM_Sig_Gaus1_Sigma+Norm_DelM_Sig_Gaus2_Sigma_Diff',{Norm_DelM_Sig_Gaus1_Sigma[.4,0,1],Norm_DelM_Sig_Gaus2_Sigma_Diff[0.4,0.,1.]})") w.factory("RooFormulaVar::Norm_DelM_Sig_Gaus3_Sigma('Norm_DelM_Sig_Gaus1_Sigma+Norm_DelM_Sig_Gaus2_Sigma_Diff+Norm_DelM_Sig_Gaus3_Sigma_Diff',{Norm_DelM_Sig_Gaus1_Sigma,Norm_DelM_Sig_Gaus2_Sigma_Diff,Norm_DelM_Sig_Gaus3_Sigma_Diff[0.4,0.,3.]})") # Del_Mass signal w.factory("{Norm_DelM_Sig_Gaus3_Frac[0.01,0,.7]}") #w.factory("RooCBShape::Norm_DelM_Sig_Gaus1(Del_Mass,Norm_DelM_Sig_Gaus_Mean_Shifted,Norm_DelM_Sig_Gaus1_Sigma[.4,0,1],Norm_DelM_Sig_CB1_alpha[1.5,0,6], BDT%(n)i_D0M_Sig_CB1_n[2,0,10] )") w.factory("RooGaussian::Norm_DelM_Sig_Gaus1(Del_Mass,Norm_DelM_Sig_Gaus_Mean_Shifted,Norm_DelM_Sig_Gaus1_Sigma)") 
w.factory("RooGaussian::Norm_DelM_Sig_Gaus2(Del_Mass,Norm_DelM_Sig_Gaus_Mean_Shifted,Norm_DelM_Sig_Gaus2_Sigma)") w.factory("RooGaussian::Norm_DelM_Sig_Gaus3(Del_Mass,Norm_DelM_Sig_Gaus3_Mean_Shifted,Norm_DelM_Sig_Gaus3_Sigma)") w.factory("SUM::Norm_DelM_Sig(Norm_DelM_Sig_Gaus1_Frac[0.1,0,.7]*Norm_DelM_Sig_Gaus1,Norm_DelM_Sig_Gaus3_Frac*Norm_DelM_Sig_Gaus3,Norm_DelM_Sig_Gaus2)") #w.factory("SUM::Norm_DelM_Sig(Norm_DelM_Sig_Gaus1_Frac[0.1,0,.7]*Norm_DelM_Sig_Gaus1,Norm_DelM_Sig_Gaus2)") # mis recod w.factory("RooGaussian::Norm_DelM_MisRecod_Gaus1(Del_Mass,Norm_DelM_MisRecod_Gaus_Mean[145.5,145,146],Norm_DelM_MisRecod_Gaus_Sigma1[1.2,0,5] )") w.factory("RooChebychev::Norm_D0M_MisRecod(D0_Mass,{Norm_D0M_MisRecod_Cheby_1[0,-1,1]})") # Del_Mass Combinatorical #w.factory("RooDstD0BG::Norm_DelM_Bkg(Del_Mass,Norm_DelM_Bkg_m0[139.5,137.5,140.5],Norm_DelM_Bkg_c[40,7,350],Norm_DelM_Bkg_a[-20,-100,-1],Norm_DelM_Bkg_b[0.4,-1,2])") w.factory("RooDstD0BG::Norm_DelM_Bkg(Del_Mass,Norm_DelM_Bkg_m0[139.5,137.5,140.5],Norm_DelM_Bkg_m0,Norm_DelM_Bkg_a[-20,-100,-1],Norm_DelM_Bkg_b[0.4,-1,2])") w.factory("{Norm_DelM_Bkg_c[40,7,350]}") # Del_Mass signal w.factory("RooGaussian::Norm_DelM_MisId(Del_Mass,Norm_DelM_Sig_Gaus_Mean,Norm_DelM_MisId_Gaus_Sigma1[1,0,3])") w.factory("PROD::Norm_Sig(Norm_DelM_Sig,Norm_D0M_Sig)") w.factory("PROD::Norm_Comb(Norm_DelM_Bkg,Norm_D0M_Bkg)") w.factory("PROD::Norm_MisRecod(Norm_DelM_MisRecod_Gaus1,Norm_D0M_MisRecod)") #w.factory("PROD::Norm_MisId(Norm_DelM_MisId,Norm_D0M_MisId)") #w.factory("PROD::Norm_MisId(Norm_DelM_Sig,Norm_D0M_MisId)") #w.factory("PROD::Norm_MisId_Prompt(Norm_DelM_Bkg,Norm_D0M_MisId)") w.factory("PROD::Norm_Prompt(Norm_DelM_Bkg,Norm_D0M_Sig)") w.factory("{Norm_N_Sig[65000,20000,500000],Norm_N_MisId[1300,100,3000],Norm_N_MisRecod[5000,100,30000],Norm_N_MisId_Prompt[500,10,1000]}") # --- eMu --- #w.factory("EMu_N_Sig[1000,0,100000]") for n in (1,2,3): w.factory("BDT%(n)i_Sig_Eff[0.3,0,1]"%({"n":n})) 
w.factory("EMu_Eff[%f]"%(config['emuEff'])) w.factory("EMu_BR[1e-8,-1e-7,1e-7]") if config['norm'] is 'pipi': w.factory("Norm_Eff[%f]"%(config['pipiEff'])) w.factory("Norm_BR[%f]"%(config['pipiBR'][0])) w.obj("Norm_BR").setError(config['pipiBR'][1]) elif config['norm'] is 'kpi': w.factory("Norm_Eff[%f]"%(config['kpiEff'])) w.factory("Norm_BR[%f]"%(config['kpiBR'][0])) w.obj("Norm_BR").setError(config['kpiBR'][1]) w.factory("RooFormulaVar::N_PiPi('Norm_N_Sig*(%f)',{Norm_N_Sig})"%(config['pipiAsEmuEff']*config['pipiBR'][0]/config['kpiBR'][0]/config['kpiEff'],)) w.factory("RooFormulaVar::EMu_N_Sig('abs(Norm_N_Sig*((EMu_BR*EMu_Eff)/(Norm_BR*Norm_Eff)))',{Norm_BR,EMu_BR,EMu_Eff,Norm_Eff,Norm_N_Sig})") w.factory("{EMu_D0M_Min[1815],EMu_D0M_Max[1915]}") w.factory("RooGaussian::Norm_Constraint(Norm_N_Sig,%f,%f)"%(config["normEvents"][0],config["normEvents"][1])) # D0_Mass Combinatorical w.factory("RooGenericPdf::BDT_D0M_Blind('(D0_Mass<1700||D0_Mass>1900)',{D0_Mass})") w.factory("RooGenericPdf::BDT_D0M_Range('(D0_Mass>EMu_D0M_Min&&D0_Mass<EMu_D0M_Max)',{D0_Mass,EMu_D0M_Min,EMu_D0M_Max})") w.factory("RooChebychev::BDT_D0M_Bkg(D0_Mass,{BDT_D0M_Bkg_Cheby_1[-0.7,-3.0,0.0],BDT_D0M_Bkg_Cheby_2[-0.2,-3.0,0.0]})") w.factory("PROD::BDT_D0M_Bkg_Blind(BDT_D0M_Bkg,BDT_D0M_Blind)") # Del_Mass Combinatorical w.factory("RooDstD0BG::BDT_DelM_Bkg(Del_Mass,BDT_DelM_Bkg_m0[139.5,137.5,140.5],BDT_DelM_Bkg_c[40,7,350],BDT_DelM_Bkg_a[-20,-100,-1],BDT_DelM_Bkg_b[-0.1,-2,1])"%({"n":n})) w.factory("{BDT_D0M_Sig_CB1_alphaleft[0.3,0,1]}") w.factory("{BDT_D0M_Sig_CB2_alpharight[-0.5,-5,0]}") for n in (1,2,3): if n is not 3: w.factory("RooFormulaVar::BDT%(n)i_N_Sig('EMu_N_Sig*BDT%(n)i_Sig_Eff',{EMu_N_Sig,BDT%(n)i_Sig_Eff})"%({"n":n})) else: w.factory("RooFormulaVar::BDT%(n)i_N_Sig('EMu_N_Sig*(1-(BDT1_Sig_Eff+BDT2_Sig_Eff))',{EMu_N_Sig,BDT1_Sig_Eff,BDT2_Sig_Eff})"%({"n":n})) # D0_Mass Signal w.factory("{BDT%(n)i_D0M_Sig_CB2_alpharight[-0.5,-5,0],BDT%(n)i_D0M_Sig_CB1_alphaleft[0.3,0,1]}"%({"n":n})) 
w.factory("RooCBShape:BDT%(n)i_D0M_Sig_CB1(D0_Mass, BDT%(n)i_D0M_Sig_CB_Mean[1850,1750,1900], BDT%(n)i_D0M_Sig_CB1_Sigma[10,1,30], BDT_D0M_Sig_CB1_alphaleft, BDT%(n)i_D0M_Sig_CB1_n[2,0,10])"%({"n":n})) w.factory("RooCBShape:BDT%(n)i_D0M_Sig_CB2(D0_Mass, BDT%(n)i_D0M_Sig_CB_Mean, BDT%(n)i_D0M_Sig_CB2_Sigma[3,1,30], BDT_D0M_Sig_CB2_alpharight, BDT%(n)i_D0M_Sig_CB2_n[5,0,50])"%({"n":n})) w.factory("SUM::BDT%(n)i_D0M_Sig(BDT%(n)i_D0M_Sig_CB1_Frac[0.8,0,1]*BDT%(n)i_D0M_Sig_CB1,BDT%(n)i_D0M_Sig_CB2)"%({"n":n})) # Del_Mass signal w.factory("{BDT%(n)i_DelM_Sig_Gaus1_Frac[0.75,0,1]}"%({"n":n})) w.factory("RooGaussian::BDT%(n)i_DelM_Sig_Gaus1(Del_Mass,BDT%(n)i_DelM_Sig_Gaus_Mean[145.5,143,148],BDT%(n)i_DelM_Sig_Gaus_Sigma1[1,0,5] )"%({"n":n})) w.factory("RooGaussian::BDT%(n)i_DelM_Sig_Gaus2(Del_Mass,BDT%(n)i_DelM_Sig_Gaus_Mean,BDT%(n)i_DelM_Sig_Gaus_Sigma2[.1,0,2] )"%({"n":n})) #w.factory("{BDT%(n)i_DelM_Sig_Gaus3_Frac[0.05,0,0.1],BDT%(n)i_DelM_Sig_Gaus_Mean_2[148,143,152],BDT%(n)i_DelM_Sig_Gaus_Sigma3[10,0,20]}"%({"n":n})) w.factory("RooGaussian::BDT%(n)i_DelM_Sig_Gaus3(Del_Mass,BDT%(n)i_DelM_Sig_Gaus_Mean_3[148,143,152],BDT%(n)i_DelM_Sig_Gaus_Sigma3[10,0,20] )"%({"n":n})) w.factory("SUM::BDT%(n)i_DelM_Sig(BDT%(n)i_DelM_Sig_Gaus3_Frac[0.05,0,0.1]*BDT%(n)i_DelM_Sig_Gaus3,BDT%(n)i_DelM_Sig_Gaus2_Frac[0.2,0,1]*BDT%(n)i_DelM_Sig_Gaus2,BDT%(n)i_DelM_Sig_Gaus1)"%({"n":n})) #w.factory("SUM::BDT%(n)i_DelM_Sig(BDT%(n)i_DelM_Sig_Gaus2_Frac[0.2,0,1]*BDT%(n)i_DelM_Sig_Gaus2,BDT%(n)i_DelM_Sig_Gaus1)"%({"n":n})) w.factory("PROD::BDT%(n)i_Sig(BDT%(n)i_DelM_Sig,BDT%(n)i_D0M_Sig)"%({"n":n})) w.factory("PROD::BDT%(n)i_Comb_Blind(BDT_DelM_Bkg,BDT_D0M_Bkg_Blind)"%({"n":n})) w.factory("PROD::BDT%(n)i_Comb(BDT_DelM_Bkg,BDT_D0M_Bkg)"%({"n":n})) w.factory("{BDT1_PiPi_Eff[0.5,0,1],BDT2_PiPi_Eff[0.3,0,1]}") w.factory("{BDT_D0M_PiPi_CB2_alpharight[-0.5,-5,0],BDT_D0M_PiPi_CB1_alphaleft[0.8,0,3]}") for n in (1,2,3): if n is not 3: 
w.factory("RooFormulaVar::BDT%(n)i_N_PiPi('N_PiPi*BDT%(n)i_PiPi_Eff',{N_PiPi,BDT%(n)i_PiPi_Eff})"%({"n":n})) else: w.factory("RooFormulaVar::BDT%(n)i_N_PiPi('N_PiPi*(1-(BDT1_PiPi_Eff+BDT2_PiPi_Eff))',{N_PiPi,BDT1_PiPi_Eff,BDT2_PiPi_Eff})"%({"n":n})) # D0_Mass PiPi w.factory("{BDT%(n)i_D0M_PiPi_CB2_alpharight[-0.5,-5,0],BDT%(n)i_D0M_PiPi_CB1_alphaleft[0.8,0,3]}"%({"n":n})) w.factory("RooCBShape:BDT%(n)i_D0M_PiPi_CB1(D0_Mass, BDT%(n)i_D0M_PiPi_CB_Mean[1850,1750,1900], BDT%(n)i_D0M_PiPi_CB1_Sigma[10,1,30], BDT_D0M_PiPi_CB1_alphaleft, BDT%(n)i_D0M_PiPi_CB1_n[2,0,10])"%({"n":n})) w.factory("RooCBShape:BDT%(n)i_D0M_PiPi_CB2(D0_Mass, BDT%(n)i_D0M_PiPi_CB_Mean, BDT%(n)i_D0M_PiPi_CB2_Sigma[3,1,30], BDT_D0M_PiPi_CB2_alpharight, BDT%(n)i_D0M_PiPi_CB2_n[5,0,50])"%({"n":n})) w.factory("SUM::BDT%(n)i_D0M_PiPi(BDT%(n)i_D0M_PiPi_CB1_Frac[0.8,0,1]*BDT%(n)i_D0M_PiPi_CB1,BDT%(n)i_D0M_PiPi_CB2)"%({"n":n})) # Del_Mass signal w.factory("{BDT%(n)i_DelM_PiPi_Gaus1_Frac[0.75,0,1]}"%({"n":n})) w.factory("RooGaussian::BDT%(n)i_DelM_PiPi_Gaus1(Del_Mass,BDT%(n)i_DelM_PiPi_Gaus_Mean[145.5,143,148],BDT%(n)i_DelM_PiPi_Gaus_Sigma1[1,0,5] )"%({"n":n})) w.factory("RooGaussian::BDT%(n)i_DelM_PiPi_Gaus2(Del_Mass,BDT%(n)i_DelM_PiPi_Gaus_Mean_2[145.5,143,148],BDT%(n)i_DelM_PiPi_Gaus_Sigma2[.1,0,2] )"%({"n":n})) #w.factory("{BDT%(n)i_DelM_PiPi_Gaus3_Frac[0.05,0,0.1],BDT%(n)i_DelM_PiPi_Gaus_Mean_2[148,143,152],BDT%(n)i_DelM_PiPi_Gaus_Sigma3[10,0,20]}"%({"n":n})) w.factory("RooGaussian::BDT%(n)i_DelM_PiPi_Gaus3(Del_Mass,BDT%(n)i_DelM_PiPi_Gaus_Mean_3[148,143,152],BDT%(n)i_DelM_PiPi_Gaus_Sigma3[10,0,20] )"%({"n":n})) w.factory("SUM::BDT%(n)i_DelM_PiPi(BDT%(n)i_DelM_PiPi_Gaus3_Frac[0.05,0,0.1]*BDT%(n)i_DelM_PiPi_Gaus3,BDT%(n)i_DelM_PiPi_Gaus2_Frac[0.2,0,1]*BDT%(n)i_DelM_PiPi_Gaus2,BDT%(n)i_DelM_PiPi_Gaus1)"%({"n":n})) #w.factory("SUM::BDT%(n)i_DelM_Sig(BDT%(n)i_DelM_PiPi_Gaus2_Frac[0.2,0,1]*BDT%(n)i_DelM_PiPi_Gaus2,BDT%(n)i_DelM_PiPi_Gaus1)"%({"n":n})) 
w.factory("PROD::BDT%(n)i_PiPi(BDT%(n)i_DelM_PiPi,BDT%(n)i_D0M_PiPi)"%({"n":n})) #w.factory("SUM::BDT%(n)i_Final_PDF_Blind(BDT%(n)i_N_Sig*BDT%(n)i_Sig,BDT%(n)i_N_Comb[1000,0,10000]*BDT%(n)i_Comb_Blind)"%({"n":n})) #w.factory("SUM::BDT%(n)i_Final_PDF(BDT%(n)i_N_Sig*BDT%(n)i_Sig,BDT%(n)i_N_Comb*BDT%(n)i_Comb)"%({"n":n})) w.factory("SUM::BDT%(n)i_Final_PDF_Blind(BDT%(n)i_N_Sig*BDT%(n)i_Sig,BDT%(n)i_N_Comb[1000,0,10000]*BDT%(n)i_Comb_Blind,BDT%(n)i_N_PiPi*BDT%(n)i_PiPi)"%({"n":n})) w.factory("SUM::BDT%(n)i_Final_PDF(BDT%(n)i_N_Sig*BDT%(n)i_Sig,BDT%(n)i_N_Comb*BDT%(n)i_Comb,BDT%(n)i_N_PiPi*BDT%(n)i_PiPi)"%({"n":n})) w.factory("PROD::BDT%(n)i_Final_PDF_Constrained(BDT%(n)i_Final_PDF,Norm_Constraint)"%({"n":n})) #w.obj('Norm_D0M_Sig_Gaus1_Frac').setMin(0.0) ; w.obj('Norm_D0M_Sig_Gaus1_Frac').setMax(1.0) #w.obj('Norm_D0M_Sig_Gaus1_Frac').setVal(0.429624062534) ; w.obj('Norm_D0M_Sig_Gaus1_Frac').setError(0.0289511133792) #w.obj('Norm_D0M_Sig_Gaus1_Frac').setConstant(False) #w.obj('Norm_D0M_Sig_Gaus_Mean').setMin(1850.0) ; w.obj('Norm_D0M_Sig_Gaus_Mean').setMax(1880.0) #w.obj('Norm_D0M_Sig_Gaus_Mean').setVal(1867.01515277) ; w.obj('Norm_D0M_Sig_Gaus_Mean').setError(0.0296569841856) #w.obj('Norm_D0M_Sig_Gaus_Mean').setConstant(False) #w.obj('Norm_D0M_Sig_Gaus1_Sigma').setMin(0.0) ; w.obj('Norm_D0M_Sig_Gaus1_Sigma').setMax(10.0) #w.obj('Norm_D0M_Sig_Gaus1_Sigma').setVal(6.92118344347) ; w.obj('Norm_D0M_Sig_Gaus1_Sigma').setError(0.117795059995) #w.obj('Norm_D0M_Sig_Gaus1_Sigma').setConstant(False) #w.obj('Norm_D0M_Sig_Gaus2_Sigma').setMin(5.0) ; w.obj('Norm_D0M_Sig_Gaus2_Sigma').setMax(20.0) #w.obj('Norm_D0M_Sig_Gaus2_Sigma').setVal(10.3140938882) ; w.obj('Norm_D0M_Sig_Gaus2_Sigma').setError(0.117955520203) #w.obj('Norm_D0M_Sig_Gaus2_Sigma').setConstant(False) #w.obj('Norm_DelM_Bkg_a').setMin(-100.0) ; w.obj('Norm_DelM_Bkg_a').setMax(-1.0) #w.obj('Norm_DelM_Bkg_a').setVal(-16.1932460031) ; w.obj('Norm_DelM_Bkg_a').setError(0.43302849663) 
#w.obj('Norm_DelM_Bkg_a').setConstant(False) #w.obj('Norm_DelM_Bkg_b').setMin(-0.5) ; w.obj('Norm_DelM_Bkg_b').setMax(2.0) #w.obj('Norm_DelM_Bkg_b').setVal(0.178920942238) ; w.obj('Norm_DelM_Bkg_b').setError(0.0376477247211) #w.obj('Norm_DelM_Bkg_b').setConstant(False) #w.obj('Norm_DelM_Bkg_c').setMin(7.0) ; w.obj('Norm_DelM_Bkg_c').setMax(350.0) #w.obj('Norm_DelM_Bkg_c').setVal(36.1602832374) ; w.obj('Norm_DelM_Bkg_c').setError(5.19925002062) #w.obj('Norm_DelM_Bkg_c').setConstant(False) #w.obj('Norm_DelM_Bkg_m0').setMin(137.5) ; w.obj('Norm_DelM_Bkg_m0').setMax(140.5) #w.obj('Norm_DelM_Bkg_m0').setVal(139.316358242) ; w.obj('Norm_DelM_Bkg_m0').setError(5.10021351516e-05) #w.obj('Norm_DelM_Bkg_m0').setConstant(False) #w.obj('Norm_DelM_Sig_Gaus1_Frac').setMin(0.0) ; w.obj('Norm_DelM_Sig_Gaus1_Frac').setMax(1.) #w.obj('Norm_DelM_Sig_Gaus1_Frac').setVal(0.279248861884) ; w.obj('Norm_DelM_Sig_Gaus1_Frac').setError(0.0191547718614) #w.obj('Norm_DelM_Sig_Gaus1_Frac').setConstant(False) #w.obj('Norm_DelM_Sig_Gaus_Mean').setMin(145.0) ; w.obj('Norm_DelM_Sig_Gaus_Mean').setMax(146.0) #w.obj('Norm_DelM_Sig_Gaus_Mean').setVal(145.448069656) ; w.obj('Norm_DelM_Sig_Gaus_Mean').setError(0.00294967951486) #w.obj('Norm_DelM_Sig_Gaus_Mean').setConstant(False) #w.obj('Norm_DelM_Sig_Gaus1_Sigma').setMin(0.0) ; w.obj('Norm_DelM_Sig_Gaus1_Sigma').setMax(1.0) #w.obj('Norm_DelM_Sig_Gaus1_Sigma').setVal(0.429900766218) ; w.obj('Norm_DelM_Sig_Gaus1_Sigma').setError(0.0119155696871) #w.obj('Norm_DelM_Sig_Gaus1_Sigma').setConstant(False) #w.obj('Norm_DelM_Sig_Gaus2_Sigma').setMin(0.1) ; w.obj('Norm_DelM_Sig_Gaus2_Sigma').setMax(2.0) #w.obj('Norm_DelM_Sig_Gaus2_Sigma').setVal(0.827483577936) ; w.obj('Norm_DelM_Sig_Gaus2_Sigma').setError(0.00898522299303) #w.obj('Norm_DelM_Sig_Gaus2_Sigma').setConstant(False) if config['mode'] == 'mc': w.factory("SIMUL::Final_PDF(DataSet,BDT1=BDT1_Sig,BDT2=BDT2_Sig,BDT3=BDT3_Sig)") #w.obj("Final_PDF").Print("v") #w.obj("BDT1_Sig").Print("v") elif 
config['mode'] == 'mcpipi': w.factory("SIMUL::Final_PDF(DataSet,BDT1=BDT1_PiPi,BDT2=BDT2_PiPi,BDT3=BDT3_PiPi)") elif config['mode'] == 'datapretoy': w.factory("SIMUL::Final_PDF(DataSet,BDT1=BDT1_Comb_Blind,BDT2=BDT2_Comb_Blind,BDT3=BDT3_Comb_Blind)") elif config['mode'] == 'toy': w.factory("SIMUL::Final_PDF(DataSet,Norm=Norm_Final_PDF,BDT1=BDT1_Final_PDF,BDT2=BDT2_Final_PDF,BDT3=BDT3_Final_PDF)") w.factory("SIMUL::Final_PDF_Background(DataSet,Norm=Norm_Final_PDF,BDT1=BDT1_Comb,BDT2=BDT2_Comb,BDT3=BDT3_Comb)") w.factory("SIMUL::Final_PDF_Constrained(DataSet,BDT1=BDT1_Final_PDF_Constrained,BDT2=BDT2_Final_PDF_Constrained,BDT3=BDT3_Final_PDF_Constrained)") elif config['mode'] == 'mcnorm': w.factory("SIMUL::Final_PDF(DataSet,Norm=Norm_Sig)") elif config['mode'] == 'norm': #w.factory("SUM::Norm_Final_PDF(Norm_N_Sig[65000,20000,110000]*Norm_Sig,Norm_N_Prompt[35000,20000,60000]*Norm_Prompt,Norm_N_Comb[67000,1000,90000]*Norm_Comb,Norm_N_MisId[1300,100,3000]*Norm_MisId,Norm_N_MisId_Prompt[500,10,1000]*Norm_MisId_Prompt)") #w.factory("SUM::Norm_Final_PDF(Norm_N_Sig[65000,20000,110000]*Norm_Sig,Norm_N_Prompt[35000,20000,60000]*Norm_Prompt,Norm_N_Comb[67000,1000,90000]*Norm_Comb,Norm_N_MisId[1300,100,3000]*Norm_MisId)") #w.factory("SUM::Norm_Final_PDF(Norm_N_Sig[65000,20000,110000]*Norm_Sig,Norm_N_Prompt[35000,20000,60000]*Norm_Prompt,Norm_N_Comb[67000,1000,90000]*Norm_Comb,Norm_N_MisId_Prompt[500,10,1000]*Norm_MisId_Prompt)") #w.factory("SUM::Norm_Final_PDF(Norm_N_Sig*Norm_Sig,Norm_N_Prompt[35000,20000,60000]*Norm_Prompt,Norm_N_Comb[67000,1000,90000]*Norm_Comb,Norm_N_MisRecod*Norm_MisRecod)") w.factory("SUM::Norm_Final_PDF(Norm_N_Sig*Norm_Sig,Norm_N_Prompt[35000,20000,60000]*Norm_Prompt,Norm_N_Comb[67000,1000,90000]*Norm_Comb)") w.factory("SIMUL::Final_PDF(DataSet,Norm=Norm_Final_PDF)") elif config['mode'] == 'data': 
#w.factory("SUM::Norm_Final_PDF(Norm_N_Sig[65000,20000,110000]*Norm_Sig,Norm_N_Prompt[35000,20000,60000]*Norm_Prompt,Norm_N_Comb[67000,1000,90000]*Norm_Comb,Norm_N_MisId[1300,100,3000]*Norm_MisId,Norm_N_MisId_Prompt[500,10,1000]*Norm_MisId_Prompt)") #w.factory("SUM::Norm_Final_PDF(Norm_N_Sig[65000,20000,110000]*Norm_Sig,Norm_N_Prompt[35000,20000,60000]*Norm_Prompt,Norm_N_Comb[67000,1000,90000]*Norm_Comb,Norm_N_MisId[1300,100,3000]*Norm_MisId)") #w.factory("SUM::Norm_Final_PDF(Norm_N_Sig[65000,20000,110000]*Norm_Sig,Norm_N_Prompt[35000,20000,60000]*Norm_Prompt,Norm_N_Comb[67000,1000,90000]*Norm_Comb,Norm_N_MisId_Prompt[500,10,1000]*Norm_MisId_Prompt)") w.factory("SUM::Norm_Final_PDF(Norm_N_Sig*Norm_Sig,Norm_N_Prompt[35000,20000,60000]*Norm_Prompt,Norm_N_Comb[67000,1000,90000]*Norm_Comb)") w.factory("SIMUL::Final_PDF(DataSet,Norm=Norm_Final_PDF,BDT1=BDT1_Final_PDF,BDT2=BDT2_Final_PDF,BDT3=BDT3_Final_PDF)") w.factory("SIMUL::Final_PDF_Background(DataSet,Norm=Norm_Final_PDF,BDT1=BDT1_Comb,BDT2=BDT2_Comb,BDT3=BDT3_Comb)") w.factory("SIMUL::Final_PDF_Constrained(DataSet,BDT1=BDT1_Final_PDF_Constrained,BDT2=BDT2_Final_PDF_Constrained,BDT3=BDT3_Final_PDF_Constrained)") w.obj('Norm_N_Comb').setMin(2000.0*config["normScale"]) ; w.obj('Norm_N_Comb').setMax(15000.0*config["normScale"]) w.obj('Norm_N_Comb').setVal(7850.56516616*config["normScale"]) ; w.obj('Norm_N_Comb').setError(177.821454726/sqrt(config["normScale"])) w.obj('Norm_N_MisRecod').setMin(100.0*config["normScale"]) ; w.obj('Norm_N_MisRecod').setMax(40000.0*config["normScale"]) w.obj('Norm_N_MisRecod').setVal(20316.944222*config["normScale"]) ; w.obj('Norm_N_MisRecod').setError(248.324102117/sqrt(config["normScale"])) w.obj('Norm_N_Prompt').setMin(6000.0*config["normScale"]) ; w.obj('Norm_N_Prompt').setMax(60000.0*config["normScale"]) w.obj('Norm_N_Prompt').setVal(29326.825063*config["normScale"]) ; w.obj('Norm_N_Prompt').setError(247.656992623/sqrt(config["normScale"])) 
w.obj('Norm_N_Sig').setMin(2000.0*config["normScale"]) ; w.obj('Norm_N_Sig').setMax(270000.0*config["normScale"]) w.obj('Norm_N_Sig').setVal(135370.*config["normScale"]) ; w.obj('Norm_N_Sig').setError(430./sqrt(config["normScale"])) if mode_later_than('mcnorm',config['mode']): # nll analyse, nlls up to 1610.0 on Thu Jul 31 15:14:35 2014 run on fitResult.norm_kpi_2011_6.0.0.4.root # # WARNING derivitive_ratio bottom is zero 169 689.330300 696.763943 696.763943 # INFO extrapolate lower edge [False, -0.2368609160806532, 1610.0] # INFO extrapolate upper edge [False, 0.300022797868122, 1610.0] w.obj('Norm_D0M_Bkg_Exp_c').setMin(-0.236860916081) ; w.obj('Norm_D0M_Bkg_Exp_c').setMax(0.300022797868) ; w.obj('Norm_D0M_Bkg_Exp_c').setVal(-0.0221074305011) ; w.obj('Norm_D0M_Bkg_Exp_c').setError(0.00127007751403) # WARNING derivitive_ratio bottom is zero 169 5199.217924 5509.102918 5509.102918 w.obj('Norm_D0M_Sig_Gaus1_Sigma').setMin(5.8) ; w.obj('Norm_D0M_Sig_Gaus1_Sigma').setMax(8.6) ; w.obj('Norm_D0M_Sig_Gaus1_Sigma').setVal(7.2) ; w.obj('Norm_D0M_Sig_Gaus1_Sigma').setError(0.0305815254416) # WARNING derivitive_ratio bottom is zero 119 62.077791 62.077791 62.077791 # WARNING Dont want to expand max range 99.112383 too far # INFO extrapolate upper edge [False, 10.5, 1610.0] # WARNING upper edge 10.500000 hits limit 5.000000 w.obj('Norm_D0M_Sig_Gaus1_alpha').setMin(1.14) ; w.obj('Norm_D0M_Sig_Gaus1_alpha').setMax(5.0) ; w.obj('Norm_D0M_Sig_Gaus1_alpha').setVal(2.298) ; w.obj('Norm_D0M_Sig_Gaus1_alpha').setError(0.0393012987558) # WARNING Dont want to expand max range 899.409174 too far # INFO extrapolate upper edge [False, 35.0, 1610.0] # WARNING upper edge 35.000000 hits limit 20.000000 w.obj('Norm_D0M_Sig_Gaus1_n').setMin(0.3375) ; w.obj('Norm_D0M_Sig_Gaus1_n').setMax(20.0) ; w.obj('Norm_D0M_Sig_Gaus1_n').setVal(2.30375) ; w.obj('Norm_D0M_Sig_Gaus1_n').setError(0.537418026352) # WARNING derivitive_ratio bottom is zero 127 1374.893616 1418.388895 1418.388895 # INFO 
extrapolate upper edge [False, 10.856461576572277, 1610.0] w.obj('Norm_D0M_Sig_Gaus2_Sigma_Diff').setMin(1.25) ; w.obj('Norm_D0M_Sig_Gaus2_Sigma_Diff').setMax(10.8564615766) ; w.obj('Norm_D0M_Sig_Gaus2_Sigma_Diff').setVal(5.09258463063) ; w.obj('Norm_D0M_Sig_Gaus2_Sigma_Diff').setError(0.115261989641) # WARNING derivitive_ratio bottom is zero 100 768.841852 768.841850 768.841850 # WARNING derivitive_ratio bottom is zero 101 768.841850 768.841850 768.841850 # WARNING derivitive_ratio bottom is zero 102 768.841850 768.841850 768.841850 # WARNING derivitive_ratio bottom is zero 103 768.841850 768.841850 768.841850 # WARNING derivitive_ratio bottom is zero 104 768.841850 768.841850 768.841850 # WARNING derivitive_ratio bottom is zero 105 768.841850 768.841850 768.841850 # WARNING derivitive_ratio bottom is zero 106 768.841850 768.841850 768.841850 # WARNING derivitive_ratio bottom is zero 107 768.841850 768.841850 768.841850 # WARNING derivitive_ratio bottom is zero 108 768.841850 768.841850 768.841850 # INFO extrapolate upper edge [False, 10.11481217459233, 1610.0] # WARNING upper edge 10.114812 hits limit 5.000000 w.obj('Norm_D0M_Sig_Gaus2_alpha').setMin(1.44) ; w.obj('Norm_D0M_Sig_Gaus2_alpha').setMax(5.0) ; w.obj('Norm_D0M_Sig_Gaus2_alpha').setVal(2.152) ; w.obj('Norm_D0M_Sig_Gaus2_alpha').setError(0.0415387821092) # WARNING derivitive_ratio bottom is zero 230 629.750229 630.182190 630.182190 # INFO extrapolate lower edge [False, -0.9760496583225814, 1610.0] # WARNING Dont want to expand max range 50.491612 too far # INFO extrapolate upper edge [False, 35.0, 1610.0] # WARNING lower edge -0.976050 hits limit 0.000000 # WARNING upper edge 35.000000 hits limit 20.000000 w.obj('Norm_D0M_Sig_Gaus2_n').setMin(0.0) ; w.obj('Norm_D0M_Sig_Gaus2_n').setMax(20.0) ; w.obj('Norm_D0M_Sig_Gaus2_n').setVal(1.0) ; w.obj('Norm_D0M_Sig_Gaus2_n').setError(0.0563707589481) # WARNING derivitive_ratio bottom is zero 102 100475.575388 104489.200281 104489.200281 
w.obj('Norm_D0M_Sig_Gaus_Mean').setMin(1864.1) ; w.obj('Norm_D0M_Sig_Gaus_Mean').setMax(1868.6) ; w.obj('Norm_D0M_Sig_Gaus_Mean').setVal(1866.35) ; w.obj('Norm_D0M_Sig_Gaus_Mean').setError(0.0282504176865) # WARNING derivitive_ratio bottom is zero 102 950.574510 1090.331482 1090.331482 # INFO extrapolate upper edge [False, 8.675242367234118, 1610.0] w.obj('Norm_DelM_Bkg_a').setMin(-30.7) ; w.obj('Norm_DelM_Bkg_a').setMax(8.67524236723) ; w.obj('Norm_DelM_Bkg_a').setVal(-14.9499030531) ; w.obj('Norm_DelM_Bkg_a').setError(0.352110679985) # WARNING derivitive_ratio bottom is zero 286 2509.484557 2524.607389 2524.607389 # INFO extrapolate lower edge [False, 135.51700895240305, 1610.0] w.obj('Norm_DelM_Bkg_m0').setMin(135.517008952) ; w.obj('Norm_DelM_Bkg_m0').setMax(140.245) ; w.obj('Norm_DelM_Bkg_m0').setVal(139.29940179) ; w.obj('Norm_DelM_Bkg_m0').setError(0.00278162353152) # WARNING derivitive_ratio bottom is zero 168 12637.921551 12941.900194 12941.900194 w.obj('Norm_DelM_Sig_Gaus1_Sigma').setMin(0.275) ; w.obj('Norm_DelM_Sig_Gaus1_Sigma').setMax(0.58) ; w.obj('Norm_DelM_Sig_Gaus1_Sigma').setVal(0.4275) ; w.obj('Norm_DelM_Sig_Gaus1_Sigma').setError(0.00623690681335) # WARNING derivitive_ratio bottom is zero 140 7646.478871 7794.222379 7794.222379 w.obj('Norm_DelM_Sig_Gaus2_Sigma_Diff').setMin(0.145) ; w.obj('Norm_DelM_Sig_Gaus2_Sigma_Diff').setMax(0.56) ; w.obj('Norm_DelM_Sig_Gaus2_Sigma_Diff').setVal(0.311) ; w.obj('Norm_DelM_Sig_Gaus2_Sigma_Diff').setError(0.00961908230633) # WARNING derivitive_ratio bottom is zero 135 7889.258317 7968.149060 7968.149060 w.obj('Norm_DelM_Sig_Gaus3_Mean').setMin(144.385) ; w.obj('Norm_DelM_Sig_Gaus3_Mean').setMax(147.52) ; w.obj('Norm_DelM_Sig_Gaus3_Mean').setVal(145.9525) ; w.obj('Norm_DelM_Sig_Gaus3_Mean').setError(0.0297743321414) # WARNING derivitive_ratio bottom is zero 127 1441.937761 1456.115731 1456.115731 # INFO extrapolate lower edge [False, -1.562458158266192, 1610.0] # INFO extrapolate upper edge [False, 
3.2886360831918586, 1610.0] # WARNING lower edge -1.562458 hits limit 0.000000 w.obj('Norm_DelM_Sig_Gaus3_Sigma_Diff').setMin(0.0) ; w.obj('Norm_DelM_Sig_Gaus3_Sigma_Diff').setMax(3.28863608319) ; w.obj('Norm_DelM_Sig_Gaus3_Sigma_Diff').setVal(0.657727216638) ; w.obj('Norm_DelM_Sig_Gaus3_Sigma_Diff').setError(0.037812360311) # WARNING derivitive_ratio bottom is zero 102 17487.965272 18067.587218 18067.587218 w.obj('Norm_DelM_Sig_Gaus_Mean').setMin(145.25) ; w.obj('Norm_DelM_Sig_Gaus_Mean').setMax(145.6) ; w.obj('Norm_DelM_Sig_Gaus_Mean').setVal(145.425) ; w.obj('Norm_DelM_Sig_Gaus_Mean').setError(0.00389207378424) # WARNING derivitive_ratio bottom is zero 171 1440.310455 1466.512994 1466.512994 # INFO extrapolate lower edge [False, -1161.7298062300902, 1610.0] # INFO extrapolate upper edge [False, 6502.909990291508, 1610.0] # WARNING lower edge -1161.729806 hits limit 0.000000 w.obj('Norm_N_Comb').setMin(0.0*config["normScale"]) ; w.obj('Norm_N_Comb').setMax(6502.90999029*config["normScale"]) ; w.obj('Norm_N_Comb').setVal(1300.58199806*config["normScale"]) ; w.obj('Norm_N_Comb').setError(69.7188799676/sqrt(config["normScale"])) # WARNING derivitive_ratio bottom is zero 142 1629.344232 1679.974917 1679.974917 # INFO extrapolate lower edge [False, 10931.649121116747, 1610.0] w.obj('Norm_N_Prompt').setMin(10931.6491211*config["normScale"]) ; w.obj('Norm_N_Prompt').setMax(30995.7616494*config["normScale"]) ; w.obj('Norm_N_Prompt').setVal(18957.2941324*config["normScale"]) ; w.obj('Norm_N_Prompt').setError(247.513270145/sqrt(config["normScale"])) # WARNING derivitive_ratio bottom is zero 148 1585.358526 1642.672043 1642.672043 # INFO extrapolate lower edge [False, 64907.007351854576, 1610.0] w.obj('Norm_N_Sig').setMin(64907.0073519*config["normScale"]) ; w.obj('Norm_N_Sig').setMax(99148.5248*config["normScale"]) ; w.obj('Norm_N_Sig').setVal(82027.7660759*config["normScale"]) ; w.obj('Norm_N_Sig').setError(349.170253164/sqrt(config["normScale"])) 
#w.obj('Norm_DelM_Bkg_b').setVal(0) #w.obj('Norm_DelM_Bkg_b').setConstant(True) w.obj('Norm_D0M_Sig_Gaus1_Frac').setVal(0.7) #w.obj('Norm_D0M_Sig_Gaus3_Frac').setVal(0.35) w.obj('Norm_DelM_Sig_Gaus1_Frac').setVal(0.25) w.obj('Norm_DelM_Sig_Gaus3_Frac').setVal(0.15) w.obj('Norm_D0M_Sig_Gaus1_Frac').setConstant(True) #w.obj('Norm_D0M_Sig_Gaus3_Frac').setConstant(True) w.obj('Norm_DelM_Sig_Gaus1_Frac').setConstant(True) w.obj('Norm_DelM_Sig_Gaus3_Frac').setConstant(True) config['postHook'](w) return w
# Load the BDT response histograms for two TMVA trainings and set up a
# RooFit model of the signal BDT shape.  The training directories are
# selected on the command line (sys.argv[1], sys.argv[2]).
f = ROOT.TFile( "/cms/ldap_home/wjjang/wj_nanoAOD_CMSSW_9_4_4/src/nano/analysis/test/vts/tmva/output/vts_dR_04_Jet.root" )
t = f.Get(str(sys.argv[1]) + "/Method_BDT/BDT")
t2 = f.Get(str(sys.argv[2]) + "/Method_BDT/BDT")
# Signal / background BDT response histograms for each training.
h_sig = t.Get("MVA_BDT_S")
h_bkg = t.Get("MVA_BDT_B")
h_sig2 = t2.Get("MVA_BDT_S")
h_bkg2 = t2.Get("MVA_BDT_B")
w = RooWorkspace()
c = ROOT.TCanvas("plots", "plots", 1920, 1080)
c.Divide(4, 2)
# Observable: the BDT discriminant on [-1, 1].
w.factory("bdt[-1,1]")
rooh_sig = ROOT.RooDataHist("sigHist", "sigHist", ROOT.RooArgList(w.var("bdt")), h_sig)
rooh_bkg = ROOT.RooDataHist("bkgHist", "bkgHist", ROOT.RooArgList(w.var("bdt")), h_bkg)
# 'import' is a Python keyword, so the RooWorkspace method must be
# fetched with getattr.
getattr(w, 'import')(rooh_sig)
getattr(w, 'import')(rooh_bkg)
# BUGFIX: was 'is not -1' (identity comparison with an int literal, a
# SyntaxWarning in Python >= 3.8 that only works via CPython small-int
# caching); value comparison '!= -1' is the correct str.find() idiom.
# NOTE(review): the first test is redundant -- any path containing
# "_JKS_" also contains "JKS_" -- kept for clarity of intent.
if t.GetPath().find("_JKS_") != -1 or t.GetPath().find("JKS_") != -1:
    # Double-Gaussian signal model for the JKS samples.
    w.factory( "Gaussian::sig_1(bdt, meanSig1[-0.0317964,-1, 1.], sigmaSig1[0.0654243,0, 1])" )
    w.factory( "Gaussian::sig_2(bdt, meanSig2[-0.1, -1, 1.], sigmaSig2[0.0656842, 0, 1] )" )
    fit_sig = w.factory("SUM::sig(sig_1,f1[.5, 0, 1]*sig_2)")
# Book the result histograms for the (mass, epsilon) limit scan.
# n_massbins/xedges and n_epsbins/yedges are defined earlier in the file;
# presumably variable-width bin edge arrays -- TODO confirm.
limitHist = TH2D("limit", "limit", n_massbins, xedges, n_epsbins, yedges)
detectableHist = TH2D("detectable", "detectable", n_massbins, xedges, n_epsbins, yedges)
gammactHist = TH2D("gammact", "gammact", n_massbins, xedges, n_epsbins, yedges)
allzHist = TH2D("detectable_allz", "detectable_allz", n_massbins, xedges, n_epsbins, yedges)
prodHist = TH2D("production", "production", n_massbins, xedges, n_epsbins, yedges)
candHist = TH1D("candidates", "candidates", n_massbins, xedges)
# Feldman-Cousins interval bounds per scan point.
fcLowerHist = TH2D("fcLowerLimit", "fcLowerLimit", n_massbins, xedges, n_epsbins, yedges)
fcUpperHist = TH2D("fcUpperLimit", "fcUpperLimit", n_massbins, xedges, n_epsbins, yedges)
# Profile-likelihood-ratio p-value / significance / log(PLR) maps.
plrPvalHist = TH2D("plrPval", "plrPval", n_massbins, xedges, n_epsbins, yedges)
plrSigHist = TH2D("plrSig", "plrSig", n_massbins, xedges, n_epsbins, yedges)
logplrHist = TH2D("logplr", "logplr", n_massbins, xedges, n_epsbins, yedges)
# Candidate distributions on a rescaled [0, 1] axis.
candRescaledHist = TH1D("candidates_rescaled", "candidates_rescaled", 100, 0, 1.0)
candRescaled2DHist = TH2D("candidates_rescaled_2d", "candidates_rescaled_2d", n_massbins, xedges, 100, 0, 1.0)
# Workspace holding the observables and the vertex-z models.
w = RooWorkspace("w")
# massVar is the name of the mass observable, defined earlier -- TODO confirm.
w.factory("{0}[0,0.1]".format(massVar))
w.factory("uncVZ[-100,100]")
w.factory("uncP[0,10]")
w.factory("cut[0,1]")
w.defineSet("myVars", "{0},uncVZ".format(massVar))
# Fill the dataset from 'events' (a TTree-like source defined earlier).
dataset = RooDataSet("data", "data", events, w.set("myVars"), "")
# Plain Gaussian vertex-z model.
w.factory("Gaussian::vtx_model(uncVZ,mean[-50,50],sigma[0,50])")
gauss_pdf = w.pdf("vtx_model")
# Gaussian core with an exponential tail beyond 'exp_breakpoint':
# piecewise expression switches on (uncVZ - mean) relative to the breakpoint.
w.factory("EXPR::gaussExp('exp( ((@0-@1)<@3)*(-0.5*(@0-@1)^2/@2^2) + ((@0-@1)>=@3)*(-0.5*@3^2/@2^2-(@0-@1-@3)/@4))',uncVZ,gauss_mean[-5,-20,20],gauss_sigma[5,1,50],exp_breakpoint[10,0,50],exp_length[3,0.5,20])")
gaussexp_pdf = w.pdf("gaussExp")
# 1D observable set used for the vertex fits below.
w.defineSet("obs_1d", "uncVZ")
obs = w.set("obs_1d")
uncVZ = w.var("uncVZ")
nentries = -1 ## sTest = [-20, -10, -5, -2, -1, -0.5, 0, 0.5, 1, 2, 5, 10, 20] ## sTest = [-5, -2, -1, -0.5, 0, 0.5, 1, 2, 5] sTest = [0] phoPtRange = (12,15) # chains = getChains('v11') chains = pmvTrees.getChains('v15') mcTree = chains['z'] dataTree = chains['data'] w = RooWorkspace('w') ## Define variables mmgMass = w.factory('mmgMass[40, 140]') mmMass = w.factory('mmMass[10, 140]') weight = w.factory('weight[1]') phoScale = w.factory('phoScale[0,-50,50]') weight.SetTitle('pileup.weight') phoScale.setUnit('%') ## Photon scaling fraction, dlog(m_uuy)/dlog(E_y) fPho = w.factory('fPho[0.15*91.2,0,100]') fPhoFunc = w.factory('''FormulaVar::fPhoFunc( "mmgMass * (0.5 - 0.5 * mmMass^2 / mmgMass^2)", {mmMass, mmgMass} )''') sFitted, sFittedErr, names = [], [], []
'kpibarMass': [00., 100.],
}
# open file
fIn = TFile.Open('result_flatNtuple.root')
# load ntuples from file
loadedNtuple = {dName: fIn.Get(dName) for dName in datasetNames}
# create workspace
space = RooWorkspace('thespace', False)
# dict for RooRealVar from vars
# NOTE(review): Python 2 code ('iteritems'); 'vars' shadows the builtin.
datavars = {}
for varName, varRange in vars.iteritems():
    space.factory('{0}[{1},{2}]'.format(varName, varRange[0], varRange[1]))
    # 5MeV per bin.
    space.var(varName).setBins(int((varRange[1] - varRange[0]) * 200))
    datavars.update({varName: space.var(varName)})
# dict for RooDataSet from loaded Ntuples
unbinDatas = {}
for dsetName in datasetNames:
    # '== None' (rather than 'is None') looks deliberate: PyROOT's Get()
    # returns a null proxy that compares equal to None but is not None
    # -- TODO confirm.
    if loadedNtuple[dsetName] == None:
        continue
    unbinDatas.update({
        dsetName: RooDataSet(
            dsetName, dsetName,
def getData(workspace, mean1 = 1., sigma1 = 0.01, mean2 = 1., sigma2 = 0.01, nevents = 5000):
    """Generate toy reco Z->ll mass data and import it into *workspace*.

    The reco mass is m * sqrt(f1 * f2), where m is Breit-Wigner
    distributed around the Z pole and f1, f2 are Gaussian smearing
    factors with means mean1/mean2 and widths sigma1/sigma2.  At most
    *nevents* entries inside the (60, 120) window are kept; generation
    uses a factor-2 oversample, so fewer entries are possible when the
    acceptance is low (best effort).

    Returns the RooDataSet that was imported into *workspace*.
    """
    ## Make a local workspace to use as a factory.
    w = RooWorkspace('w', 'w')
    ## Define the PDF's for m, f1 and f2 in m*sqrt(f1*f2)
    mPdf = w.factory("BreitWigner::mPdf(m[50,130], MZ[91.12], GammaZ[2.5])")
    f1Pdf = w.factory("Gaussian::f1Pdf(f1[0.5, 2], mean1[%f], sigma1[%f])" % (mean1, sigma1))
    f2Pdf = w.factory("Gaussian::f2Pdf(f2[0.5, 2], mean2[%f], sigma2[%f])" % (mean2, sigma2))
    ## Import the PDF's in the given workspace.
    ## NOTE(review): RenameAllVariables takes a rename *suffix*, so this
    ## appends the literal string "True" to every variable name -- confirm
    ## a boolean was not intended here.
    workspace.Import(mPdf, RenameAllVariables("True"))
    workspace.Import(f1Pdf, RenameAllVariables("True"))
    workspace.Import(f2Pdf, RenameAllVariables("True"))
    ## Generate samples of M, F1 and F2 with a factor-2 oversample to
    ## compensate for entries lost to the (60, 120) acceptance window.
    ## (The original comment said "10% margin", contradicting the code.)
    moreEvents = int(2*nevents)
    mData = mPdf.generate(RooArgSet(w.var("m")), moreEvents, NumCPU(3))
    f1Data = f1Pdf.generate(RooArgSet(w.var("f1")), moreEvents, NumCPU(3))
    f2Data = f2Pdf.generate(RooArgSet(w.var("f2")), moreEvents, NumCPU(3))
    ## Create the new data with toy reco mass
    data = RooDataSet('data', 'toy reco Z->ll mass data',
                      RooArgSet(w.factory('mass[40,140]')))
    entry = data.get()
    ## Loop over the generated values and fill the new reco mass data.
    for i in range(moreEvents):
        ## Do we have enough entries already?
        if data.sumEntries() >= nevents:
            break
        ## Get the values of the random variables m, f1, f2.
        m = mData.get(i).first().getVal()
        f1 = f1Data.get(i).first().getVal()
        f2 = f2Data.get(i).first().getVal()
        ## Here comes the formula!!
        mass = m * sqrt(f1*f2)
        ## Keep only reco masses inside the fit window (chained
        ## comparison replaces the original '60. < mass and mass < 120.').
        if 60. < mass < 120.:
            ## Add the reco mass to the data
            entry.first().setVal(mass)
            data.addFast(entry)
    ## End of loop over the generated values
    workspace.Import(data)
    return data