Example 1
def create_master_workspaces(meta_data):
    pwd = gDirectory.GetPath()
    ws_list = {}
    for (sample,chanlist) in meta_data.getAssociation().iteritems():
        ws_list[sample] = {}
        for (channel,proclist) in chanlist.iteritems():            
            #make the workspace we're going to use
            this_ws = RooWorkspace('%s-%s'%(channel,sample))
            initialize_workspace(this_ws)
            ws_list[sample][channel] = {}
            for (process,subproclist) in proclist.iteritems():
                print sample, channel, process
                if 'HToZG' in process:
                    ws_list[sample][channel][process] = []
                for (subproc,info) in subproclist.iteritems():
                    print '\tprocessing: %s'%subproc
                    input_file = info['input_file']
                    info['num_mc_events'] = -1
                    if input_file == '':
                        print '\t no input file found! Skipping!'
                        continue
                    if 'data' not in process:                        
                        print '\t mc input = %s'%input_file.split('/')[-1]
                        if 'HToZG' in subproc:                            
                            info['num_mc_events'] = \
                                  extract_higgs_data_in_categories(subproc,
                                                                   input_file,
                                                                   this_ws)
                            ws_list[sample][channel][process].append(subproc)
                        else:
                            info['num_mc_events'] = \
                                  extract_bkg_data_in_categories(subproc,
                                                                 input_file,
                                                                 this_ws)
                    else:
                        print '\t data input = %s'%input_file.split('/')[-1]
                        extract_data_in_categories(channel,input_file,this_ws)
            #end loop over processes and data
            fout_name = '%s_%s_master_workspace.root'%(channel,sample)
            fout = TFile.Open(fout_name,'recreate')
            fout.cd()
            this_ws.Write()
            fout.Close()
            gDirectory.cd(pwd)
            ws_list[sample][channel]['filename'] = fout_name        
    return ws_list
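
Each file written by the loop above holds a single workspace named '%s-%s' % (channel, sample). A minimal read-back sketch, with hypothetical channel and sample names:

from ROOT import TFile

# 'mumu' and 'signal' are hypothetical; the real names come from
# meta_data.getAssociation() in create_master_workspaces().
fin = TFile.Open('mumu_signal_master_workspace.root')
ws = fin.Get('mumu-signal')  # the workspace was named '%s-%s' % (channel, sample)
ws.Print('v')                # list the variables, PDFs and datasets it holds
fin.Close()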
Example 2
def Split_DS(Save_DS=False):
    ws_file = TFile(
        "/afs/cern.ch/user/" + prefix + "/" + user + "/eos/lhcb/user/" +
        prefix + "/" + user +
        "/WrongSign/2015/WorkSpaces/Merged_Merged_WS.root", "read")
    wsp = ws_file.Get("wspace")
    ws_file.Close()
    LOG_D0_IPCHI2_OWNPV = wsp.var("LOG_D0_IPCHI2_OWNPV")
    Dst_DTF_D0_CTAU = wsp.var("Dst_DTF_D0_CTAU")
    Dst_DTF_D0_M = wsp.var("Dst_DTF_D0_M")
    DTF_D0sPi_M = wsp.var("DTF_D0sPi_M")
    DTF_D0sPi_M.setMax(2020)
    DTF_D0sPi_M.setMin(2000)
    dataset_RS_tot = wsp.data("dataset_RS")
    dataset_RS_tot.SetName("dataset_RS_tot")

    varset = RooArgSet("varset")
    varset.add(LOG_D0_IPCHI2_OWNPV)
    varset.add(DTF_D0sPi_M)
    varset.add(Dst_DTF_D0_CTAU)
    varset.add(Dst_DTF_D0_M)

    for i, bin in enumerate(decaytime_binnning):
        start = datetime.now()
        dataset_RS_dtb_init = RooDataSet(
            "dataset_RS_dtb_init", "Decaytime bin" + str(i), dataset_RS_tot,
            varset, "Dst_DTF_D0_CTAU>" + str(bin[0] * ctau) +
            "&&Dst_DTF_D0_CTAU<" + str(bin[1] * ctau) + "&&" + offline_cut)
        dataset_RS = Subtract_Distribution(dataset_RS_dtb_init, DTF_D0sPi_M,
                                           LOG_D0_IPCHI2_OWNPV, str(i))
        dataset_RS.SetName("dataset_RS")
        wspace = RooWorkspace("wspace")
        wsfile2 = TFile(
            "~/eos/lhcb/user/" + prefix + "/" + user +
            "/WrongSign/2015/WorkSpaces/Merged_WS_Bin_" + str(i) + ".root",
            "recreate")
        wspace.rfimport(varset)
        wspace.rfimport(dataset_RS)
        wspace.Write("wspace")
        wsfile2.Close()
        print "Dataset " + str(i) + " creation took  " + str(datetime.now() -
                                                             start) + " \n"

    return True
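
Several of these examples call wspace.rfimport(...). RooWorkspace's import method cannot be spelled as a plain attribute in Python because 'import' is a reserved word, so the original scripts presumably define an alias; a minimal sketch of that assumed alias:

from ROOT import RooWorkspace

# 'import' is a Python keyword, so RooWorkspace::import() has to be fetched
# with getattr; rfimport is assumed to be exactly this alias.
RooWorkspace.rfimport = getattr(RooWorkspace, 'import')

With the alias in place, wspace.rfimport(varset) forwards to RooWorkspace::import(), the same call that Example 5 writes out explicitly as getattr(new_wspace, 'import')(...).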
Example 3
def main(options, args):
    gROOT.Reset()

    #load our super special Polarization PDF
    gROOT.ProcessLine('.L RooPolarizationPdf.cxx+')
    gROOT.ProcessLine('.L RooPolarizationConstraint.cxx+')

    #setup integration
    intConf = ROOT.RooAbsReal.defaultIntegratorConfig()
    #intConf.Print('v')
    #    intConf.method1D().setLabel('RooAdaptiveGaussKronrodIntegrator1D')
    intConf.setEpsAbs(1e-13)
    intConf.setEpsRel(1e-13)
    print intConf.epsAbs()
    print intConf.epsRel()
    #    intConf.method2D().setLabel('RooMCIntegrator')
    #    intConf.methodND().setLabel('RooMCIntegrator')

    output = TFile.Open(options.workspaceName + '.root', 'RECREATE')

    theWS = RooWorkspace(options.workspaceName, 1)

    #save the polarization PDF code in the RooWorkspace
    theWS.importClassCode('RooPolarization*', True)

    buildDataAndCategories(theWS, options, args)

    buildMassAndLifetimePDF(theWS)

    #    if options.fitFrame is not None:
    #        buildPolarizationPDF(theWS,options)

    #root is stupid
    output.cd()

    theWS.Print('v')

    ROOT.RooMsgService.instance().Print()

    doFit(theWS, options)

    theWS.Write()
    output.Close()
Example 4
            numBkg.setVal(snapNumBkg)
            fitres = model.fitTo(dataset, RooFit.Save(True),
                                 RooFit.Minos(useMinos))

            plot = var.frame(RooFit.Title('cuts=' + '&&'.join(cuttitle)))
            dataset.plotOn(plot)
            model.plotOn(
                plot,
                RooFit.Normalization(dataset.sumEntries(),
                                     RooAbsReal.NumEvent))
            SaveResult(origPlot=plot,
                       origVar=var,
                       data={'content': dataset},
                       totPDF={'content': model},
                       fitres=fitres,
                       label=label + '_' + ''.join(str(a) for a in cutIndx[i]),
                       fitDir=fitDir,
                       figDir=figDir,
                       absNumNormalize=dataset.sumEntries())
    # scan lB likelihood end }}}
    canv.Clear()

canv.SaveAs(tMPfIG + ']')
outFile.cd()
space.Write()
outFile.Close()
os.system('mv {0} {1}'.format(tMPrOOTnAME, storeFileName))
os.system('mv {0} {1}'.format(tMPfIG, storeFigName))

txtRecFile.close()
Example 5
def main(options, args):
    variables = options.variables
    procs = options.procs
    cats = options.cats
    print "****variables***"
    print variables
    #from configfile import variables, procs, cats
    if 0 and options.files.startswith("/store"):
        filepath = "root://eoscms/" + str(options.files)
    else:
        filepath = options.files
    print 'filepath is ' + str(filepath)
    infile = TFile.Open(filepath)
    infile.Print()
    wspace = infile.Get("tagsDumper/cms_hgg_13TeV")
    wspace.Print()

    isData = False
    dname = ""
    print variables
    for varset in variables:
        print 'we look at the variable ...'
        print varset
        if not varset[-1]:
            #varset[-1]=varset[-2]
            varset = (varset[0], varset[1], varset[2], varset[2])
        if not varset[1]:
            varset = (varset[0], varset[0], varset[2], [1, 1])
            isData = True
        datasetsReduced = {}
        c = TCanvas("c", "", 1)
        binning1 = varset[-2]
        binning1[-1] = binning1[-2] * 1.2
        print binning1
        binning2 = varset[-1]
        binning2[-1] = binning2[-2] * 1.2
        print binning2
        th2sigtot = TH2D(
            "signalEv_" + str(varset[0]) + "_vs_" + str(varset[1]) + "_all",
            "signalEv_" + str(varset[0]) + "_vs_" + str(varset[1]) + "_all",
            len(binning1) - 1, np.asarray(binning1),
            len(binning2) - 1, np.asarray(binning2))
        th2sigtot.GetXaxis().SetTitle(str(varset[0]))
        th2sigtot.GetYaxis().SetTitle(str(varset[1]))
        for proc in procs:
            for cat in cats:
                print "reading ds " + str(proc) + "_" + str(cat) + " from ws"
                dataset = wspace.data(str(proc) + "_" + str(cat))
                dataset.Print()
                th2sig = TH2D(
                    "signalEv_" + str(varset[0]) + "_vs_" + str(varset[1]) +
                    "_" + str(proc) + "_" + str(cat),
                    "signalEv_" + str(varset[0]) + "_vs_" + str(varset[1]) +
                    "_" + str(proc) + "_" + str(cat),
                    len(binning1) - 1, np.asarray(binning1),
                    len(binning2) - 1, np.asarray(binning2))
                th2sig.GetXaxis().SetTitle(str(varset[0]))
                th2sig.GetYaxis().SetTitle(str(varset[1]))
                for bound1 in range(len(varset[-2]) - 1):
                    for bound2 in range(len(varset[-1]) - 1):
                        if isData:
                            dname = str(proc) + "_" + str(
                                cat) + "_" + varset[0] + "_" + str(
                                    varset[-2][bound1]) + "to" + str(
                                        varset[-2][bound1 + 1])
                        else:
                            #                            dname = str(proc)+"_"+str(cat)+"_"+varset[0]+"_"+str(varset[-2][bound1])+"to"+str(varset[-2][bound1+1])+"_Vs_"+varset[1]+"_"+str(varset[-1][bound2])+"to"+str(varset[-1][bound2+1])
                            dname = str(
                                proc.split('_', 1)[0]
                            ) + "_" + varset[0] + "_" + str(
                                varset[-2][bound1]
                            ) + "to" + str(varset[-2][bound1 + 1]) + "_" + str(
                                proc.split('_', 1)[1]) + "_" + str(
                                    cat) + "_" + varset[1] + "_" + str(
                                        varset[-1][bound2]) + "to" + str(
                                            varset[-1][bound2 + 1])
                        datRed = dataset.Clone(dname)
                        datRed.reset()
                        datasetsReduced[dname] = datRed
                val = 0.
                maxEntries = dataset.numEntries()
                if options.maxEntries != -1:
                    maxEntries = options.maxEntries
                for i in range(maxEntries):
                    if i % 10000 == 0:
                        print i
                    iset = dataset.get(i)
                    val1 = iset.getRealValue(varset[0])
                    val2 = iset.getRealValue(varset[1])
                    #                    print val1,val2

                    for bound1, bound2 in (
                        (b1, b2) for b1 in range(len(varset[-2]) - 1)
                            for b2 in range(len(varset[-1]) - 1)):
                        if isData:
                            selCondition = (val1 > varset[-2][bound1]) and (
                                val1 < varset[-2][bound1 + 1])
                            dname = str(proc) + "_" + str(
                                cat) + "_" + varset[0] + "_" + str(
                                    varset[-2][bound1]) + "to" + str(
                                        varset[-2][bound1 + 1])
                        else:
                            selCondition = (val1 > varset[-2][bound1]) and (
                                val1 < varset[-2][bound1 + 1]) and (
                                    val2 > varset[-1][bound2]) and (
                                        val2 < varset[-1][bound2 + 1])
                            #                            dname = str(proc)+"_"+str(cat)+"_"+varset[0]+"_"+str(varset[-2][bound1])+"to"+str(varset[-2][bound1+1])+"_Vs_"+varset[1]+"_"+str(varset[-1][bound2])+"to"+str(varset[-1][bound2+1])
                            dname = str(
                                proc.split('_', 1)[0]
                            ) + "_" + varset[0] + "_" + str(
                                varset[-2][bound1]
                            ) + "to" + str(varset[-2][bound1 + 1]) + "_" + str(
                                proc.split('_', 1)[1]) + "_" + str(
                                    cat) + "_" + varset[1] + "_" + str(
                                        varset[-1][bound2]) + "to" + str(
                                            varset[-1][bound2 + 1])
                        if selCondition:
                            #                            print "filling dataset"
                            ##                            print dataset.weight()
                            datasetsReduced[dname].add(iset, dataset.weight())
                            break
                for bound1 in range(len(varset[-2]) - 1):
                    for bound2 in range(len(varset[-1]) - 1):
                        if isData:
                            dname = str(proc) + "_" + str(
                                cat) + "_" + varset[0] + "_" + str(
                                    varset[-2][bound1]) + "to" + str(
                                        varset[-2][bound1 + 1])
                        else:
                            dname = str(
                                proc.split('_', 1)[0]
                            ) + "_" + varset[0] + "_" + str(
                                varset[-2][bound1]
                            ) + "to" + str(varset[-2][bound1 + 1]) + "_" + str(
                                proc.split('_', 1)[1]) + "_" + str(
                                    cat) + "_" + varset[1] + "_" + str(
                                        varset[-1][bound2]) + "to" + str(
                                            varset[-1][bound2 + 1])

                            print "binning1 ", float(binning1[bound1]) + (
                                float(binning1[bound1 + 1]) -
                                float(binning1[bound1])) / 2.
                            print binning1[bound1 + 1]
                            print binning1[bound1]
                            print "cat1 ", (float(varset[-2][bound1 + 1]) -
                                            float(varset[-2][bound1])) / 2.
                            print "binning2 ", float(binning2[bound2]) + (
                                float(binning2[bound2 + 1]) -
                                float(binning2[bound2])) / 2.
                            print binning2[bound2 + 1]
                            print binning2[bound2]
                            print "cat2 ", (float(varset[-1][bound2 + 1]) -
                                            float(varset[-1][bound2])) / 2.
                            print "sumentries ", datasetsReduced[
                                dname].sumEntries()
                            th2sig.Fill(
                                float(binning1[bound1]) +
                                (float(binning1[bound1 + 1]) -
                                 float(binning1[bound1])) / 2.,
                                float(binning2[bound2]) +
                                (float(binning2[bound2 + 1]) -
                                 float(binning2[bound2])) / 2.,
                                datasetsReduced[dname].sumEntries())

                th2sig.Print("V")
                c.cd()
                if options.logz:
                    c.SetLogz(1)

                th2sigtot.Add(th2sig)
                th2sig.Draw("colz")
                for fmt in savefmts:
                    savename = th2sig.GetName()
                    if options.logz:
                        savename = str(savename) + "_logz"
                    c.SaveAs(str(savename) + str(fmt))
        c.cd()
        if options.logz:
            c.SetLogz(1)
        th2sigtot.Draw("colz")
        savename = th2sigtot.GetName()
        if options.logz:
            savename = str(savename) + "_logz"
        for fmt in savefmts:
            c.SaveAs(str(savename) + str(fmt))
#            print "####  Compare reductions  ####"

        new_wspace = RooWorkspace("cms_hgg_13TeV")
        getattr(new_wspace, 'import')(wspace.var("CMS_hgg_mass"))
        getattr(new_wspace, 'import')(wspace.var("IntLumi"))
        if not isData:
            getattr(new_wspace, 'import')(wspace.var("dZ"))
        getattr(new_wspace, 'import')(wspace.var("weight"))
        #    alldata = new_wspace.allData()
        #    for ds in alldata :
        #        new_wspace.removeSet(str(ds.GetName()))
        outfilename = options.outfile.split('.')[0] + '_' + str(
            varset[0]) + '_3th2.root'
        outfile = TFile(outfilename, 'RECREATE')
        stepsize = max(1, len(datasetsReduced.keys()) // 10)  # at least 1, so each pass makes progress
        iteration = 0
        while (len(datasetsReduced.keys()) > 0):
            print 'iteration ' + str(iteration)
            iteration = iteration + 1
            try:
                outfile
            except NameError:
                outfile = TFile(outfilename, 'UPDATE')

            try:
                new_wspace
            except NameError:
                outfile.ls()
                new_wspace = outfile.Get("cms_hgg_13TeV")
                print "cms_hgg_13TeV;1"
                gDirectory.Delete("cms_hgg_13TeV;1")
                outfile.ls()
#        if not 'outfile' in globals():
#            print 'outfile is not defined'
#            outfile = TFile(options.outfile, 'UPDATE')
#        if not 'new_wspace' in globals():
#            new_wspace = outfile.Get("cms_hgg_13TeV")
#            print 'newspace is not defined'
            written = []
            if stepsize > len(datasetsReduced.keys()):
                stepsize = len(datasetsReduced.keys())
            for ikey in range(stepsize):
                #    for ikey in range(len(datasetsReduced.keys())):
                #        dataset.Print("V")
                #        if dataset.numEntries() > 100:
                getattr(new_wspace, 'import')(
                    datasetsReduced[datasetsReduced.keys()[ikey]])
                datasetsReduced[datasetsReduced.keys()[ikey]].Write()
                written.append(datasetsReduced.keys()[ikey])
            print 'Number of data in ws '
            print len(new_wspace.allData())
            #new_wspace.Print()
            new_wspace.Write()
            outfile.Write()
            outfile.Close()
            for wkey in written:
                del datasetsReduced[wkey]
            del new_wspace
            del outfile
Example 6
key_COMB = []
hist_COMB = []
for i, bin in enumerate(decaytime_binnning):
    if i == 0:
        continue
    start = datetime.now()
    dataset_COMB_CORR_dtb_init = RooDataSet(
        "dataset_COMB_CORR_dtb_init", "Decaytime bin" + str(i),
        dataset_COMB_CORR, varset_small,
        "Dst_DTF_D0_CTAU>" + str(bin[0] * ctau) + "&&Dst_DTF_D0_CTAU<" +
        str(bin[1] * ctau))
    dataset_COMB_CORR_dtb = Subtract_Distribution(dataset_COMB_CORR_dtb_init,
                                                  DTF_D0sPi_M,
                                                  LOG_D0_IPCHI2_OWNPV,
                                                  str(i) + "_comb", True)
    dataset_COMB_CORR_dtb.SetName("dataset_COMB_CORR_dtb")
    print "Background subtraction from combinatorial took " + str(
        datetime.now() - start) + " \n"

    hist_COMB.append(
        RooDataHist("hist_COMB" + str(i), "hist_COMB",
                    RooArgSet(LOG_D0_IPCHI2_OWNPV), dataset_COMB_CORR_dtb))
    key_COMB.append(
        RooKeysPdf("key_COMB_" + str(i), "key_COMB", LOG_D0_IPCHI2_OWNPV,
                   dataset_COMB_CORR_dtb))

#We store the shapes of Log(IPCHI2) for matched candidates in each decay-time bin here.
wspace_2 = RooWorkspace("wspace_key_shapes")
wspace_2.Print("t")
wsfile = TFile(
    "/afs/cern.ch/user/" + prefix + "/" + user + "/eos/lhcb/user/" + prefix +
    "/" + user + "/WrongSign/2015/Secondary_Key_Shapes.root", "recreate")
for k in key_COMB:
    wspace_2.rfimport(k)
wspace_2.Write("wspace")
wsfile.Close()  #close so the workspace is flushed to disk

wspace_3 = RooWorkspace("wspace_hist_shapes")
wspace_3.Print("t")
wsfile = TFile(
    "/afs/cern.ch/user/" + prefix + "/" + user + "/eos/lhcb/user/" + prefix +
    "/" + user + "/WrongSign/2015/Secondary_Hist_Shapes.root", "recreate")
for s in hist_COMB:
    wspace_3.rfimport(s)
wspace_3.Write("wspace")
wsfile.Close()

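The stored shapes can later be pulled back out of the file; a minimal sketch, assuming the "wspace" key used above (path shortened for illustration):

from ROOT import TFile

f = TFile("Secondary_Key_Shapes.root", "read")
w = f.Get("wspace")        # the workspace is written under this key
pdf = w.pdf("key_COMB_1")  # RooKeysPdf for decay-time bin 1
pdf.Print()
f.Close()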
Example 7
    "TMath::Sqrt(1865**2+sPi_M**2 + 2*(Dst_DTF_D0_PE*Dst_DTF_sPi_PE - (Dst_DTF_D0_PX*Dst_DTF_sPi_PX+Dst_DTF_D0_PY*Dst_DTF_sPi_PY+Dst_DTF_D0_PZ*Dst_DTF_sPi_PZ)))",
    RooArgList(sPi_M, Dst_DTF_D0_PE, Dst_DTF_sPi_PE, Dst_DTF_D0_PX,
               Dst_DTF_sPi_PX, Dst_DTF_D0_PY, Dst_DTF_sPi_PY, Dst_DTF_D0_PZ,
               Dst_DTF_sPi_PZ))
dataset_WS.addColumn(DTF_D0sPi_M).setRange(1700, 2100)
dataset_RS.addColumn(DTF_D0sPi_M).setRange(1700, 2100)

try:
    test_mode = sys.argv[2]
except IndexError:
    test_mode = False

wspace = RooWorkspace("wspace")
wspace.Print("t")
if not test_mode:
    #Here is the address of the output file
    wsfile = TFile(
        "/afs/cern.ch/user/" + prefix + "/" + user + "/eos/lhcb/user/" +
        prefix + "/" + user + "/WrongSign/2015/WorkSpaces/WorkSpace" +
        id_seed + ".root", "recreate")
else:
    wsfile = TFile("WorkSpace" + id_seed + ".root", "recreate")
wspace.rfimport(varset)
wspace.rfimport(varset_comb)
wspace.rfimport(dataset_COMB_OS)
wspace.rfimport(dataset_COMB_SS)
wspace.rfimport(dataset_WS)
wspace.rfimport(dataset_RS)
wspace.Write("wspace")
os.remove("/tmp/" + user + "/temp" + id_seed + ".root")
Example 8
    truth = sys.argv[7]  #truth model type!!!

    bs = RooWorkspace('bias_study')

    bs.factory("procWeight[0]")
    bs.factory("puWeight[0]")
    bs.factory("weight[0]")
    bs.factory("Mzg[100,180]")
    bs.var("Mzg").setRange("ROI", mass - 1.5, mass + 1.5)
    bs.var("Mzg").setBins(40000, "cache")
    bs.factory("Mz[0]")
    #bs.factory("dMzg[0,25]")
    #bs.factory("dMz[0,25]")
    bs.factory("r94cat[cat1=1,cat2=2,cat3=3,cat4=4]")
    bs.defineSet("observables", "Mzg,Mz,r94cat,procWeight,puWeight")
    bs.defineSet("observables_weight",
                 "Mzg,Mz,r94cat,procWeight,puWeight,weight")

    prepare_truth_models(bs, category, mass, channel, turnon, truth)

    build_fitting_models(bs, category, mass, order, turnon)

    gen_data_and_fit(bs, ntoys, category, mass, channel, turnon, truth)

    out_f = TFile.Open(
        "bias_study_%s_%s_%s_ntoys%i_cat%i_m%s_order%i.root" %
        (channel, turnon, truth, ntoys, category, str(mass).replace(
            '.', 'p'), order), "recreate")
    bs.Write()
    out_f.Close()
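
The factory() strings above use RooFit's factory language: name[value] creates a constant RooRealVar, name[lo,hi] a ranged one, and name[state=index,...] a RooCategory. A short sketch of the same syntax building a complete PDF:

from ROOT import RooWorkspace

w = RooWorkspace('w')
w.factory('x[100,180]')  # ranged RooRealVar, like Mzg above
w.factory('Gaussian::sig(x,mu[125],sigma[2,0.1,10])')  # PDF plus its parameters
w.pdf('sig').Print()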
Example 9
def main(options, args):

    cfg = options.config
    workspaceName = cfg.get('Global', 'workspace')

    ws = RooWorkspace(workspaceName)

    #ws.Print("v")

    setupWorkspace(ws, options)

    #create -log(likelihood)

    theNLL = ws.pdf('TopLevelPdf').createNLL(
        ws.data('allcountingdata'), RooFit.NumCPU(1),
        RooFit.ConditionalObservables(ws.set('condObs')), RooFit.Verbose(True))

    ws.saveSnapshot('standardmodel', ws.allVars())

    minuit = ROOT.RooMinuit(theNLL)
    minuit.setPrintLevel(1)
    minuit.setPrintEvalErrors(-1)
    minuit.setErrorLevel(.5)

    #find the values of the parameters that minimize the likelihood
    minuit.setStrategy(2)
    minuit.simplex()
    minuit.migrad()
    minuit.hesse()

    #ws.var('err_gl').setConstant(True)
    #ws.var('err_gs').setConstant(True)
    #ws.var('err_gb').setConstant(True)

    ws.defineSet(
        'POI',
        ROOT.RooArgSet(
            ws.var('%s_%s' % (cfg.get(
                'Global', 'par1Name'), cfg.get('Global', 'couplingType'))),
            ws.var('%s_%s' % (cfg.get(
                'Global', 'par2Name'), cfg.get('Global', 'couplingType')))))

    ws.saveSnapshot('%s_fitresult' % cfg.get('Global', 'couplingType'),
                    ws.allVars())

    #create profile likelihood
    level_68 = ROOT.TMath.ChisquareQuantile(
        .68, 2) / 2.0  # delta NLL for 68% confidence level for -log(LR)
    level_95 = ROOT.TMath.ChisquareQuantile(
        .95, 2) / 2.0  # delta NLL for 95% confidence level for -log(LR)

    print
    print '68% CL Delta-NLL 2 DOF=', level_68
    print '95% CL Delta-NLL 2 DOF=', level_95

    minuit.setPrintLevel(1)
    minuit.setPrintEvalErrors(-1)

    minuit.migrad()
    minuit.minos(ws.set('POI'))

    thePlot = minuit.contour(
        ws.var('%s_%s' % (cfg.get(
            'Global', 'par1Name'), cfg.get('Global', 'couplingType'))),
        ws.var('%s_%s' % (cfg.get(
            'Global', 'par2Name'), cfg.get('Global', 'couplingType'))),
        sqrt(2 * level_95), sqrt(2 * level_68))  # here the error is in sigmas

    thePlot.SetName(
        '%s_%s_%s_contour' %
        (cfg.get('Global', 'par1Name'), cfg.get(
            'Global', 'par2Name'), cfg.get('Global', 'couplingType')))

    thePlot.SetTitle('68% & 95% CL on the Best Fit Values of ' +
                     cfg.get('Global', 'par1Name') + ' and ' +
                     cfg.get('Global', 'par2Name'))
    legend = ROOT.TLegend(2.01612903225806439e-01, 7.86016949152542388e-01,
                          7.15725806451612989e-01, 9.13135593220338992e-01)
    legend.SetNColumns(2)
    thePlot.addObject(legend)

    # 1-D Limits

    level_95 = ROOT.TMath.ChisquareQuantile(
        .95, 1) / 2.0  # delta NLL for -log(LR) with 1 dof
    print '95% CL Delta-NLL 1 DOF=', level_95
    minuit.setErrorLevel(level_95)

    #set 1-D limits on parameter 1 with parameter 2 == 0
    ws.var('%s_%s' % (cfg.get(
        'Global', 'par2Name'), cfg.get('Global', 'couplingType'))).setVal(0.0)
    ws.var('%s_%s' % (cfg.get('Global', 'par2Name'),
                      cfg.get('Global', 'couplingType'))).setConstant(True)
    minuit.minos(ws.set('POI'))

    parm1 = ws.var(
        '%s_%s' %
        (cfg.get('Global', 'par1Name'), cfg.get('Global', 'couplingType')))

    print 'parameter 1 value: ' + str(parm1.getVal())

    if not (0 < parm1.getVal() + parm1.getErrorHi()
            and 0 > parm1.getVal() + parm1.getErrorLo()):
        print '95% CL does not cover SM for parameter 1'
    else:
        print '95% CL covers SM for parameter 1'

    par1Line = ROOT.TLine(parm1.getVal() + parm1.getErrorLo(), 0,
                          parm1.getVal() + parm1.getErrorHi(), 0)
    par1Line.SetLineWidth(2)
    par1Line.SetLineColor(ROOT.kRed)

    thePlot.addObject(par1Line)

    #set 1-D limits on parameter 2 with parameter 1 == 0
    ws.var('%s_%s' % (cfg.get('Global', 'par2Name'),
                      cfg.get('Global', 'couplingType'))).setConstant(False)
    ws.var('%s_%s' % (cfg.get(
        'Global', 'par1Name'), cfg.get('Global', 'couplingType'))).setVal(0.0)
    ws.var('%s_%s' % (cfg.get('Global', 'par1Name'),
                      cfg.get('Global', 'couplingType'))).setConstant(True)
    minuit.minos(ws.set('POI'))

    parm2 = ws.var(
        '%s_%s' %
        (cfg.get('Global', 'par2Name'), cfg.get('Global', 'couplingType')))

    print 'parameter 2 value: ' + str(parm2.getVal())

    if not (0 < parm2.getVal() + parm2.getErrorHi()
            and 0 > parm2.getVal() + parm2.getErrorLo()):
        print '95% CL does not cover SM for parameter 2'
    else:
        print '95% CL covers SM for parameter 2'

    par2Line = ROOT.TLine(0,
                          parm2.getVal() + parm2.getErrorLo(), 0,
                          parm2.getVal() + parm2.getErrorHi())
    par2Line.SetLineWidth(2)
    par2Line.SetLineColor(ROOT.kRed)

    thePlot.addObject(par2Line)

    ws.var('%s_%s' % (cfg.get('Global', 'par1Name'),
                      cfg.get('Global', 'couplingType'))).setConstant(False)

    #construct likelihood scan histograms
    plot = parm1.frame()
    parm1.setBins(200)
    parm2.setBins(200)

    scanHist = ROOT.TH2F('scan2d_plot', '2D Scan of the Likelihood', 200,
                         parm1.getMin(), parm1.getMax(), 200, parm2.getMin(),
                         parm2.getMax())

    for i in range(200):
        for j in range(200):
            parm1.setVal(parm1.getMin() + (i + .5) *
                         (parm1.getMax() - parm1.getMin()) / 200)
            parm2.setVal(parm2.getMin() + (j + .5) *
                         (parm2.getMax() - parm2.getMin()) / 200)
            scanHist.SetBinContent(i + 1, j + 1, theNLL.getVal())

    profNLL_par1 = theNLL.createProfile(RooArgSet(parm1))
    profNLL_par1_plot = parm1.frame()
    profNLL_par1.plotOn(profNLL_par1_plot)

    profNLL_par2 = theNLL.createProfile(RooArgSet(parm2))
    profNLL_par2_plot = parm2.frame()
    profNLL_par2.plotOn(profNLL_par2_plot)

    initCMSStyle()

    output = TFile.Open(workspaceName + '.root', 'RECREATE')

    ws.Write()
    contCanvas = ROOT.TCanvas('contour_canvas', '', 500, 500)
    thePlot.Draw()
    prettyContour(contCanvas, cfg)
    contCanvas.Write()
    thePlot.Write()

    scanCanvas2D = ROOT.TCanvas('scan2d_canvas', '', 500, 500)
    scanHist.Draw('colz')
    prettyScan(scanCanvas2D, cfg)
    scanCanvas2D.Write()
    scanHist.Write()

    par1ScanCanvas = ROOT.TCanvas('scan1d_par1', '', 500, 500)
    par1ScanCanvas.cd()
    profNLL_par1_plot.Draw()
    par1ScanCanvas.Write()
    profNLL_par1_plot.Write()

    par2ScanCanvas = ROOT.TCanvas('scan1d_par2', '', 500, 500)
    par2ScanCanvas.cd()
    profNLL_par2_plot.Draw()
    par2ScanCanvas.Write()
    profNLL_par2_plot.Write()

    prettyObsPlots(ws, cfg)

    output.Close()

    if options.makeCards:
        print
        print "Creating cards for Higgs Combined Limit calculator!"
        makeHCLCards(ws, cfg)

    return 0
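
The ChisquareQuantile(...)/2 thresholds follow from Wilks' theorem: for n parameters of interest, 2*DeltaNLL is asymptotically chi-square distributed with n degrees of freedom, so the DeltaNLL contour level is half the corresponding chi-square quantile. A quick check of the numbers used above:

import ROOT

# 2*DeltaNLL ~ chi2(ndof = number of POIs), so the DeltaNLL level is quantile/2
print(ROOT.TMath.ChisquareQuantile(0.68, 2) / 2.0)  # ~1.14 (68% CL, 2 DOF)
print(ROOT.TMath.ChisquareQuantile(0.95, 2) / 2.0)  # ~3.00 (95% CL, 2 DOF)
print(ROOT.TMath.ChisquareQuantile(0.95, 1) / 2.0)  # ~1.92 (95% CL, 1 DOF)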
Example 10
def Merged_WS(Save_DS=False):
    file_list = WS_list()
    ws_file = TFile(file_list[0], "read")
    wsp = ws_file.Get("wspace")
    ws_file.Close()
    varset_comb = wsp.allVars()
    B_M = wsp.var("B_M")
    LOG_D0_IPCHI2_OWNPV = wsp.var("LOG_D0_IPCHI2_OWNPV")
    D0_TAU = wsp.var("D0_TAU")
    D0_M = wsp.var("D0_M")
    Dst_M = wsp.var("Dst_M")
    Dst_DTF_D0_CTAU = wsp.var("Dst_DTF_D0_CTAU")
    Dst_DTF_D0_M = wsp.var("Dst_DTF_D0_M")
    D0sPi_M = wsp.var("D0sPi_M")
    B_ENDVERTEX_CHI2 = wsp.var("B_ENDVERTEX_CHI2")
    DTF_D0sPi_M = wsp.var("DTF_D0sPi_M")
    Mu_PT = wsp.var("Mu_PT")
    runNumber = wsp.var("runNumber")

    varset_comb = RooArgSet("varset_comb")
    varset_comb.add(B_M)
    varset_comb.add(D0_TAU)
    varset_comb.add(LOG_D0_IPCHI2_OWNPV)
    varset_comb.add(D0sPi_M)
    varset_comb.add(D0_M)
    varset_comb.add(Mu_PT)
    varset_comb.add(B_ENDVERTEX_CHI2)
    varset_comb.add(runNumber)

    varset = RooArgSet("varset")
    varset.add(D0_M)
    varset.add(D0_TAU)
    varset.add(LOG_D0_IPCHI2_OWNPV)
    varset.add(DTF_D0sPi_M)
    varset.add(Dst_DTF_D0_CTAU)
    varset.add(runNumber)
    varset.add(Dst_DTF_D0_M)

    datasets = {
        'RS': RooDataSet("dataset_RS", "dataset_RS", varset),
        'COMB_OS': RooDataSet("dataset_COMB_OS", "dataset_COMB_OS",
                              varset_comb),
        'COMB_SS': RooDataSet("dataset_COMB_SS", "dataset_COMB_SS",
                              varset_comb)
    }

    for f in file_list:
        print "Adding data from " + f
        ws_file = TFile(f, "read")
        wsp = ws_file.Get("wspace")
        ws_file.Close()
        datasets['RS'].append(wsp.data("dataset_RS"))
        datasets['COMB_OS'].append(wsp.data("dataset_COMB_OS"))
        datasets['COMB_SS'].append(wsp.data("dataset_COMB_SS"))

    if not Save_DS:
        #This is for test purposes only; normally the workspace is saved.
        #Only the three datasets built above exist here (there is no 'WS' set).
        wspace = RooWorkspace("wspace")
        wspace.rfimport(varset_comb)
        wspace.rfimport(datasets['RS'])
        wspace.rfimport(datasets['COMB_OS'])
        wspace.rfimport(datasets['COMB_SS'])
        print "All datasets are added but not written"
        #wspace.Write("wspace")
        return wspace
    else:
        wspace = RooWorkspace("wspace")
        wsfile = TFile(
            "~/eos/lhcb/user/i/ikomarov/WrongSign/2015/WorkSpaces/Merged_Merged_WS.root",
            "recreate")
        wspace.rfimport(varset_comb)
        wspace.rfimport(varset)
        wspace.rfimport(datasets['RS'])
        wspace.rfimport(datasets['COMB_OS'])
        wspace.rfimport(datasets['COMB_SS'])
        print "All datasets are added"
        wspace.Write("wspace")
        return True
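
A usage note: with Save_DS=True this writes the Merged_Merged_WS.root file that Split_DS() in Example 2 reads back; without it, the merged workspace is only returned for inspection:

ws = Merged_WS()         # test mode: build the workspace without writing it
ws.Print('t')
Merged_WS(Save_DS=True)  # merge and write Merged_Merged_WS.root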