Example #1
	def write_datacards(self, datacard_filename_template, root_filename_template, output_directory="."):
		writer = ch.CardWriter(os.path.join("$TAG", datacard_filename_template),
							os.path.join("$TAG", root_filename_template))
		if log.isEnabledFor(logging.DEBUG):
			writer.SetVerbosity(1)

		return writer.WriteCards(output_directory[:-1] if output_directory.endswith("/") else output_directory, self.cb)
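Because the method simply returns the result of CardWriter.WriteCards, the caller can inspect which files were produced; Example #13 below indexes the same return value by the written datacard path. A minimal, hypothetical caller sketch ('harvester' stands in for whatever object defines write_datacards and is not part of the original snippet):

# Hypothetical usage of the method above; 'harvester' is an assumed object name.
datacards = harvester.write_datacards("datacard_$BIN.txt", "shapes_$BIN.root", "output")
for path in sorted(datacards):
    print path  # one entry per datacard written by CardWriter.WriteCards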
Example #2
def write_datacards(cb, datacard_filename_templates, root_filename_template, output_directory):
	for datacard_filename_template in datacard_filename_templates:
		writer = ch.CardWriter(os.path.join("$TAG", datacard_filename_template), os.path.join("$TAG", root_filename_template))
	
		if (len(cb.mass_set()) == 1) and (cb.mass_set()[0] == "*"):
			writer.SetWildcardMasses([])
		
		writer.WriteCards(output_directory, cb)
Example #3
 def write(self, output_datacard, output_shapes):
     logger.info("Create datacard files %s and %s.", output_datacard,
                 output_shapes)
     writer = ch.CardWriter(output_datacard, output_shapes)
     if logger.isEnabledFor(logging.DEBUG):
         writer.SetVerbosity(1)
     writer.CreateDirectories(
         False)  # TODO: FIXME: Does not work without this?
     # writer.SetWildcardMasses([]) # TODO: What is this doing?
     writer.WriteCards("", self.cb)
Example #4
    def harvestEm(self,channel='wenu',charge='both'):
        cmb = ch.CombineHarvester()

        # Read all the cards.
        # CH stores metadata about each object (Observation, Process, Systematic),
        # this is extracted from the card names with some regex
        for card in glob.glob(self.bindir+('/%s_mass*.txt' % channel)):
            cmb.QuickParseDatacard(card, """%s_mass(?<MASS>\d+)_$CHANNEL.card.txt""" % channel)

        # Need a unique bin name for each plus/minus, pt and eta combination.
        # We extracted this part of the datacard name into the channel variable above,
        # so we can just copy it and override the specific bin name that was in all the cards.
        cmb.ForEachObj(lambda obj: obj.set_bin(obj.channel()))

        # We'll have three copies of the observation, one for each mass point.
        # Filter all but one copy.
        cmb.FilterObs(lambda obj: obj.mass() != '%d' % self.mwcentral)

        # Create workspace to hold the morphing pdfs and the mass
        w = ROOT.RooWorkspace('morph', 'morph')
        mass = w.factory('mw[{mwrange}]'.format(mwrange=self.mwrange))

        # BuildRooMorphing will dump a load of debug plots here
        debug = ROOT.TFile(self.bindir+'/debug.root', 'RECREATE')

        # Run for each bin, process combination (only for signal!)
        for b in cmb.bin_set():
            for p in cmb.cp().bin([b]).signals().process_set():
                morphing.BuildRooMorphing(w, cmb, b, p, mass, verbose=True, file=debug)

        # Just to be safe
        mass.setConstant(True)

        # Now the workspace is copied into the CH instance and the pdfs attached to the processes
        # (this relies on us knowing that BuildRooMorphing will name the pdfs in a particular way)
        cmb.AddWorkspace(w, True)
        cmb.cp().process(['W']).ExtractPdfs(cmb, 'morph', '$BIN_$PROCESS_morph', '')

        # Adjust the rateParams a bit - we currently have three for each bin (one for each mass),
        # but we only want one. Easiest to drop the existing ones completely and create new ones
        cmb.syst_type(['rateParam'], False)
        cmb.cp().process(['W']).AddSyst(cmb, 'norm_$BIN', 'rateParam', ch.SystMap()(1.00))

        # Have to set the range by hand
        for sys in cmb.cp().syst_type(['rateParam']).syst_name_set():
            cmb.GetParameter(sys).set_range(0.5, 1.5)

        # Print the contents of the model
        cmb.PrintAll()

        # Write out the cards, one per bin
        outdir=self.bindir+'/wenu_cards_morphed_{charge}'.format(charge=charge)
        writer = ch.CardWriter('$TAG/$BIN.txt', '$TAG/shapes.root')
        writer.SetVerbosity(1)
        writer.WriteCards(outdir, cmb)
Example #5
	def write_datacards(self, datacard_filename_template, root_filename_template, output_directory="."):
		# http://cms-analysis.github.io/CombineHarvester/classch_1_1_card_writer.html#details
		writer = ch.CardWriter(os.path.join("$TAG", datacard_filename_template),
		                       os.path.join("$TAG", root_filename_template))
		if log.isEnabledFor(logging.DEBUG):
			writer.SetVerbosity(1)

		# enable writing datacards in cases where the mass does not have its original meaning
		if (len(self.cb.mass_set()) == 1) and (self.cb.mass_set()[0] == "*"):
			writer.SetWildcardMasses([])

		return writer.WriteCards(output_directory[:-1] if output_directory.endswith("/") else output_directory, self.cb)
Example #6
File: zp.py Project: amkalsi/hlim
def rename_and_write(cb):
    print '>> Setting standardised bin names...'
    ch.SetStandardBinNames(cb)

    writer = ch.CardWriter(
        'LIMITS/$TAG/$MASS/$ANALYSIS_$CHANNEL_$BINID_$ERA.txt',
        'LIMITS/$TAG/common/$ANALYSIS_$CHANNEL.input.root')

    # writer.SetVerbosity(1)
    writer.WriteCards('cmb', cb)
    for chn in chns:
        writer.WriteCards(chn, cb.cp().channel([chn]))
    print '>> Done!'
Example #7
def main():
    options = options_()
    for cutkey in options.cut:
        print 'cutkey : ', cutkey
        ### get M_A and M_H ###
        mH = float(options.mH_list[cutkey])
        mA = float(options.mA_list[cutkey])
        print mH, mA
        """Main function"""
        # start the timer
        tstart = datetime.now()
        print 'starting...'
        # get the options
        #options = get_options()

        intL = options.lumi  # in pb-1
        #tag = 'v1.2.0+7415-19-g7bbca78_ZAAnalysis_1a69757'
        #path = '/nfs/scratch/fynu/amertens/cmssw/CMSSW_7_4_15/src/cp3_llbb/CommonTools/histFactory/16_01_28_syst/build'
        tag = 'v1.1.0+7415-83-g2a9f912_ZAAnalysis_2ff9261'
        #tag = 'v1.1.0+7415-57-g4bff5ea_ZAAnalysis_b1377a8'
        path = options.path
        CHANNEL = options.CHANNEL
        ERA = options.ERA
        MASS = str(mH) + "_" + str(mA)
        ANALYSIS = options.ANALYSIS
        DEBUG = 0

        c = ch.CombineHarvester()
        cats = [(0, "mmbbSR" + cutkey), (1, "mll_mmbbBR" + cutkey),
                (2, "eebbSR" + cutkey), (3, "mll_eebbBR" + cutkey)]

        bins = {}
        bins['signalregion_mm'] = "mmbbSR" + cutkey
        bins['mll_bkgregion_mm'] = "mll_mmbbBR" + cutkey
        bins['signalregion_ee'] = "eebbSR" + cutkey
        bins['mll_bkgregion_ee'] = "mll_eebbBR" + cutkey

        processes = {}
        p = Process('data_obs')
        #DoubleMuon_Run2015D_v1.1.0+7415-57-g4bff5ea_ZAAnalysis_b1377a8_histos.root
        p.prepare_process(path, 'data_obs', 'DoubleMuon_DoubleEG_Run2015D',
                          tag)
        processes['data_obs'] = p
        if DEBUG: print p
        # define signal
        # define backgrounds
        # zz
        p = Process('zz')
        p.prepare_process(
            path, 'zz',
            'ZZTo2L2Q_13TeV_amcatnloFXFX_madspin_pythia8_MiniAODv2', tag)
        processes['zz'] = p
        if DEBUG: print p

        # ttbar
        p = Process('ttbar')
        p.prepare_process(path, 'ttbar', 'TTTo2L2Nu_13TeV-powheg_MiniAODv2',
                          tag)
        processes['ttbar'] = p
        if DEBUG: print p
        '''
    # drell-yan
    p = Process('dy1')
    p.prepare_process(path, 'dy1', 'DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-amcatnloFXFX_MiniAODv2', tag)
    processes['dy1'] = p
    if DEBUG: print p
    '''
        p = Process('dy2')
        p.prepare_process(
            path, 'dy2',
            'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX_MiniAODv2', tag)
        processes['dy2'] = p
        if DEBUG: print p

        c.AddObservations([MASS], [ANALYSIS], [ERA], [CHANNEL], cats)
        c.AddProcesses([MASS], [ANALYSIS], [ERA], [CHANNEL], ['ZA'], cats,
                       True)
        c.AddProcesses([MASS], [ANALYSIS], [ERA], [CHANNEL],
                       ['ttbar', 'dy2', 'zz'], cats, False)
        c.cp().process(['ttbar', 'dy2', 'ZA']).AddSyst(
            c, "lumi", "lnN",
            ch.SystMap('channel', 'era', 'bin_id')([CHANNEL], [ERA],
                                                   [0, 1, 2, 3], 1.046))

        c.cp().process(['ttbar', 'dy2', 'ZA']).AddSyst(
            c, "trig", "lnN",
            ch.SystMap('channel', 'era', 'bin_id')([CHANNEL], [ERA],
                                                   [0, 1, 2, 3], 1.04))

        c.cp().process(['ttbar', 'dy2']).AddSyst(c, "btag", "shape",
                                                 ch.SystMap()(1.0))

        c.cp().process(['ttbar', 'dy2']).AddSyst(c, "jec", "shape",
                                                 ch.SystMap()(1.0))

        c.cp().process(['ttbar', 'dy2']).AddSyst(c, "jer", "shape",
                                                 ch.SystMap()(1.0))

        c.cp().process(['ttbar', 'dy2']).AddSyst(c, "pu", "shape",
                                                 ch.SystMap()(1.0))

        c.cp().process(['ttbar']).AddSyst(c, "TTpdf", "shape",
                                          ch.SystMap()(1.0))

        c.cp().process(['dy2']).AddSyst(c, "DYpdf", "shape", ch.SystMap()(1.0))

        c.cp().process(['dy2']).AddSyst(
            c, "DYnorm", "lnN",
            ch.SystMap('channel', 'era', 'bin_id')([CHANNEL], [ERA], [0, 1],
                                                   1.1))

        c.cp().process(['ttbar']).AddSyst(
            c, "TTnorm", "lnN",
            ch.SystMap('channel', 'era', 'bin_id')([CHANNEL], [ERA], [0], 1.1))

        nChannels = len(bins)
        nBackgrounds = len(
            [processes[x] for x in processes if processes[x].type > 0])
        nNuisances = 1

        systematics = {
            '': '',
            '_btagUp': '__btagup',
            '_btagDown': '__btagdown',
            '_jecUp': '__jecup',
            '_jecDown': '__jecdown',
            '_jerUp': '__jerup',
            '_jerDown': '__jerdown',
            '_puUp': '__puup',
            '_puDown': '__pudown',
            '_TTpdfUp': '__pdfup',
            '_TTpdfDown': '__pdfdown',
            '_DYpdfUp': '__pdfup',
            '_DYpdfDown': '__pdfdown'
        }
        outputRoot = "shapes.root"
        f = TFile(outputRoot, "recreate")
        f.Close()
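        # Copy each (bin, process) histogram from the per-process input files into
        # shapes.root, renaming it to the "hist_$BIN_$PROCESS[_$SYSTEMATIC]" scheme
        # that the ExtractShapes calls further below rely on.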
        for b in bins:
            print b, bins[b]
            for p in processes:
                if p == 'data_obs':
                    file_in = TFile(processes[p].file, "READ")
                    print " Getting ", bins[b], " in file ", processes[p].file
                    h = file_in.Get(bins[b])
                    h.SetDirectory(0)
                    file_in.Close()
                    f = TFile(outputRoot, "update")
                    h.SetName("hist_" + bins[b] + "_" + p)
                    h.Write()
                    f.Write()
                    f.Close()

                else:
                    for s1, s2 in systematics.iteritems():
                        file_in = TFile(processes[p].file, "READ")
                        print " Getting ", bins[
                            b] + s2, " in file ", processes[p].file
                        h = file_in.Get(bins[b] + s2)
                        h.SetDirectory(0)
                        file_in.Close()
                        f = TFile(outputRoot, "update")
                        h.SetName("hist_" + bins[b] + "_" + p + s1)
                        h.Sumw2()
                        #h.Scale(processes[p].xsection * intL / processes[p].sumW)
                        h.Scale(intL)
                        h.Write()
                        f.Write()
                        f.Close()

        # Fill signal histograms FIXME: read efficiencies from eff.root

        eff_file = TFile("eff.root", "READ")
        effee_hist = eff_file.Get("effee")
        eff_ee = effee_hist.Interpolate(mA, mH)
        effmm_hist = eff_file.Get("effmm")
        eff_mm = effmm_hist.Interpolate(mA, mH)

        print "lumi : ", options.lumifb
        print "eff at ", mA, mH, ":", eff_ee, eff_mm
        print "ZA yields: ", options.lumifb * eff_mm, options.lumifb * eff_ee

        f = TFile(outputRoot, "update")
        h1 = TH1F("hist_" + bins['signalregion_mm'] + "_ZA",
                  "hist_" + bins['signalregion_mm'] + "_ZA", 1, 0, 1)
        h1.Fill(0.5, options.lumifb * eff_mm)
        h1.Write()

        h2 = TH1F("hist_" + bins['mll_bkgregion_mm'] + "_ZA",
                  "hist_" + bins['mll_bkgregion_mm'] + "_ZA", 60, 60, 120)
        h2.Write()

        h3 = TH1F("hist_" + bins['signalregion_ee'] + "_ZA",
                  "hist_" + bins['signalregion_ee'] + "_ZA", 1, 0, 1)
        h3.Fill(0.5, options.lumifb * eff_ee)
        h3.Write()

        h4 = TH1F("hist_" + bins['mll_bkgregion_ee'] + "_ZA",
                  "hist_" + bins['mll_bkgregion_ee'] + "_ZA", 60, 60, 120)
        h4.Write()

        f.Write()
        f.Close()

        c.cp().backgrounds().ExtractShapes(outputRoot, "hist_$BIN_$PROCESS",
                                           "hist_$BIN_$PROCESS_$SYSTEMATIC")
        c.cp().signals().ExtractShapes(outputRoot, "hist_$BIN_$PROCESS",
                                       "hist_$BIN_$PROCESS_$SYSTEMATIC")
        writer = ch.CardWriter(
            '$TAG/$MASS/$ANALYSIS_$CHANNEL_$ERA.dat',
            '$TAG/common/$ANALYSIS_$CHANNEL_$MASS.input_$ERA.root')
        writer.WriteCards('CARDS/', c)
Example #8
                ch.SystMap('process')(['ttZ'], 0.919)(['ttH'], 0.926))

cb.cp().AddSyst(
    cb, 'pdf_qqbar', 'lnN',
    ch.SystMap('process')(['tH_YtMinus'], 1.048)(['ttW'], 1.072)(['WZ', 'ZZ'],
                                                                 1.040))

cb.cp().AddSyst(cb, 'pdf_qg', 'lnN', ch.SystMap('process')(['tHW'], 1.048))

print '>> Extracting histograms from input root files...'
file = aux_shapes + 'CERN/htt_th.inputs-sm-8TeV.root'
cb.cp().backgrounds().ExtractShapes(file, '$BIN/$PROCESS',
                                    '$BIN/$PROCESS_$SYSTEMATIC')
cb.cp().signals().ExtractShapes(file, '$BIN/$PROCESS$MASS',
                                '$BIN/$PROCESS$MASS_$SYSTEMATIC')

print '>> Generating bbb uncertainties...'
bbb = ch.BinByBinFactory()
bbb.SetAddThreshold(0.1).SetFixNorm(True)
bbb.AddBinByBin(cb.cp().process(['reducible']), cb)

print '>> Setting standardised bin names...'
ch.SetStandardBinNames(cb)
cb.PrintAll()

writer = ch.CardWriter('$TAG/$MASS/$ANALYSIS_$CHANNEL_$BINID_$ERA.txt',
                       '$TAG/common/$ANALYSIS_$CHANNEL.input.root')
writer.SetVerbosity(1)
writer.WriteCards('output/sm_cards/LIMITS', cb)

print '>> Done!'
Example #9
import CombineHarvester.CombineTools.ch as ch

cb = ch.CombineHarvester()
cb.AddObservations(["120"], ["ana"], ["era"], ["chan"], [(0, "cat")])
cb.AddProcesses(["120"], ["ana"], ["era"], ["chan"], ["sig"], [(0, "cat")],
                True)
cb.AddProcesses(["120"], ["ana"], ["era"], ["chan"], ["bkg"], [(0, "cat")],
                False)

cb.cp().process(["bkg"]).AddSyst(cb, "sys", "shape", ch.SystMap()(1.0))
cb.cp().ExtractShapes("shapes.root", "$PROCESS", "$PROCESS_$SYSTEMATIC")
"""
bbb = ch.BinByBinFactory()
bbb.SetVerbosity(1)
bbb.SetAddThreshold(0.0)
bbb.SetMergeThreshold(0.0)
bbb.SetFixNorm(True)
#bbb.MergeBinErrors(cb.cp())
bbb.AddBinByBin(cb.cp(), cb)
cb.SetGroup("bbb", [".*_bin_\\d+"])
cb.SetGroup("syst_plus_bbb", [".*"])
cb.SetGroup("syst", ["sys"])
"""

cb.PrintAll()

writer = ch.CardWriter("datacard.txt", "shapes_ch.root")
writer.SetVerbosity(1)
writer.CreateDirectories(False)
writer.WriteCards("", cb)
Example #10
assert (len(args) >= 2)

BIN = args[0]
KD = args[1]

print BIN, KD

cmb = ch.CombineHarvester()
cmb.SetFlag('check-negative-bins-on-import', 0)

cmb.ParseDatacard("datacards/" + BIN + "/" + KD + "/datacard.txt")

# Rebin Mode 1 : Starts from bin with lowest content which fails the condition. Tries moving left and right merging bins until threshold is met.
# Chooses from left and right to minimise number of bins lost
# Repeats with new lowest bin until all bins pass threshold
# SetBinUncertFraction : The threshold on the bin uncertainty fraction for which we consider merging bins

rebin = ch.AutoRebin()
rebin.SetBinThreshold(0.0)
rebin.SetBinUncertFraction(0.25)
rebin.SetRebinMode(1)
rebin.SetPerformRebin(True)
rebin.SetVerbosity(0)
rebin.Rebin(cmb, cmb)

writer = ch.CardWriter(
    'datacards/' + BIN + '/' + KD + '_Proc/datacard.txt',
    'datacards/' + BIN + '/' + KD + '_Proc/shapes/histos_' + BIN + '.root')

writer.WriteCards('', cmb)
Example #11
gr_sig_inc = ['QCDScale_WG_PtBin_.*']
gir_bkg_th = ['QCDScale_.*NLO', 'pdf_VV', 'pdf_ttbar']

gr_th = gr_sig_th + gir_bkg_th
gr_expt = gr_lep_eff + gr_pho_eff + gr_pho_fakes + gr_lep_fakes + gr_other + gr_met_scale + gr_pho_scale

cb.SetGroup('lumi', gr_lumi)
cb.SetGroup('lep_eff', gr_lep_eff)
cb.SetGroup('pho_eff', gr_pho_eff)
cb.SetGroup('pho_fakes', gr_pho_fakes)
cb.SetGroup('lep_fakes', gr_lep_fakes)
cb.SetGroup('other', gr_other)
cb.SetGroup('met_scale', gr_met_scale)
cb.SetGroup('pho_scale', gr_pho_scale)
cb.SetGroup('sig_th', gr_sig_th)
cb.SetGroup('sig_inc', gr_sig_inc)
cb.SetGroup('bkg_th', gir_bkg_th)
cb.SetGroup('th', gr_th)
cb.SetGroup('expt', gr_expt)

cb.PrintParams()

writer = ch.CardWriter(
    '$TAG/$BIN.txt',
    '$TAG/common/$ANALYSIS.%s.%s.input.root' % (args.channel, args.year))
writer.SetWildcardMasses([])
writer.SetVerbosity(1)
writer.WriteCards(args.output, cb)

print '>> Done!'
Example #12
        filepath = os.path.join(os.environ['CMSSW_BASE'],'src/TauFW/Fitter/MuTauFR/input', "MuTauFR_m_vis_eta%s_mt-2016.inputs.root")%(ieta)
        processName = '%s$BIN/$PROCESS'%(iwp)
        systematicName = '%s$BIN/$PROCESS_$SYSTEMATIC'%(iwp)
        cb.cp().backgrounds().ExtractShapes(filepath, processName, systematicName)
        cb.cp().signals().ExtractShapes(filepath, processName, systematicName)
        ch.SetStandardBinNames(cb, '$BIN') # Define the name of the category names
        #cb.SetAutoMCStats(cb, 0.0) # Introducing statistical uncertainties on the total background for each histogram bin (Barlow-Beeston lite approach)
        
        bbb = ch.BinByBinFactory()
        bbb.SetAddThreshold(0.1).SetMergeThreshold(0.5).SetFixNorm(True)
        bbb.MergeBinErrors(cb.cp().backgrounds())
        bbb.AddBinByBin(cb.cp().backgrounds(), cb)
        
        datacardPath = 'input/2016/MuTauFR/%s_eta%s.txt'%(iwp,ieta)
        shapePath = 'input/2016/MuTauFR/common/%s_eta%s.root'%(iwp,ieta)
        writer = ch.CardWriter(datacardPath,shapePath)
        writer.SetWildcardMasses([])
        writer.WriteCards('cmb', cb) # writing all datacards into one folder for combination
        #cb.PrintAll()
        #writer.WriteCards(channel, cb.cp().channel([channel])) # writing datacards for each final state in a corresponding folder to be able to perform the measurement individually in each final state
        print 'pre-fit fake rate:'
        print cb.cp().bin(['Pass']).process(['ZL']).GetRate() / ((cb.cp().bin(['Pass']).process(['ZL']).GetRate()+cb.cp().bin(['Fail']).process(['ZL']).GetRate()))

        sigRatePassPre = cb.cp().bin(['Pass']).process(['ZL']).GetRate()
        sigRateFailPre = cb.cp().bin(['Fail']).process(['ZL']).GetRate()
        sigErrPassPre = cb.cp().bin(['Pass']).process(['ZL']).GetUncertainty()
        sigErrFailPre = cb.cp().bin(['Fail']).process(['ZL']).GetUncertainty()
        
        dfdxPre = sigRateFailPre /((sigRatePassPre+sigRateFailPre)*(sigRatePassPre+sigRateFailPre))
        dfdyPre = -sigRatePassPre / ((sigRatePassPre+sigRateFailPre)*(sigRatePassPre+sigRateFailPre))
        errfakeratePrefit = math.sqrt((dfdxPre*sigErrPassPre)*(dfdxPre*sigErrPassPre)+(dfdyPre*sigErrFailPre)*(dfdyPre*sigErrFailPre))
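The closing lines propagate the statistical uncertainties on the Pass and Fail yields into an uncertainty on the pre-fit fake rate f = pass / (pass + fail). A small sketch that factors the same arithmetic into a helper; the function name is illustrative and not part of the original script:

import math

def fake_rate_with_error(rate_pass, rate_fail, err_pass, err_fail):
    # f = pass / (pass + fail), with the two yield errors propagated independently.
    total = rate_pass + rate_fail
    f = rate_pass / total
    dfdp = rate_fail / (total * total)    # df / d(pass)
    dfdq = -rate_pass / (total * total)   # df / d(fail)
    return f, math.sqrt((dfdp * err_pass) ** 2 + (dfdq * err_fail) ** 2)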
Example #13
# Start by calling everything syst and allsyst
cb.SetGroup('allsyst', ['.*'])
cb.SetGroup('syst', ['.*'])

# Then set lumi, and remove from both of the above
cb.SetGroup('lumi', ['lumi_.*'])
cb.RemoveGroup('syst', ['lumi_.*'])
cb.RemoveGroup('allsyst', ['lumi_.*'])

# Then tauid, and remove it only from syst
cb.SetGroup('tauid', ['CMS_eff_t'])
cb.RemoveGroup('syst', ['CMS_eff_t'])

# Now we can split into:
#    - stat + syst + tauid + lumi   ..or..
#    - stat + allsyst + lumi

cb.PrintAll()

#####################################################################################
# Write the cards
#####################################################################################
writer = ch.CardWriter('$TAG/datacard.txt', '$TAG/shapes.root')
writer.SetWildcardMasses([])  # We don't use the $MASS property here
writer.SetVerbosity(1)
x = writer.WriteCards('output/LIMITS/cmb', cb)  # All cards combined
print x
x['output/LIMITS/cmb/datacard.txt'].PrintAll()
for chn in channels:  # plus a subdir per channel
    writer.WriteCards('output/LIMITS/%s' % chn, cb.cp().channel([chn]))
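The nuisance groups defined above are typically consumed later when quoting an uncertainty breakdown, for example by freezing one group at a time with combine's --freezeNuisanceGroups option. A rough sketch of how that split might be driven; the workspace path and the choice of fit method are assumptions, not taken from the original script:

import subprocess

# Assumes a workspace has been built from the combined card written above.
workspace = 'output/LIMITS/cmb/workspace.root'  # assumed name
for group in ['lumi', 'tauid', 'syst', 'allsyst']:
    cmd = ['combine', '-M', 'MultiDimFit', workspace,
           '--freezeNuisanceGroups', group, '-n', '.freeze_' + group]
    print ' '.join(cmd)  # or subprocess.call(cmd) to run the fit directly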
Example #14
    #(['2017'], 1.026))
    cb.cp().process(['signal']).AddSyst(cb, 'DsNorm_$ERA', 'lnN',
                                        ch.SystMap('era')(['2017'], 1.05))
    cb.cp().process(['signal']).AddSyst(cb, 'BRDsPhiPi_$ERA', 'lnN',
                                        ch.SystMap('era')(['2017'], 1.08))
    cb.cp().process(['signal']).AddSyst(cb, 'BRBtoTau_$ERA', 'lnN',
                                        ch.SystMap('era')(['2017'], 1.11))
    cb.cp().process(['signal']).AddSyst(cb, 'BRBtoD_$ERA', 'lnN',
                                        ch.SystMap('era')(['2017'], 1.16))
    cb.cp().process(['signal']).AddSyst(cb, 'fUnc_$ERA', 'lnN',
                                        ch.SystMap('era')(['2017'], 1.11))
    cb.cp().process(['signal']).AddSyst(cb, 'DpmScaling_$ERA', 'lnN',
                                        ch.SystMap('era')(['2017'], 1.03))
    cb.cp().process(['signal']).AddSyst(cb, 'BsScaling_$ERA', 'lnN',
                                        ch.SystMap('era')(['2017'], 1.12))
    print '>> Extracting histograms from input root files...'

    file = args.input_file
    cb.cp().backgrounds().ExtractShapes(file, '$PROCESS',
                                        '$PROCESS_$SYSTEMATIC')
    cb.cp().signals().ExtractShapes(file, '$PROCESS', '$PROCESS_$SYSTEMATIC')

    cb.PrintAll()

    writer = ch.CardWriter('outcard_' + args.cat + '.dat',
                           'input_file_' + args.cat + '.root')
    writer.SetWildcardMasses([])
    writer.CreateDirectories(False)

    writer.WriteCards('LIMITS', cb)
Example #15
# energy resolution
# CMS_res_$X (X = e, m, t, g, j, met, b)

# b tag
# CMS_btag_comb or (CMS_btag_light and CMS_btag_heavy)
# (or more complicated with iterative fit)

# load histograms
cb.cp().backgrounds().ExtractShapes(inname, '$BIN/$PROCESS',
                                    '$BIN/$PROCESS_$SYSTEMATIC')
cb.cp().signals().ExtractShapes(inname, '$BIN/$PROCESS',
                                '$BIN/$PROCESS_$SYSTEMATIC')

# old way of doing bin by bin stats
#bbb = ch.BinByBinFactory()
#bbb.SetAddThreshold(0.0).SetMergeThreshold(1.0).SetFixNorm(False)
##bbb.MergeBinErrors(cb.cp().backgrounds())
#bbb.AddBinByBin(cb.cp().backgrounds(), cb)
#bbb.AddBinByBin(cb.cp().signals(), cb)

# TODO: autoMCStats through harvester...

ch.SetStandardBinNames(cb)

# save datacard
writer = ch.CardWriter('datacards/$TAG/$MASS/$ANALYSIS_$ERA.txt',
                       'datacards/$TAG/common/$ANALYSIS.input.root')
writer.WriteCards('combined', cb)
for chan in channels:
    writer.WriteCards(chan, cb.cp().channel([chan]))
Example #16
# cb.GetParameter('muon_fr').set_val(1.5)
# cb.GetParameter('muon_fr').set_range(0.5, 2.5)
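# Note: the rateParams below use the formula form of ch.SystMap(), i.e. a
# (formula, parameters) tuple; where '@0' appears in a formula it refers to the
# first parameter listed in the tuple ('effsf' or 'fakesf').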

cb.cp().process(['ZTT']).bin_id([1]).AddSyst(
    cb, 'pass', 'rateParam',
    ch.SystMap()(('(%g)' % eff_initial, 'effsf')))

cb.cp().process(['ZTT']).bin_id([0]).AddSyst(
    cb, 'fail', 'rateParam',
    ch.SystMap()(('(1-@0*%g)' % eff_initial, 'effsf')))

cb.cp().process(['W']).bin_id([1]).AddSyst(
    cb, 'passW', 'rateParam',
    ch.SystMap()(('(%g)' % eff_W_initial, 'fakesf')))

cb.cp().process(['W']).bin_id([0]).AddSyst(
    cb, 'failW', 'rateParam',
    ch.SystMap()(('(1-@0*%g)' % eff_W_initial, 'fakesf')))

# print '>> Setting standardised bin names...'
# ch.SetStandardBinNames(cb)
# cb.PrintAll()

writer = ch.CardWriter(
    '$TAG/$ANALYSIS_%s.txt' % (args.prefix + args.dir),
    '$TAG/$ANALYSIS_%s.input.root' % (args.prefix + args.dir))
writer.SetWildcardMasses([])
writer.SetVerbosity(1)
writer.WriteCards(args.output, cb)

print '>> Done!'
Example #17
import ROOT as r 
import CombineHarvester.CombineTools.ch as ch
import os

for card in os.listdir('.'):
    if '.txt' not in card: continue
    cb = ch.CombineHarvester() 
    theChan=None
    theYear=None
    for chan in ['ttH_2lss_0tau','ttH_2lss_1tau','ttH_3l_0tau','ttH_4l','ttH_cr_3l','ttH_cr_4l']:
        if card.startswith(chan): theChan=chan
    #for year in ['2016','2017','2018']:
    for year in ['2018']:
        if year in card: theYear=year
    if not theChan:
        print card, theChan
        raise RuntimeError()
    print card, chan
    cb.ParseDatacard( card, "$CHANNEL.txt" )
    #cb.ForEachProc( lambda p : cb.cp().process([p.process()]).RenameSystematic( cb,  'CMS_ttHl_pileup', 'CMS_ttHl_pileup_%s_%s'%(theChan,theYear)))
    cb.ForEachProc( lambda p : cb.cp().process([p.process()]).RenameSystematic( cb,  'CMS_res_j_endcap1_2018', 'CMS_res_j_endcap1_%s_%s'%(theChan,theYear)))
    writer = ch.CardWriter('new_split/%s'%card, 'new_split/%s.root'%(card.replace('.txt','')))
    writer.WriteCards('.',cb)
    
#out = r.TFile.Open('combined.root')
#cmb.WriteDataCard('combined.kk',out)
Example #18
            or (sys.channel() in ['tt'] and sys.bin_id() >= 2)):
        sys.set_name(sys.name() + '_vbf')


cmb.ForEachSyst(ModUEPS)

cmb.cp().syst_name([
    'QCDscale_ggH2in'
]).ForEachSyst(lambda sys: sys.set_name('QCDscale_ggH2in_vbf'))


def FixMe(sys):
    if sys.process().startswith('ggH_hww') and sys.name() == 'pdf_qqbar':
        print sys
        sys.set_process(sys.process().replace('ggH', 'qqH'))
        print sys


cmb.ForEachSyst(FixMe)

writer_htt = ch.CardWriter(
    '$TAG/$MASS/$ANALYSIS_$CHANNEL_$BINID_$ERA.txt',
    '$TAG/common/$ANALYSIS_$CHANNEL.input_$ERA_lhchcg.root')
writer_htt.SetVerbosity(1)
writer_htt.WriteCards('output/htt-YR3-hpt-bbH', cmb.cp().analysis(['htt']))

writer_vhtt = ch.CardWriter('$TAG/$MASS/$ANALYSIS_$BINID_$ERA.txt',
                            '$TAG/common/$ANALYSIS.input_$ERA_lhchcg.root')
writer_vhtt.SetVerbosity(1)
writer_vhtt.WriteCards('output/htt-YR3-hpt-bbH', cmb.cp().analysis(['vhtt']))
Example #19
    args = parser.parse_args()
    cb = ch.CombineHarvester()

    sig_procs = ['signal']
    bkg_procs = ['background']

    cb.AddObservations(['*'], ['t3m'], ['2017'], ['l'], [(1, 'bin1')])
    cb.AddProcesses(['*'], ['t3m'], ['2017'], ['l'], bkg_procs, [(0, 'bin1')],
                    False)
    cb.AddProcesses(['*'], ['t3m'], ['2017'], ['l'], sig_procs, [(0, 'bin1')],
                    True)

    cb.cp().process(['signal',
                     'background']).AddSyst(cb, 'lumi_$ERA', 'lnN',
                                            ch.SystMap('era')(['2017'], 1.026))

    print '>> Extracting histograms from input root files...'

    file = args.input_file
    cb.cp().backgrounds().ExtractShapes(file, '$PROCESS',
                                        '$PROCESS_$SYSTEMATIC')
    cb.cp().signals().ExtractShapes(file, '$PROCESS', '$PROCESS_$SYSTEMATIC')

    cb.PrintAll()

    writer = ch.CardWriter('outcard.dat', 'input_file.root')
    writer.SetWildcardMasses([])
    writer.CreateDirectories(False)

    writer.WriteCards('LIMITS', cb)
Example #20
                            proc_name = name
            if "HIG-18-019" in process:
                complement = "_2017"
            if "HIG-17-018" in process:
                complement = "_2016"
            cb.ParseDatacard(process, analysis=proc_name + complement, mass="")
            if not btag_correlated and "HIG-18-019" in process:
                print "start decorrelating btag"
                for s in ["HF", "LF", "cErr1", "cErr2"]:
                    print "renaming for " + s
                    cb.ForEachProc(decorrelate_btag)
            if not JES_correlated and "HIG-18-019" in process:
                cb.ForEachProc(decorrelate_JES)
            writer = ch.CardWriter(
                os.getcwd() + "/" + mom_result + proc_name + complement +
                '.txt',
                os.getcwd() + "/" + mom_result + proc_name + complement +
                '.root')
            writer.WriteCards('LIMITS/cmb', cb)

    everybody = glob.glob(os.getcwd() + "/" + mom_result + "*.txt")

    if btag_correlated:
        cardToWrite = "card_combo_2016_2017_btag_correlated"
        cardToWrite_2017 = "card_combo_2017_btag_correlated"
    else:
        cardToWrite = "card_combo_2016_2017_JES_Notcorrelated"
        cardToWrite_2017 = "card_combo_2017_JES_Notcorrelated"

    if combine_cards:
        string_combine = "combineCards.py "