Example #1
File: zp.py  Project: amkalsi/hlim
def add_bbb(cb):
    print '>> Merging bin errors and generating bbb uncertainties...'
    bbb = ch.BinByBinFactory()
    bbb.SetAddThreshold(0.1).SetMergeThreshold(0.5).SetFixNorm(True)

    for chn in chns:
        cb_chn = cb.cp().channel([chn])
        bbb.MergeAndAdd(
            cb_chn.cp().era([era]).bin_id([0]).process(bkg_procs[chn]), cb)
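The examples on this page all follow the same basic pattern: construct a BinByBinFactory, set the thresholds that control when bin-by-bin (bbb) parameters are created and when bin errors are merged, then attach the parameters either with the two-step MergeBinErrors/AddBinByBin calls or with the combined MergeAndAdd call. Below is a minimal sketch of that pattern, assuming CombineHarvester is installed and cb is a ch.CombineHarvester whose shapes have already been extracted; the function name and defaults are illustrative only.

import CombineHarvester.CombineTools.ch as ch

def add_bbb_uncertainties(cb, processes, add_threshold=0.1, merge_threshold=0.5, fix_norm=True):
    # Configure the factory; see the CombineHarvester documentation for the
    # exact meaning of the thresholds and of SetFixNorm.
    bbb = ch.BinByBinFactory()
    bbb.SetAddThreshold(add_threshold)
    bbb.SetMergeThreshold(merge_threshold)
    bbb.SetFixNorm(fix_norm)
    # Two-step form used in several examples below: merge small bin errors,
    # then create the bbb nuisance parameters on the selected processes.
    bbb.MergeBinErrors(cb.cp().process(processes))
    bbb.AddBinByBin(cb.cp().process(processes), cb)
    # Combined form used in Examples 1 and 5:
    # bbb.MergeAndAdd(cb.cp().process(processes), cb)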
Example #2
	def add_bin_by_bin_uncertainties(self, processes, add_threshold=0.1, merge_threshold=0.5, fix_norm=True):
		bin_by_bin_factory = ch.BinByBinFactory()
		if log.isEnabledFor(logging.DEBUG):
			bin_by_bin_factory.SetVerbosity(100)

		bin_by_bin_factory.SetAddThreshold(add_threshold)
		bin_by_bin_factory.SetMergeThreshold(merge_threshold)
		bin_by_bin_factory.SetFixNorm(fix_norm)

		bin_by_bin_factory.MergeBinErrors(self.cb.cp().process(processes))
		bin_by_bin_factory.AddBinByBin(self.cb.cp().process(processes), self.cb)
Example #3
	def add_bin_by_bin_uncertainties(self, processes, add_threshold=0.1, merge_threshold=0.5, fix_norm=True):
		bin_by_bin_factory = ch.BinByBinFactory()
		if log.isEnabledFor(logging.DEBUG):
			bin_by_bin_factory.SetVerbosity(100)

		bin_by_bin_factory.SetAddThreshold(add_threshold)
		bin_by_bin_factory.SetMergeThreshold(merge_threshold)
		bin_by_bin_factory.SetFixNorm(fix_norm)

		bin_by_bin_factory.MergeBinErrors(self.cb.cp().process(processes))
		bin_by_bin_factory.AddBinByBin(self.cb.cp().process(processes), self.cb)
		#ch.SetStandardBinNames(self.cb) # TODO: this line seems to mix up the categories

		self.cb.SetGroup("bbb", [".*_bin_\\d+"])
		self.cb.SetGroup("syst_plus_bbb", [".*"])
Example #4
 def add_bin_by_bin_systematics(self, processes, add_threshold,
                                merge_threshold, fix_norm):
     if not self._shapes_extracted:
         logger.fatal("Shapes need to be extracted first.")
         raise Exception
     bbb = ch.BinByBinFactory()
     if logger.isEnabledFor(logging.DEBUG):
         bbb.SetVerbosity(1)
     bbb.SetAddThreshold(add_threshold)
     bbb.SetMergeThreshold(merge_threshold)
     bbb.SetFixNorm(fix_norm)
     bbb.MergeBinErrors(self.cb.cp().process(processes))
     bbb.AddBinByBin(self.cb.cp().process(processes), self.cb)
     self.cb.SetGroup("bbb", [".*_bin_\\d+"])
     self.cb.SetGroup("syst_plus_bbb", [".*"])
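Examples 3 and 4 also collect the generated parameters into named groups with SetGroup. The "bbb" group relies on the generated nuisance names ending in _bin_<index>, which is what the regex .*_bin_\d+ selects. A small, self-contained illustration of that regex, using hypothetical nuisance names:

import re

# Hypothetical nuisance names, for illustration only; the exact names depend on
# the naming pattern used by BinByBinFactory, but bbb names end in "_bin_<index>".
names = ["CMS_htt_tt_1_13TeV_ZTT_bin_7", "CMS_scale_t_13TeV", "lumi_13TeV"]
bbb_group = [n for n in names if re.match(r".*_bin_\d+", n)]
print(bbb_group)  # ['CMS_htt_tt_1_13TeV_ZTT_bin_7']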
Example #5
def add_bbb(cb):
    print '>> Merging bin errors and generating bbb uncertainties...'
    bbb = ch.BinByBinFactory()
    bbb.SetAddThreshold(0.1).SetMergeThreshold(0.5).SetFixNorm(False)

    for chn in chns:
        cb_chn = cb.cp().channel([chn])
        if 'isCR' in chn:
            bbb.MergeAndAdd(
                cb_chn.cp().era([era]).bin_id([0, 1, 2,
                                               3]).process(bkg_procs[chn]), cb)
            bbb.MergeAndAdd(
                cb_chn.cp().era([era]).bin_id([0, 1, 2, 3]).process(sig_procs),
                cb)
        else:
            bbb.MergeAndAdd(
                cb_chn.cp().era([era]).bin_id([0, 1, 2, 3, 4, 5, 6,
                                               7]).process(bkg_procs[chn]), cb)
            bbb.MergeAndAdd(
                cb_chn.cp().era([era]).bin_id([0, 1, 2, 3, 4, 5, 6,
                                               7]).process(sig_procs), cb)
Example #6
                MC_shape_syst_era_3 = MC_shape_syst_era_2 + "_tau"
            else :
                MC_shape_syst_era_3 = MC_shape_syst_era_2 + "_" + channel
            cb.cp().process(procs_for_shape).RenameSystematic(cb, MC_shape_syst_era_2, MC_shape_syst_era_3)
            print ("renamed " + MC_shape_syst_era_2 + " as shape uncertainty to MC prcesses to " + MC_shape_syst_era_3)
        if  "Clos_t_norm" in specific_syst or  "Clos_t_shape" in specific_syst:
            MC_shape_syst_era_3 = MC_shape_syst_era_2 + "_" + channel
            cb.cp().process(procs_for_shape).RenameSystematic(cb, MC_shape_syst_era_2, MC_shape_syst_era_3)
            print ("renamed " + MC_shape_syst_era_2 + " as shape uncertainty to MC prcesses to " + MC_shape_syst_era_3)

########################################
if ( not ( signal_type == "none" and mass == "none" and HHtype == "none" )) and options.output_file=="none" :
    output_file =  "%s_%s_%s_%s" % (output_file, HHtype, signal_type, mass )

if binByBin:
    bbb = ch.BinByBinFactory()
    bbb.SetAddThreshold(0.1).SetFixNorm(False)
    #bbb.SetAddThreshold(0.1).SetMergeThreshold(0.5).SetFixNorm(True)
    #bbb.MergeBinErrors(cb.cp().backgrounds())
    bbb.AddBinByBin(cb.cp().backgrounds(),cb)
    sysnames = cb.syst_name_set()
    for sysname in sysnames:
        if "CMS___" in sysname:
            binname = sysname[sysname.find("bin"):]
            process = sysname[sysname.find("13TeV_")+6:sysname.find("bin")-1]
            newname="CMS_multilepton_mcStat_%s_%s_%s_%s"%(channel,era,process,binname)
            cb.cp().process([process]).RenameSystematic(cb, sysname, newname)
bins = cb.bin_set()
for b in bins :
    print ("\n Output file: " + output_file + ".txt", b )
    cb.cp().bin([b]).mass(["*"]).WriteDatacard(output_file + ".txt" , output_file + ".root")
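The renaming loop in this example recovers the process and bin index by slicing the auto-generated nuisance name around its "13TeV_" and "bin" markers. A standalone illustration of that slicing, with a hypothetical name and placeholder channel/era values:

# Hypothetical auto-generated name of the form handled in the loop above.
sysname = "CMS___sr_13TeV_TTW_bin_4"
channel, era = "2lss", "2017"  # placeholder values, for illustration only

binname = sysname[sysname.find("bin"):]                                # "bin_4"
process = sysname[sysname.find("13TeV_") + 6:sysname.find("bin") - 1]  # "TTW"
newname = "CMS_multilepton_mcStat_%s_%s_%s_%s" % (channel, era, process, binname)
print(newname)  # CMS_multilepton_mcStat_2lss_2017_TTW_bin_4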
Example #7
def  writeCard(input,theLambda,select,region=-1) :
	print "writing cards"
	variables =[]
	if opt.isResonant : variables.append('HHKin_mass_raw')
	else : variables.append('MT2')

	#out_dir = opt.outDir
	theOutputDir = "{0}{1}{2}".format(theLambda,select,variables[0])
	dname = "_"+opt.channel+opt.outDir
	out_dir = "cards{1}/{0}/".format(theOutputDir,dname)
	print out_dir
	#in_dir = "/grid_mnt/vol__vol_U__u/llr/cms/ortona/diHiggs/CMSSW_7_4_7/src/KLUBAnalysis/combiner/cards_MuTauprova/HHSM2b0jMcutBDTMT2/";
	cmb1 = ch.CombineHarvester()
	cmb1.SetFlag('workspaces-use-clone', True)

	cmd = "mkdir -p {0}".format(out_dir)
	print cmd
	regionName = ["","regB","regC","regD"]
	regionSuffix = ["SR","SStight","OSinviso","SSinviso"]
	status, output = commands.getstatusoutput(cmd)   
	#outFile = opt.outDir+"/chCard{0}{2}_{1}_{3}.txt".format(theLambda,opt.channel,regionName[region+1],select)
	thechannel = "1"
	if opt.channel == "MuTau" : thechannel="2"
	elif opt.channel == "TauTau" : thechannel = "3"

	if "0b0j" in select : theCat = "0"
	if "2b0j" in select : theCat = "2"
	elif "1b1j" in select : theCat = "1"
	elif "boosted" in select : theCat = "3"

	outFile = "hh_{0}_C{1}_L{2}_13TeV.txt".format(thechannel,theCat,theLambda)
	file = open( "temp.txt", "wb")


	#read config
	categories = []
	#for icat in range(len(input.selections)) :
	#	categories.append((icat, input.selections[icat]))
	categories.append((0,select))
	backgrounds=[]
	MCbackgrounds=[]
	processes=[]
	processes.append(lambdaName)
	inRoot = TFile.Open(opt.filename)
	for bkg in input.background:
		#Add protection against empty processes => If I remove this I could build all bins at once instead of looping on the selections
		templateName = "{0}_{1}_SR_{2}".format(bkg,select,variables[0])
		print templateName
		template = inRoot.Get(templateName)
		if template.Integral()>0.000001 :
			backgrounds.append(bkg)
			processes.append(bkg)
			if bkg != "QCD" :
				MCbackgrounds.append(bkg)

	#print backgrounds
	allQCD = False
	allQCDs = [0,0,0,0]
	for regionsuff in range(len(regionSuffix)) :
		for ichan in range(len(backgrounds)):
			if "QCD" in backgrounds[ichan] :
				fname = "data_obs"
				if regionSuffix[regionsuff] == "SR" :
					fname="QCD"
				templateName = "{0}_{1}_{3}_{2}".format(fname,select,variables[0],regionSuffix[regionsuff])
				template = inRoot.Get(templateName)
				#allQCDs.append(template.Integral())
				allQCDs[regionsuff]= allQCDs[regionsuff]+template.Integral()
				iQCD = ichan
			elif regionSuffix[regionsuff] != "SR" :
				templateName = "{0}_{1}_{3}_{2}".format(backgrounds[ichan],select,variables[0],regionSuffix[regionsuff])
				template = inRoot.Get(templateName)
				allQCDs[regionsuff] = allQCDs[regionsuff] - template.Integral()

	if allQCDs[0]>0 and allQCDs[1]>0 and allQCDs[2]>0 and allQCDs[3]>0 : allQCD = True
	for i in range(4) : print allQCDs[i]
	#add processes to CH
	#masses->125 
	#analyses->Res/non-Res(HHKin_fit,MT2)
	#eras->13TeV 
	#channels->mutau/tautau/etau 
	#bin->bjet categories
	#print signals, signals[0]
	cmb1.AddObservations([theLambda.replace(lambdaName,"")], variables, ['13TeV'], [opt.channel], categories)
	cmb1.AddProcesses([theLambda.replace(lambdaName,"")], variables, ['13TeV'], [opt.channel], backgrounds, categories, False)
	cmb1.AddProcesses([theLambda.replace(lambdaName,"")], variables, ['13TeV'], [opt.channel], [lambdaName], categories, True) #signals[0]

	if region < 0 :

		#Systematics (I need to add by hand the shape ones)
		#could replace theLambda with "signal"
		#syst = systReader("../config/systematics.cfg",[theLambda],backgrounds,file)
		syst = systReader("../config/systematics.cfg",[lambdaName],backgrounds,file)
		syst.writeOutput(False)
		syst.verbose(True)
		if(opt.channel == "TauTau" ): 
			syst.addSystFile("../config/systematics_tautau.cfg")
		elif(opt.channel == "MuTau" ): 
			syst.addSystFile("../config/systematics_mutau.cfg")
			#if(opt.isResonant):
			#	syst.addSystFile("../config/systematics_resonant.cfg")
			#else : syst.addSystFile("../config/systematics_nonresonant.cfg")
		elif(opt.channel == "ETau" ): 
			syst.addSystFile("../config/systematics_etau.cfg")
			#if(opt.isResonant):
			#	syst.addSystFile("../config/systematics_resonant.cfg")
			#else : syst.addSystFile("../config/systematics_nonresonant.cfg")
		if opt.theory : syst.addSystFile("../config/syst_th.cfg")
		syst.writeSystematics()

		for isy in range(len(syst.SystNames)) :
			if "CMS_scale_t" in syst.SystNames[isy] or "CMS_scale_j" in syst.SystNames[isy]: continue
			for iproc in range(len(syst.SystProcesses[isy])) :
				if "/" in syst.SystValues[isy][iproc] :
					f = syst.SystValues[isy][iproc].split("/")
					systVal = (float(f[0]),float(f[1]))
				else :
					systVal = float(syst.SystValues[isy][iproc])
				#print isy, iproc, systVal
				print "adding Syst",systVal,syst.SystNames[isy],syst.SystTypes[isy],"to",syst.SystProcesses[isy][iproc]
				cmb1.cp().process([syst.SystProcesses[isy][iproc]]).AddSyst(cmb1, syst.SystNames[isy],syst.SystTypes[isy],ch.SystMap('channel','bin_id')([opt.channel],[0],systVal))
		if opt.shapeUnc > 0:
			jesproc = MCbackgrounds
			jesproc.append(lambdaName)
			if "1b1j" in select and opt.channel == "TauTau" : jesproc.remove("DY0b")
			cmb1.cp().process(jesproc).AddSyst(cmb1, "CMS_scale_j_13TeV","shape",ch.SystMap('channel','bin_id')([opt.channel],[0],1.000))
			cmb1.cp().process(jesproc).AddSyst(cmb1, "CMS_scale_t_13TeV","shape",ch.SystMap('channel','bin_id')([opt.channel],[0],1.000))
			cmb1.cp().process(["TT"]).AddSyst(cmb1, "top","shape",ch.SystMap('channel','bin_id')([opt.channel],[0],1.000))

	    #	$BIN        --> proc.bin()
	    #	$PROCESS    --> proc.process()
	    #	$MASS       --> proc.mass()
	    #	$SYSTEMATIC --> syst.name()
#		cmb1.cp().ExtractShapes(
#			opt.filename,
#			"$PROCESS_$BIN_{1}_{0}".format(variables[0],regionSuffix[region+1]),
#			"$PROCESS_$BIN_{1}_{0}_$SYSTEMATIC".format(variables[0],regionSuffix[region+1]))
		cmb1.cp().backgrounds().ExtractShapes(
			opt.filename,
			"$PROCESS_$BIN_{1}_{0}".format(variables[0],regionSuffix[region+1]),
			"$PROCESS_$BIN_{1}_{0}_$SYSTEMATIC".format(variables[0],regionSuffix[region+1]))
		cmb1.cp().signals().ExtractShapes(
			opt.filename,
			"$PROCESS$MASS_$BIN_{1}_{0}".format(variables[0],regionSuffix[region+1]),
			"$PROCESS$MASS_$BIN_{1}_{0}_$SYSTEMATIC".format(variables[0],regionSuffix[region+1]))

		bbb = ch.BinByBinFactory()
		bbb.SetAddThreshold(0.1).SetMergeThreshold(0.5).SetFixNorm(True)
		bbbQCD = ch.BinByBinFactory()
		bbbQCD.SetAddThreshold(0.0).SetMergeThreshold(0.5).SetFixNorm(True)
		if opt.binbybin : 
			bbb.MergeBinErrors(cmb1.cp().process(MCbackgrounds))
			bbbQCD.MergeBinErrors(cmb1.cp().process(["QCD"]))
			bbbQCD.AddBinByBin(cmb1.cp().process(["QCD"]), cmb1)
			bbb.AddBinByBin(cmb1.cp().process(MCbackgrounds), cmb1)
		#cmb1.cp().PrintProcs().PrintSysts()

		#outroot = TFile.Open(opt.outDir+"/chCard{0}{2}_{1}_{3}.input.root".format(theLambda,opt.channel,regionName[region+1],select),"RECREATE")
		#outtxt = "hh_{0}_C{1}_L{2}_13TeV.txt".format(theChannel,theCat,theHHLambda)
		outroot = TFile.Open(out_dir+"hh_{0}_C{1}_L{2}_13TeV.input.root".format(thechannel,theCat,theLambda),"RECREATE")
		cmb1.WriteDatacard(out_dir+outFile,out_dir+"hh_{0}_C{1}_L{2}_13TeV.input.root".format(thechannel,theCat,theLambda))
		if allQCD :
			file = open( out_dir+outFile, "a")	
			file.write("alpha rateParam {0} QCD (@0*@1/@2) QCD_regB,QCD_regC,QCD_regD".format(select))
	elif allQCD :
		#print thechannel,theCat,theLambda #,regionName2[region+1]
		#outFile = "hh_{0}_C{1}_L{2}_13TeV.txt".format(thechannel,theCat,theLambda)
		#print region, allQCD
		#print regionName2[region+1]
		#print outFile
		#print "hh_"+thechannel#+"_C"+theCat+"_L"+theLambda+"_13TeV_"+regionName[region+1]+".txt"
		#print "hh_"+thechannel+"_C"+theCat#+"_L"+theLambda+"_13TeV_"+regionName[region+1]+".txt"
		#print "hh_"+thechannel+"_C"+theCat+"_L"+theLambda#+"_13TeV_"+regionName[region+1]+".txt"
		#print "hh_"+thechannel+"_C"+theCat+"_L"+theLambda+"_13TeV_"#+regionName[region+1]+".txt"
		#print outFile
		outFile = "hh_{0}_C{1}_L{2}_13TeV_{3}.txt".format(thechannel,theCat,theLambda,regionName[region+1])
		file = open( out_dir+outFile, "wb")

		file.write("imax 1\n")
		file.write("jmax {0}\n".format(len(backgrounds)-1))
		file.write("kmax *\n")

		file.write("------------\n")
		file.write("shapes * * FAKE\n".format(opt.channel,regionName[region+1]))
		file.write("------------\n")

		templateName = "data_obs_{1}_{3}_{2}".format(bkg,select,variables[0],regionSuffix[region+1])
		template = inRoot.Get(templateName)        
		file.write("bin {0} \n".format(select))
		obs = template.GetEntries()
		file.write("observation {0} \n".format(obs))

		file.write("------------\n")

		file.write("bin ")        
		for chan in backgrounds:
			file.write("{0} ".format(select))
		file.write("\n")      

		file.write("process ")
		for chan in backgrounds:
			file.write("{0} ".format(chan))
		#file.write("QCD ")
		file.write("\n")

		file.write("process ")
		for chan in range(len(backgrounds)): #+1 for the QCD
			file.write("{0} ".format(chan+1))
		file.write("\n")

		file.write("rate ")
		rates = []
		iQCD = -1
		totRate = 0
		for ichan in range(len(backgrounds)):
			if "QCD" in backgrounds[ichan] :
				rates.append(-1)
				iQCD = ichan
			else :
				templateName = "{0}_{1}_{3}_{2}".format(backgrounds[ichan],select,variables[0],regionSuffix[region+1])
				template = inRoot.Get(templateName)
				#print templateName
				brate = template.Integral()
				rates.append(brate)
				totRate = totRate + brate
		if iQCD >= 0 : rates[iQCD] = TMath.Max(0.0000001,obs-totRate)
		for ichan in range(len(backgrounds)):
			file.write("{0:.4f} ".format(rates[ichan]))
		file.write("\n")
		file.write("------------\n")
		file.write("QCD_{0} rateParam  {1} QCD 1 \n".format(regionName[region+1],select))
Example #8
def prepareShapes(backgrounds, signals, discriminant, discriminantName):
    # Backgrounds is a list of string of the considered backgrounds corresponding to entries in processes_mapping 
    # Signals is a list of string of the considered signals corresponding to entries in processes_mapping 
    # discriminant is the corresponding entry in the dictionary discriminants 

    import CombineHarvester.CombineTools.ch as ch
    root_path = options.root_path

    file, systematics = prepareFile(processes_mapping, discriminants, root_path, discriminantName)
    
    for signal in signals :
        cb = ch.CombineHarvester()
        cb.AddObservations(['*'], [''], ['_%s'%options.dataYear], [''], discriminant)
        cb.AddProcesses(['*'], [''], ['_%s'%options.dataYear], [''], [signal], discriminant, True)
        if options.dataYear == '2016':
            cb.AddProcesses(['*'], [''], ['_%s'%options.dataYear], [''], backgrounds, discriminant, False)
        else:
            if not 'b2j3' in discriminantName:
                try: backgrounds.remove('qcd')
                except: pass
            else:
                if not 'qcd' in backgrounds: backgrounds.append('qcd')
            if 'all' in discriminantName:
                if signal == 'Hut':
                    discriminant.remove((1, 'DNN_Hut_b2j3'))
                    cb.AddProcesses(['*'], [''], ['_%s'%options.dataYear], [''], backgrounds+['qcd'], [(1,'DNN_Hut_b2j3')], False)
                else:
                    discriminant.remove((1, 'DNN_Hct_b2j3'))
                    cb.AddProcesses(['*'], [''], ['_%s'%options.dataYear], [''], backgrounds+['qcd'], [(1,'DNN_Hct_b2j3')], False)
                cb.AddProcesses(['*'], [''], ['_%s'%options.dataYear], [''], backgrounds, discriminant, False)
                if signal == 'Hut': discriminant.append((1, 'DNN_Hut_b2j3'))
                else:               discriminant.append((1, 'DNN_Hct_b2j3'))
            else:
                cb.AddProcesses(['*'], [''], ['_%s'%options.dataYear], [''], backgrounds, discriminant, False)

        # Systematics
        if not options.nosys:
            for systematic in systematics:
                systematic_only_for_SMtt = False
                for systSMtt in options.sysForSMtt:
                    if CMSNamingConvention(systSMtt) == systematic:
                        systematic_only_for_SMtt = True
                if not systematic_only_for_SMtt:
                    cb.cp().AddSyst(cb, systematic, 'shape', ch.SystMap()(1.00))
                else:
                    #cb.cp().AddSyst(cb, '$PROCESS_'+systematic, 'shape', ch.SystMap('process')(['ttother', 'ttlf', 'ttbj', 'tthad', 'ttfullLep'], 1.00))
                    cb.cp().AddSyst(cb, systematic, 'shape', ch.SystMap('process')(smTTlist, 1.00))

            cb.cp().AddSyst(cb, 'CMS_lumi', 'lnN', ch.SystMap()(options.luminosityError))
            cb.cp().AddSyst(cb, 'tt_xsec', 'lnN', ch.SystMap('process')(['ttbb', 'ttcc', 'ttlf'], 1.055))
            cb.cp().AddSyst(cb, 'Other_xsec', 'lnN', ch.SystMap('process')(['other'], 1.1))

            for i in xrange(len(discriminant)):
                if 'b2j3' in discriminant[i][1]:
                    cb.cp().AddSyst(cb, '$PROCESS_norm', 'lnN', ch.SystMap('process')(['qcd'], 1.5))
            #if options.dataYear == '2016':
            #    cb.cp().AddSyst(cb, 'hdamp_2016', 'lnN', ch.SystMap('process')(['ttbb', 'ttcc', 'ttlf'], 1.05))
            #    cb.cp().AddSyst(cb, 'scale_2016', 'lnN', ch.SystMap('process')(['ttbb', 'ttcc', 'ttlf'], 1.15))
            #    for i in xrange(len(discriminant)):
            #        if 'j3' in discriminant[i][1]:
            #            cb.cp().AddSyst(cb, '$PROCESS_norm_j3', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttbb'], 1.5))
            #            cb.cp().AddSyst(cb, '$PROCESS_norm_j3', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttcc'], 1.5))
            #            cb.cp().AddSyst(cb, 'jec_2016', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttbb', 'ttcc', 'ttlf', 'other', signal], 1.01))
            #        else:
            #            cb.cp().AddSyst(cb, '$PROCESS_norm_j4', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttbb'], 1.5))
            #            cb.cp().AddSyst(cb, '$PROCESS_norm_j4', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttcc'], 1.5))
            #            cb.cp().AddSyst(cb, 'jec_2016', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttbb', 'ttcc', 'ttlf', 'other', signal], 1.05))#1.05 for j4
            #else:
            for i in xrange(len(discriminant)):
                if 'j3' in discriminant[i][1]:
                    cb.cp().AddSyst(cb, '$PROCESS_norm_j3', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttbb'], 1.3))
                    cb.cp().AddSyst(cb, '$PROCESS_norm_j3', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttcc'], 1.5))
                else:
                    cb.cp().AddSyst(cb, '$PROCESS_norm_j4', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttbb'], 1.3))
                    cb.cp().AddSyst(cb, '$PROCESS_norm_j4', 'lnN', ch.SystMap('bin', 'process')([discriminant[i][1]], ['ttcc'], 1.5))


        if options.SF :
            print "Background renormalization is deprecated! Exitting..."
            sys.exit(1)
            cb.cp().AddSyst(cb, 'SF_$PROCESS', 'rateParam', ch.SystMap('process')(['ttbb'], 1.))

        # Import shapes from ROOT file
        cb.cp().backgrounds().ExtractShapes(file, '$BIN/$PROCESS', '$BIN/$PROCESS__$SYSTEMATIC')
        cb.cp().signals().ExtractShapes(file, '$BIN/$PROCESS', '$BIN/$PROCESS__$SYSTEMATIC')

        #rebin = ch.AutoRebin().SetBinThreshold(100).SetBinUncertFraction(0.1)
        #rebin.Rebin(cb.cp(), cb)

        # Bin by bin uncertainties
        if not options.nobbb:
            print "Treating bbb"
            bbb = ch.BinByBinFactory()
            #bbb.SetAddThreshold(0.1).SetMergeThreshold(0.5).SetFixNorm(True)
            bbb.SetAddThreshold(0.1)
            #bbb.SetAddThreshold(0.0001)
            bbb.AddBinByBin(cb.cp().backgrounds(), cb)
            bbb.AddBinByBin(cb.cp().signals(), cb)
        else:
            print "Treating bbb ONLY for qcd"
            bbb = ch.BinByBinFactory()
            bbb.SetAddThreshold(0.1)
            bbb.AddBinByBin(cb.cp().backgrounds().process(['qcd']), cb)

        if options.nosys and options.nobbb : 
            cb.cp().AddSyst(cb, '$ERA_lumi', 'lnN', ch.SystMap('era')(['%s'%options.dataYear], 1.00001)) # Add a negligible systematic (chosen to be lumi) to trick combine

        output_prefix = 'FCNC_%s_Discriminant_%s' % (signal, discriminantName)

        output_dir = os.path.join(options.output, '%s' % (signal))
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

        fake_mass = '125'

        # Write card
        datacard = os.path.join(output_dir, output_prefix + '.dat')
        cb.cp().mass([fake_mass, "*"]).WriteDatacard(os.path.join(output_dir, output_prefix + '.dat'), os.path.join(output_dir, output_prefix + '_shapes.root'))

        # Write small script to compute the limit
        workspace_file = os.path.basename(os.path.join(output_dir, output_prefix + '_combine_workspace.root'))
        script = """#! /bin/bash

text2workspace.py {datacard} -m {fake_mass} -o {workspace_root}

# Run limit

echo combine -M AsymptoticLimits -n {name} {workspace_root} -S {systematics} --run expected #-v +2
combine -M AsymptoticLimits -n {name} {workspace_root} -S {systematics} --run expected #-v +2
#combine -H AsymptoticLimits -M HybridNew -n {name} {workspace_root} -S {systematics} --LHCmode LHC-limits --expectedFromGrid 0.5 #for expected, use 0.84 and 0.16
""".format(workspace_root=workspace_file, datacard=os.path.basename(datacard), name=output_prefix, fake_mass=fake_mass, systematics=(0 if options.nosys else 1))
        script_file = os.path.join(output_dir, output_prefix + '_run_limits.sh')
        with open(script_file, 'w') as f:
            f.write(script)
        
        st = os.stat(script_file)
        os.chmod(script_file, st.st_mode | stat.S_IEXEC)


        # Write small script for datacard checks
        script = """#! /bin/bash

# Run checks
echo combine -M FitDiagnostics -t -1 --expectSignal 0 {datacard} -n fitDiagnostics_{name}_bkgOnly -m 125 --robustHesse 1 --robustFit=1 --rMin -20 --rMax 20 #--plots
echo python ../../../../HiggsAnalysis/CombinedLimit/test/diffNuisances.py -a fitDiagnostics_{name}_bkgOnly.root -g fitDiagnostics_{name}_bkgOnly_plots.root
combine -M FitDiagnostics -t -1 --expectSignal 0 {datacard} -n _{name}_bkgOnly -m 125 --robustHesse 1 --robustFit=1 --rMin -20 --rMax 20 #--plots
python ../../../../HiggsAnalysis/CombinedLimit/test/diffNuisances.py -a fitDiagnostics_{name}_bkgOnly.root -g fitDiagnostics_{name}_bkgOnly_plots.root > fitDiagnostics_{name}_bkgOnly.log
python ../../printPulls.py fitDiagnostics_{name}_bkgOnly_plots.root
combine -M FitDiagnostics -t -1 --expectSignal 1 {datacard} -n _{name}_bkgPlusSig -m 125 --robustHesse 1 --robustFit=1 --rMin -20 --rMax 20 #--plots
python ../../../../HiggsAnalysis/CombinedLimit/test/diffNuisances.py -a fitDiagnostics_{name}_bkgPlusSig.root -g fitDiagnostics_{name}_bkgPlusSig_plots.root > fitDiagnostics_{name}_bkgPlusSig.log
python ../../printPulls.py fitDiagnostics_{name}_bkgPlusSig_plots.root

#print NLL for check
combineTool.py -M FastScan -w {name}_combine_workspace.root:w -o {name}_nll
""".format(workspace_root=workspace_file, datacard=os.path.basename(datacard), name=output_prefix, fake_mass=fake_mass, systematics=(0 if options.nosys else 1))
        script_file = os.path.join(output_dir, output_prefix + '_run_closureChecks.sh')
        with open(script_file, 'w') as f:
            f.write(script)
        
        st = os.stat(script_file)
        os.chmod(script_file, st.st_mode | stat.S_IEXEC)

        # Write small script for impacts
        script = """#! /bin/bash

# Run impacts
combineTool.py -M Impacts -d {name}_combine_workspace.root -m 125 --doInitialFit --robustFit=1 --robustHesse 1 --rMin -20 --rMax 20 -t -1
combineTool.py -M Impacts -d {name}_combine_workspace.root -m 125 --robustFit=1 --robustHesse 1 --doFits --rMin -20 --rMax 20 -t -1 --parallel 32
combineTool.py -M Impacts -d {name}_combine_workspace.root -m 125 -o {name}_expected_impacts.json --rMin -20 --rMax 20 -t -1
plotImpacts.py -i {name}_expected_impacts.json -o {name}_expected_impacts --per-page 40

combineTool.py -M Impacts -d {name}_combine_workspace.root -m 125 --doInitialFit --robustFit=1 --robustHesse 1 --rMin -20 --rMax 20
combineTool.py -M Impacts -d {name}_combine_workspace.root -m 125 --robustFit=1 --doFits --robustHesse 1 --rMin -20 --rMax 20 --parallel 32
combineTool.py -M Impacts -d {name}_combine_workspace.root -m 125 -o {name}_impacts.json --rMin -20 --rMax 20
plotImpacts.py -i {name}_impacts.json -o {name}_impacts --per-page 40
""".format(workspace_root=workspace_file, datacard=os.path.basename(datacard), name=output_prefix, fake_mass=fake_mass, systematics=(0 if options.nosys else 1))
        script_file = os.path.join(output_dir, output_prefix + '_run_impacts.sh')
        with open(script_file, 'w') as f:
            f.write(script)
        
        st = os.stat(script_file)
        os.chmod(script_file, st.st_mode | stat.S_IEXEC)

        # Write small script for postfit shapes
        script = """#! /bin/bash

# Run postfit
echo combine -M FitDiagnostics {datacard} -n _{name}_postfit --saveNormalizations --saveShapes --saveWithUncertainties --preFitValue 0 --rMin -20 --rMax 20 --robustHesse 1 --robustFit=1 -v 1
combine -M FitDiagnostics {datacard} -n _{name}_postfit --saveNormalizations --saveShapes --saveWithUncertainties --preFitValue 0 --rMin -20 --rMax 20 --robustHesse 1 --robustFit=1 -v 1 #--plots
PostFitShapesFromWorkspace -w {name}_combine_workspace.root -d {datacard} -o postfit_shapes_{name}.root -f fitDiagnostics_{name}_postfit.root:fit_b --postfit --sampling
python ../../convertPostfitShapesForPlotIt.py -i postfit_shapes_{name}.root
$CMSSW_BASE/src/UserCode/HEPToolsFCNC/plotIt/plotIt -o postfit_shapes_{name}_forPlotIt ../../postfit_plotIt_config_{coupling}_{year}.yml -y
$CMSSW_BASE/src/UserCode/HEPToolsFCNC/plotIt/plotIt -o postfit_shapes_{name}_forPlotIt ../../postfit_plotIt_config_{coupling}_{year}_qcd.yml -y
""".format(workspace_root=workspace_file, datacard=os.path.basename(datacard), name=output_prefix, fake_mass=fake_mass, systematics=(0 if options.nosys else 1), coupling=("Hut" if "Hut" in output_prefix else "Hct"), year=options.dataYear)
        script_file = os.path.join(output_dir, output_prefix + '_run_postfit.sh')
        with open(script_file, 'w') as f:
            f.write(script)
        
        st = os.stat(script_file)
        os.chmod(script_file, st.st_mode | stat.S_IEXEC)
Example #9
        cb.cp().process(['QCDmujets']).AddSyst(cb, 'QCD_mu_norm', 'lnN',
                                               ch.SystMap()(2.))
    if args.limit == 'electrons' or args.limit == 'cmb':
        cb.cp().process(['QCDejets']).AddSyst(cb, 'QCD_el_norm', 'lnN',
                                              ch.SystMap()(2.))

    print '>> Extracting histograms from input root files...'
    in_file = args.inputfile
    cb.cp().backgrounds().ExtractShapes(in_file, '$BIN/$PROCESS',
                                        '$BIN/$PROCESS_$SYSTEMATIC')
    cb.cp().signals().ExtractShapes(in_file, '$BIN/$PROCESS$MASS',
                                    '$BIN/$PROCESS$MASS_$SYSTEMATIC')
    # in_file, '$BIN/$PROCESS', '$BIN/$PROCESS__$SYSTEMATIC')

    if addBBB:
        bbb = ch.BinByBinFactory().SetAddThreshold(0.).SetFixNorm(False)
        bbb.MergeBinErrors(cb.cp().backgrounds())
        bbb.AddBinByBin(cb.cp().backgrounds(), cb)

    print '>> Setting standardised bin names...'
    ch.SetStandardBinNames(cb)
    cb.PrintAll()

    writer = ch.CardWriter('$TAG/$MASS/$ANALYSIS_$CHANNEL_$BINID.txt',
                           '$TAG/$ANALYSIS_$CHANNEL.input.root')
    # writer.SetVerbosity(100)
    writer.WriteCards('output/{mode}'.format(mode=mode), cb)
    print 'Try writing cards...'
    # import ROOT
    # f_out = ROOT.TFile('andrey_out.root', 'RECREATE')
    # cb.WriteDatacard("andrey_out.txt", 'andrey_out.root')