def morph_hist(self, file, directory, name, lower, upper, value) :
     """
     Load histograms with name NAME that correspond to upper and lower bound,
     determine morphed histogram corresponding to VALUE, and write the morphed
     histogram to file.
     """
     #print "loading fingerprint: ", file, directory, name
     hist_lower = self.zero_safe(self.load_hist(file, directory, name.format(MASS=lower)))
     hist_upper = self.zero_safe(self.load_hist(file, directory, name.format(MASS=upper)))
     norm = self.norm_hist(hist_lower, hist_upper, float(lower), float(upper), float(value))
     if self.trivial :
         if abs(float(value)-float(lower)) < abs(float(upper)-float(value)) :
             hist_morph = hist_lower.Clone(name.format(MASS=value)); hist_morph.SetTitle(name.format(MASS=value)); hist_morph.Scale(norm/hist_morph.Integral())
         else :
             hist_morph = hist_upper.Clone(name.format(MASS=value)); hist_morph.SetTitle(name.format(MASS=value)); hist_morph.Scale(norm/hist_morph.Integral())
     else :
         hist_morph = th1fmorph(name.format(MASS=value),name.format(MASS=value),hist_lower, hist_upper, float(lower), float(upper), float(value), norm, 0)
     # th1fmorph() will set a value null if you are right on top of it
     if not hist_lower and lower == value:
         hist_lower = hist_morph
     if not hist_upper and upper == value:
         hist_upper = hist_morph
     if self.verbose :
         print "writing morphed histogram to file: name =", hist_morph.GetName(), "integral =[ %.5f | %.5f | %.5f ]" % (hist_lower.Integral(), hist_morph.Integral(), hist_upper.Integral())
     if directory == "" :
         file.cd()
     else :
         file.cd(directory)
     hist_morph.Write(hist_morph.GetName())
 def morph_hist(self, file, directory, name, lower, upper, value):
     """
     Load histograms with name NAME that correspond to upper and lower bound,
     determine morphed histogram corresponding to VALUE, and write the morphed
     histogram to file.
     """
     #print "loading fingerprint: ", file, directory, name
     hist_lower = self.zero_safe(
         self.load_hist(file, directory, name.format(MASS=lower)))
     hist_upper = self.zero_safe(
         self.load_hist(file, directory, name.format(MASS=upper)))
     norm = self.norm_hist(hist_lower, hist_upper, float(lower),
                           float(upper), float(value))
     hist_morph = th1fmorph(name.format(MASS=value),
                            name.format(MASS=value), hist_lower, hist_upper,
                            float(lower), float(upper), float(value), norm,
                            0)
     # th1fmorph() will set a value null if you are right on top of it
     if not hist_lower and lower == value:
         hist_lower = hist_morph
     if not hist_upper and upper == value:
         hist_upper = hist_morph
     if self.verbose:
         print "writing morphed histogram to file: name =", hist_morph.GetName(
         ), "integral =[ %.5f | %.5f | %.5f ]" % (hist_lower.Integral(),
                                                  hist_morph.Integral(),
                                                  hist_upper.Integral())
     if directory == "":
         file.cd()
     else:
         file.cd(directory)
     hist_morph.Write(hist_morph.GetName())
Exemplo n.º 3
0
def morph(name, title, x, hist1, x1, hist2, x2):
    '''
    Given two histograms, <hist1> and <hist2>, which correspond to the
    parameters <x1> and <x2>, return a TH1 with the given name and title,
    morphed to correspond to parameter <x>.

    The total yield for the target histogram is interpolated between the two
    base histograms.
    '''
    # interpolate the normalization between the two base yields
    target_yield = interpolate(x1, hist1.Integral(), x2, hist2.Integral(), x)

    return th1fmorph(name, title, hist1, hist2, x1, x2, x, target_yield, 0)
Exemplo n.º 4
0
def morph(name, title, x, hist1, x1, hist2, x2):
    '''
    Morph <hist1> (at parameter <x1>) and <hist2> (at parameter <x2>) into a
    TH1 with the given name and title, corresponding to parameter <x>.

    The total yield for the target histogram is interpolated between the two
    base histograms.
    '''
    # base yields at the two anchor points
    y_lo, y_hi = hist1.Integral(), hist2.Integral()

    # normalization of the target, linearly interpolated in the parameter
    norm = interpolate(x1, y_lo, x2, y_hi, x)

    return th1fmorph(name, title, hist1, hist2, x1, x2, x, norm, 0)
 def single_template(self, dir, proc, mass, label, scale, MODE='MORPHED', debug=False) :
     """
     Return a single template histogram for a given dir, proc, mass and label. The histogram will be scaled by scale
     (corresponding to the cross section times BR of the corresponding Higgs boson). Scale will be modified by a linear
     interpolation scale taking into account differences in acceptance and reconstruction efficiency as a function of
     mass. Two modes exist to determine the template: MORPHED - will use horizontal template morphing (the morphing will
     always be applied from the pivotal, which is closest to mass to minimize uncertainties from the interpolation);
     NEAREST_NEIGHBOUR - will use the closest mass point in the list of pivotals w/o any additional horizontal inter-
     polation. Any other MODE raises a ValueError (previously the function crashed with an UnboundLocalError).
     Returns None if no pivotal mass window exists for the given dir.
     """
     ## special case: Htaunu analyses may come w/o a mass point; just clone and scale
     if self.ana_type=="Htaunu" and mass=="" :
         single_template = self.load_hist(dir+'/'+proc+label).Clone(proc+label+'_template')
         single_template.Scale(scale)
         return single_template
     ## window of closest pivotal masses below/above mass. Window can be None, if no pivotals exist for a given dir. In
     ## this case return None
     window = self.pivotal_mass_window(float(mass), self.pivotals[dir])
     if not window :
         return None
     if float(window[0]) == float(mass) and float(mass) == float(window[1]) :
         ## exact match with pivotal: clone exact pivotal
         single_template = self.load_hist(dir+'/'+proc+mass+label).Clone(proc+mass+label+'_template')
         single_template.Scale(scale)
     elif float(window[0]) > float(mass) :
         ## mass out of bounds of pivotals (too small): fall back to smallest pivotal
         single_template = self.load_hist(dir+'/'+proc+window[0]+label).Clone(proc+window[0]+label+'_template')
         single_template.Scale(scale)
     elif float(window[1]) < float(mass) :
         ## mass out of bounds of pivotals (too large): fall back to largest pivotal
         single_template = self.load_hist(dir+'/'+proc+window[1]+label).Clone(proc+window[1]+label+'_template')
         single_template.Scale(scale)
     else :
         ## mass somewhere between pivotals: masses is the tuple of the embracing pivotals, histos is the tuple of
         ## corresponding template histograms. The closest pivotal to mass is the first element in each of the tuples,
         ## the further away pivotal is second.
         if (float(mass) - float(window[0])) < (float(window[1]) - float(mass)) :
             ## lower bound pivotal closer to mass
             masses = (float(window[0]),float(window[1]))
             histos = (self.load_hist(dir+'/'+proc+window[0]+label),self.load_hist(dir+'/'+proc+window[1]+label))
         else :
             ## upper bound pivotal closer to mass
             masses = (float(window[1]),float(window[0]))
             histos = (self.load_hist(dir+'/'+proc+window[1]+label),self.load_hist(dir+'/'+proc+window[0]+label))
         ## correct scale for acceptance/efficiency differences as a function of mass
         scale*= self.interpolation_scale((float(masses[0]),histos[0].Integral()), (float(masses[1]),histos[1].Integral()), float(mass), debug)
         if MODE == 'MORPHED' :
             single_template = th1fmorph(proc+str(mass)+label+'_template', proc+mass+label, histos[0], histos[1], masses[0], masses[1], float(mass), scale*histos[0].Integral(), 0)
         elif MODE == 'NEAREST_NEIGHBOUR' :
             single_template = histos[0].Clone(proc+str(mass)+label+'_template')
             single_template.Scale(scale)
         else :
             ## guard against silent fall-through: previously an unknown MODE
             ## left single_template unbound and raised UnboundLocalError
             raise ValueError("single_template: unknown MODE '%s'" % MODE)
     return single_template
 def morph_hist(self, file, directory, name, lower, upper, value) :
     """
     Load histograms with name NAME that correspond to upper and lower bound,
     determine morphed histogram corresponding to VALUE, and write the morphed
     histogram to file. Histograms whose name carries the '_fine_binning'
     label are rebinned back to the standard binning before being written,
     and the label is stripped from the stored name.
     """
     #print "loading fingerprint: ", file, directory, name
     hist_lower = self.zero_safe(self.load_hist(file, directory, name.format(MASS=lower)))
     hist_upper = self.zero_safe(self.load_hist(file, directory, name.format(MASS=upper)))
     # normalization for the morphed template, interpolated between bounds
     norm = self.norm_hist(hist_lower, hist_upper, float(lower), float(upper), float(value))
     if self.trivial :
         # trivial mode: clone the pivotal closest to VALUE and rescale it
         if abs(float(value)-float(lower)) < abs(float(upper)-float(value)) :
             hist_morph = hist_lower.Clone(name.format(MASS=value)); hist_morph.SetTitle(name.format(MASS=value)); hist_morph.Scale(norm/hist_morph.Integral())
         else :
             hist_morph = hist_upper.Clone(name.format(MASS=value)); hist_morph.SetTitle(name.format(MASS=value)); hist_morph.Scale(norm/hist_morph.Integral())
     else :
         hist_morph = th1fmorph(name.format(MASS=value),name.format(MASS=value),hist_lower, hist_upper, float(lower), float(upper), float(value), norm, 0)
     # th1fmorph() will set a value null if you are right on top of it
     if not hist_lower and lower == value:
         hist_lower = hist_morph
     if not hist_upper and upper == value:
         hist_upper = hist_morph
     if self.verbose :
         if not "_fine_binning" in name:
             print "writing morphed histogram to file: name =", hist_morph.GetName(), "integral =[ %.5f | %.5f | %.5f ]" % (hist_lower.Integral(), hist_morph.Integral(), hist_upper.Integral())
         else: 
             print "writing morphed '_fine_binning' histogram to file: name =", hist_morph.GetName().replace("_fine_binning",""), "integral =[ %.5f | %.5f | %.5f ]" % (hist_lower.Integral(), hist_morph.Integral(), hist_upper.Integral())
     if directory == "" :
         file.cd()
     else :
         file.cd(directory)
     if "_fine_binning" in name:
         #rebin to the usual binning using the histogram without the '_fine_binning' label
         # NOTE(review): GetName() is already fully formatted, so the trailing
         # .format(MASS=lower) is a no-op on a placeholder-free string — confirm
         self.rebin_hist(hist_morph,self.load_hist(file, directory, hist_lower.GetName().replace("_fine_binning","").format(MASS=lower)))
         #save removing 'fine_binning' from the name
         hist_morph.Write(hist_morph.GetName().replace("_fine_binning",""))
     else : 
         hist_morph.Write(hist_morph.GetName())
Exemplo n.º 7
0
def main(args, opt):
	"""
	Build the pseudo-experiment input file (pe_inputs.root).

	args[0] is the directory holding the per-process .root files; opt carries
	steering options (outDir, rebin, ...). Background, data, systematics and
	mass-scan histograms are read from pickle caches produced by earlier
	steps, combined with luminosity/xsec scales, and written to
	<opt.outDir>/pe_inputs.root. Returns 0 on success.
	"""
	os.system('mkdir -p %s'%opt.outDir)
	mcfiles = {}   # procname -> filename
	datafiles = {} # procname -> filename
	try:
		for fname in os.listdir(args[0]):
			if not osp.splitext(fname)[1] == '.root': continue
			isdata,procname,splitno = resolveFilename(fname)
			if isdata:
				if not procname in datafiles:
					datafiles[procname] = []
				datafiles[procname].append(osp.join(args[0],fname))
			else:
				if 'QCD' in procname:                   continue ## exclude QCD
				if procname == 'TTJets_MSDecays_172v5': continue ## have those already
				if 'SingleT' in procname:               continue ## have those already

				if not procname in mcfiles:
					mcfiles[procname] = []
				mcfiles[procname].append(osp.join(args[0],fname))

	except IndexError:
		print "Please provide a valid input directory"
		exit(-1)


	## Produce (or read) the histogram data
	bghistos = makeBackgroundHistos(mcfiles, opt)

	cachefile = open(".xsecweights.pck", 'r')
	xsecweights = pickle.load(cachefile)
	cachefile.close()
	print '>>> Read xsec weights from cache (.xsecweights.pck)'

	cachefile = open(".svldyscalefactors.pck", 'r')
	dySFs = pickle.load(cachefile)
	cachefile.close()
	print '>>> Read DY scale factors from cache (.svldyscalefactors.pck)'

	cachefile = open(".svlqcdtemplates.pck", 'r')
	qcdTemplates = pickle.load(cachefile)
	cachefile.close()
	print '>>> Read QCD templates from cache (.svlqcdtemplates.pck)'

	## Read SV Track multiplicity weights:
	from extractNtrkWeights import extractNTrkWeights
	ntkWeights = extractNTrkWeights()

	## Now add them up with proper scales
	mcprocesses = [k for k in mcfiles.keys() if not 'Data8TeV' in k]
	bghistos_added = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt)

	## DY normalization varied by +/-30%, QCD by +/-10%, for the shape systs
	bghistos_added_dyup = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt,
		                         dyScale=1.3)
	bghistos_added_dydn = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt,
		                         dyScale=0.7)
	bghistos_added_qcdup = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt,
		                         qcdScale=1.1)
	bghistos_added_qcddn = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt,
		                         qcdScale=0.9)

	## Produce data histograms
	datahistos = makeDataHistos(datafiles, opt)
	datahistos_added = sumDataHistos(datafiles.keys(), datahistos)
	# Rebin also data, if required:
	if opt.rebin>0:
		for hist in datahistos_added.values():
			hist.Rebin(opt.rebin)

	## Save the background only shapes separately as templates for the fit
	cachefile = open(".svlbgtemplates.pck", 'w')
	pickle.dump(bghistos_added, cachefile, pickle.HIGHEST_PROTOCOL)
	print '>>> Dumped bg templates to cache (.svlbgtemplates.pck)'
	cachefile.close()

	## Read syst histos:
	cachefile = open(".svlsysthistos.pck", 'r')
	systhistos = pickle.load(cachefile)
	print '>>> Read systematics histograms from cache (.svlsysthistos.pck)'
	cachefile.close()

	## Read mass scan histos:
	cachefile = open(".svlmasshistos.pck", 'r')
	masshistos = pickle.load(cachefile)
	print '>>> Read mass scan histograms from cache (.svlmasshistos.pck)'
	# (tag, chan, mass, comb)      -> histo
	# (tag, chan, mass, comb, ntk) -> histo
	cachefile.close()

	ofi = ROOT.TFile.Open(osp.join(opt.outDir,'pe_inputs.root'),'RECREATE')
	ofi.cd()

	#####################################################
	## Central mass point and syst samples
	for syst in ([s for s,_,_,_ in ALLSYSTS] +
	             ['dyup','dydown','qcdup','qcddown','ntkmult']):
		odir = ofi.mkdir(syst + '_172v5')
		odir.cd()
		for tag,_,_ in SELECTIONS:
			for ntk,_ in NTRKBINS:
				hname = "SVLMass_%s_%s_%s" % (tag,syst+'_172v5',ntk)
				## weight-based systs have their own histogram; the rest start
				## from the nominal shape
				if not syst in ['dyup','dydown','qcdup','qcddown','ntkmult',
				                'tchscaleup','tchscaledown',
				                'twchscaleup','twchscaledown']:
					hfinal = systhistos[(tag,syst,'tot',ntk)].Clone(hname)
				else:
					hfinal = systhistos[(tag,'nominal','tot',ntk)].Clone(hname)
				try:
					## Systs from separate samples
					if syst in ['tchscaleup','tchscaledown',
					            'twchscaleup','twchscaledown']:
						scale = LUMI*xsecweights[CHANMASSTOPROCNAME[('tt', 172.5)]]
					else:
						scale = LUMI*xsecweights[SYSTTOPROCNAME[syst][0]]
				except KeyError:
					## Systs from event weights
					scale = LUMI*xsecweights[CHANMASSTOPROCNAME[('tt', 172.5)]]
				hfinal.Scale(scale)

				## Renormalize some variations with event weights
				if syst in SYSTSTOBERENORMALIZED:
					normintegral = systhistos[(tag,'nominal','tot',ntk)].Integral()
					normintegral *= LUMI*xsecweights[CHANMASSTOPROCNAME[('tt', 172.5)]]
					normintegral /= hfinal.Integral()
					hfinal.Scale(normintegral)

				## Add single top
				stProcs=['t', 'tbar', 'tW', 'tbarW']
				stSystProcs=[]
				if 'tchscale' in syst:
					stProcs=['tW', 'tbarW']
					stSystProcs=['t', 'tbar']
				if 'twchscale' in syst:
					stProcs=['t', 'tbar']
					stSystProcs=['tW', 'tbarW']
				for st in stProcs:
					hsinglet = masshistos[(tag, st, 172.5,'tot',ntk)].Clone('%s_%s'%(hname,st))
					hsinglet.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, 172.5)]])
					hfinal.Add(hsinglet)
				for st in stSystProcs:
					hsinglet = systhistos[(tag, syst, 'tot', ntk)].Clone('%s_%s'%(hname,st))
					hsinglet.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, 172.5)]])
					hfinal.Add(hsinglet)


				## Add the backgrounds
				if not syst in ['dyup','dydown','qcdup','qcddown']:
					hfinal.Add(bghistos_added[(tag,ntk)])
				else: ## From the scaled bghistos if necessary
					bghistos_added_scaled = {
						'dyup'    : bghistos_added_dyup,
						'dydown'  : bghistos_added_dydn,
						'qcdup'   : bghistos_added_qcdup,
						'qcddown' : bghistos_added_qcddn,
					}[syst]
					hfinal.Add(bghistos_added_scaled[(tag,ntk)])

				## Rebin if requested
				if opt.rebin>0:
					hfinal.Rebin(opt.rebin)

				## Scale by SV track multiplicity weights:
				if not syst == 'ntkmult':
					hfinal.Scale(ntkWeights['inclusive'][ntk])

				## Write out to file
				hfinal.Write(hname, ROOT.TObject.kOverwrite)

	#####################################################
	## Non-central mass points
	ROOT.gSystem.Load('libUserCodeTopMassSecVtx.so')
	from ROOT import th1fmorph
	# extract mass points from dictionary
	mass_points = sorted(list(set([key[2] for key in masshistos.keys()])))
	mass_points = mass_points[1:-1] # remove outermost points
	debughistos = []
	for mass in mass_points:
		if mass == 172.5: continue
		mname = 'nominal_%s' % str(mass).replace('.','v')
		odir = ofi.mkdir(mname)
		odir.cd()
		for tag,_,_ in SELECTIONS:
			for ntk,_ in NTRKBINS:
				hname = "SVLMass_%s_%s_%s" % (tag,mname,ntk)
				hfinal = masshistos[(tag,'tt',mass,'tot',ntk)].Clone(hname)
				hfinal.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[('tt', mass)]])

				## Add single top (t-channel, for which we have the samples)
				for st in ['t', 'tbar']:
					hsinglet = masshistos[(tag, st, mass,'tot',ntk)].Clone('%s_%s'%(hname,st))
					hsinglet.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, mass)]])
					hfinal.Add(hsinglet)

				## Add single top (tW-channel, for which we don't have samples)
				## Morph between the two extreme mass points to get
				## the non existing ones
				for st in ['tW', 'tbarW']:
					if mass not in [166.5, 178.5]:
						hsingletW = th1fmorph('%s_%s_morph'%(hname,st),
							                  '%s_%s_morphed'%(hname,st),
							                   masshistos[(tag, 'tW', 166.5,'tot',ntk)],
							                   masshistos[(tag, 'tW', 178.5,'tot',ntk)],
							                   166.5, 178.5, mass,
							                   masshistos[(tag, 'tW', 166.5,'tot',ntk)].Integral())
						hsingletW.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, 166.5)]]
							                * TWXSECS[mass]/TWXSECS[166.5])
						hsingletW.SetDirectory(0)
					else:
						hsingletW = masshistos[(tag, st, mass,'tot',ntk)].Clone('%s_%s'%(hname,st))
						hsingletW.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, mass)]])
					hfinal.Add(hsingletW)

				## Add the combined backgrounds
				hfinal.Add(bghistos_added[(tag,ntk)])

				## Rebin if requested
				if opt.rebin>0:
					hfinal.Rebin(opt.rebin)

				## Scale by SV track multiplicity weights:
				hfinal.Scale(ntkWeights['inclusive'][ntk])

				## Write out to file
				hfinal.Write(hname, ROOT.TObject.kOverwrite)

	## Write also data histos
	ofi.cd()
	odir = ofi.mkdir('data')
	odir.cd()
	for tag,_,_ in SELECTIONS:
		for ntk,_ in NTRKBINS:
			hname = "SVLMass_%s_data_%s" % (tag,ntk)
			datahistos_added[(tag,ntk)].Write(hname, ROOT.TObject.kOverwrite)


	print ('>>> Wrote pseudo experiment inputs to file (%s)' %
		                      osp.join(opt.outDir,'pe_inputs.root'))

	ofi.Write()
	ofi.Close()

	return 0
Exemplo n.º 8
0
def main(args, opt):
    os.system('mkdir -p %s' % opt.outDir)
    mcfiles = {}  # procname -> filename
    datafiles = {}  # procname -> filename
    try:
        for fname in os.listdir(args[0]):
            if not osp.splitext(fname)[1] == '.root': continue
            isdata, procname, splitno = resolveFilename(fname)
            if isdata:
                if not procname in datafiles:
                    datafiles[procname] = []
                datafiles[procname].append(osp.join(args[0], fname))
            else:
                if 'QCD' in procname: continue  ## exclude QCD
                if procname == 'TTJets_MSDecays_172v5':
                    continue  ## have those already
                if 'SingleT' in procname: continue  ## have those already

                if not procname in mcfiles:
                    mcfiles[procname] = []
                mcfiles[procname].append(osp.join(args[0], fname))

    except IndexError:
        print "Please provide a valid input directory"
        exit(-1)

    ## Produce (or read) the histogram data
    bghistos = makeBackgroundHistos(mcfiles, opt)

    cachefile = open(".xsecweights.pck", 'r')
    xsecweights = pickle.load(cachefile)
    cachefile.close()
    print '>>> Read xsec weights from cache (.xsecweights.pck)'

    cachefile = open(".svldyscalefactors.pck", 'r')
    dySFs = pickle.load(cachefile)
    cachefile.close()
    print '>>> Read DY scale factors from cache (.svldyscalefactors.pck)'

    cachefile = open(".svlqcdtemplates.pck", 'r')
    qcdTemplates = pickle.load(cachefile)
    cachefile.close()
    print '>>> Read QCD templates from cache (.svlqcdtemplates.pck)'

    ## Read SV Track multiplicity weights:
    from extractNtrkWeights import extractNTrkWeights
    ntkWeights = extractNTrkWeights()

    ## Now add them up with proper scales
    mcprocesses = [k for k in mcfiles.keys() if not 'Data8TeV' in k]
    bghistos_added = sumBGHistos(processes=mcprocesses,
                                 bghistos=bghistos,
                                 xsecweights=xsecweights,
                                 ntkWeights=ntkWeights,
                                 dySFs=dySFs,
                                 qcdTemplates=qcdTemplates,
                                 opt=opt)

    bghistos_added_dyup = sumBGHistos(processes=mcprocesses,
                                      bghistos=bghistos,
                                      xsecweights=xsecweights,
                                      ntkWeights=ntkWeights,
                                      dySFs=dySFs,
                                      qcdTemplates=qcdTemplates,
                                      opt=opt,
                                      dyScale=1.3)
    bghistos_added_dydn = sumBGHistos(processes=mcprocesses,
                                      bghistos=bghistos,
                                      xsecweights=xsecweights,
                                      ntkWeights=ntkWeights,
                                      dySFs=dySFs,
                                      qcdTemplates=qcdTemplates,
                                      opt=opt,
                                      dyScale=0.7)
    bghistos_added_qcdup = sumBGHistos(processes=mcprocesses,
                                       bghistos=bghistos,
                                       xsecweights=xsecweights,
                                       ntkWeights=ntkWeights,
                                       dySFs=dySFs,
                                       qcdTemplates=qcdTemplates,
                                       opt=opt,
                                       qcdScale=1.1)
    bghistos_added_qcddn = sumBGHistos(processes=mcprocesses,
                                       bghistos=bghistos,
                                       xsecweights=xsecweights,
                                       ntkWeights=ntkWeights,
                                       dySFs=dySFs,
                                       qcdTemplates=qcdTemplates,
                                       opt=opt,
                                       qcdScale=0.9)

    ## Produce data histograms
    datahistos = makeDataHistos(datafiles, opt)
    datahistos_added = sumDataHistos(datafiles.keys(), datahistos)
    # Rebin also data, if required:
    if opt.rebin > 0:
        for hist in datahistos_added.values():
            hist.Rebin(opt.rebin)

    ## Save the background only shapes separately as templates for the fit
    cachefile = open(".svlbgtemplates.pck", 'w')
    pickle.dump(bghistos_added, cachefile, pickle.HIGHEST_PROTOCOL)
    print '>>> Dumped bg templates to cache (.svlbgtemplates.pck)'
    cachefile.close()

    ## Read syst histos:
    cachefile = open(".svlsysthistos.pck", 'r')
    systhistos = pickle.load(cachefile)
    print '>>> Read systematics histograms from cache (.svlsysthistos.pck)'
    cachefile.close()

    ## Read mass scan histos:
    cachefile = open(".svlmasshistos.pck", 'r')
    masshistos = pickle.load(cachefile)
    print '>>> Read mass scan histograms from cache (.svlmasshistos.pck)'
    # (tag, chan, mass, comb)      -> histo
    # (tag, chan, mass, comb, ntk) -> histo
    cachefile.close()

    ## Signal only (tt+t+tW) shapes
    signalonly = {}

    ofi = ROOT.TFile.Open(osp.join(opt.outDir, 'pe_inputs.root'), 'RECREATE')
    ofi.cd()

    #####################################################
    ## Central mass point and syst samples
    to_be_processed = ([s for s, _, _, _ in ALLSYSTS] +
                       ['dyup', 'dydown', 'qcdup', 'qcddown', 'ntkmult'])
    if opt.skip_systs: to_be_processed = ['nominal']

    for syst in to_be_processed:
        odir = ofi.mkdir(syst + '_172v5')
        odir.cd()
        for tag, _, _ in SELECTIONS:
            for ntk, _ in NTRKBINS:
                hname = "SVLMass_%s_%s_%s" % (tag, syst + '_172v5', ntk)
                if not syst in [
                        'dyup', 'dydown', 'qcdup', 'qcddown', 'ntkmult',
                        'tchscaleup', 'tchscaledown', 'twchscaleup',
                        'twchscaledown'
                ]:
                    hfinal = systhistos[(tag, syst, 'tot', ntk)].Clone(hname)
                else:
                    hfinal = systhistos[(tag, 'nominal', 'tot',
                                         ntk)].Clone(hname)
                try:
                    ## Systs from separate samples
                    if syst in [
                            'tchscaleup', 'tchscaledown', 'twchscaleup',
                            'twchscaledown'
                    ]:
                        scale = LUMI * xsecweights[CHANMASSTOPROCNAME[('tt',
                                                                       172.5)]]
                    else:
                        scale = LUMI * xsecweights[SYSTTOPROCNAME[syst][0]]
                except KeyError:
                    ## Systs from event weights
                    scale = LUMI * xsecweights[CHANMASSTOPROCNAME[('tt',
                                                                   172.5)]]
                hfinal.Scale(scale)

                ## Renormalize some variations with event weights
                if syst in SYSTSTOBERENORMALIZED:
                    normintegral = systhistos[(tag, 'nominal', 'tot',
                                               ntk)].Integral()
                    normintegral *= LUMI * xsecweights[CHANMASSTOPROCNAME[
                        ('tt', 172.5)]]
                    normintegral /= hfinal.Integral()
                    hfinal.Scale(normintegral)

                ## Add single top
                stProcs = ['t', 'tbar', 'tW', 'tbarW']
                stSystProcs = []
                if 'tchscale' in syst:
                    stProcs = ['tW', 'tbarW']
                    stSystProcs = ['t', 'tbar']
                if 'twchscale' in syst:
                    stProcs = ['t', 'tbar']
                    stSystProcs = ['tW', 'tbarW']
                for st in stProcs:
                    hsinglet = masshistos[(tag, st, 172.5, 'tot',
                                           ntk)].Clone('%s_%s' % (hname, st))
                    hsinglet.Scale(
                        LUMI * xsecweights[CHANMASSTOPROCNAME[(st, 172.5)]])
                    hfinal.Add(hsinglet)
                for st in stSystProcs:
                    hsinglet = systhistos[(tag, syst, 'tot',
                                           ntk)].Clone('%s_%s' % (hname, st))
                    hsinglet.Scale(
                        LUMI * xsecweights[CHANMASSTOPROCNAME[(st, 172.5)]])
                    hfinal.Add(hsinglet)

                ## Save signal only shapes
                if syst == 'nominal':
                    signalonly[(tag, 172.5,
                                ntk)] = hfinal.Clone('%s_sigonly' % hname)
                    signalonly[(tag, 172.5,
                                ntk)].Scale(ntkWeights['inclusive'][ntk])
                    if opt.rebin > 0:
                        signalonly[(tag, 172.5, ntk)].Rebin(opt.rebin)

                ## Add the backgrounds
                if not syst in ['dyup', 'dydown', 'qcdup', 'qcddown']:
                    hfinal.Add(bghistos_added[(tag, ntk)])
                else:  ## From the scaled bghistos if necessary
                    bghistos_added_scaled = {
                        'dyup': bghistos_added_dyup,
                        'dydown': bghistos_added_dydn,
                        'qcdup': bghistos_added_qcdup,
                        'qcddown': bghistos_added_qcddn,
                    }[syst]
                    hfinal.Add(bghistos_added_scaled[(tag, ntk)])

                ## Rebin if requested
                if opt.rebin > 0:
                    hfinal.Rebin(opt.rebin)

                ## Scale by SV track multiplicity weights:
                ## (skipped when the systematic under study is the ntk
                ## multiplicity itself)
                if not syst == 'ntkmult':
                    hfinal.Scale(ntkWeights['inclusive'][ntk])

                ## Write out to file
                hfinal.Write(hname, ROOT.TObject.kOverwrite)

    #####################################################
    ## Non-central mass points
    ## th1fmorph is provided by the user library, so it must be loaded
    ## before the import below can succeed.
    ROOT.gSystem.Load('libUserCodeTopMassSecVtx.so')
    from ROOT import th1fmorph
    # extract mass points from dictionary
    mass_points = sorted(list(set([key[2] for key in masshistos.keys()])))
    # NOTE(review): the outermost points are dropped here -- presumably
    # because they only serve as morphing endpoints; confirm.
    mass_points = mass_points[1:-1]  # remove outermost points
    debughistos = []
    for mass in mass_points:
        # skip the nominal mass point (handled by the central samples above)
        if mass == 172.5: continue
        mname = 'nominal_%s' % str(mass).replace('.', 'v')
        odir = ofi.mkdir(mname)
        odir.cd()
        for tag, _, _ in SELECTIONS:
            for ntk, _ in NTRKBINS:
                hname = "SVLMass_%s_%s_%s" % (tag, mname, ntk)
                ## Start from the ttbar histogram, scaled to lumi x xsec
                hfinal = masshistos[(tag, 'tt', mass, 'tot', ntk)].Clone(hname)
                hfinal.Scale(LUMI *
                             xsecweights[CHANMASSTOPROCNAME[('tt', mass)]])

                ## Add single top (t-channel, for which we have the samples)
                for st in ['t', 'tbar']:
                    hsinglet = masshistos[(tag, st, mass, 'tot',
                                           ntk)].Clone('%s_%s' % (hname, st))
                    hsinglet.Scale(LUMI *
                                   xsecweights[CHANMASSTOPROCNAME[(st, mass)]])
                    hfinal.Add(hsinglet)

                ## Add single top (tW-channel, for which we don't have samples)
                ## Morph between the two extreme mass points to get
                ## the non existing ones
                for st in ['tW', 'tbarW']:
                    if mass not in [166.5, 178.5]:
                        ## Morphed shape, normalized to the 166.5 integral,
                        ## then rescaled by the xsec ratio of the two masses
                        hsingletW = th1fmorph(
                            '%s_%s_morph' % (hname, st),
                            '%s_%s_morphed' % (hname, st),
                            masshistos[(tag, 'tW', 166.5, 'tot', ntk)],
                            masshistos[(tag, 'tW', 178.5, 'tot', ntk)], 166.5,
                            178.5, mass, masshistos[(tag, 'tW', 166.5, 'tot',
                                                     ntk)].Integral())
                        hsingletW.Scale(
                            LUMI *
                            xsecweights[CHANMASSTOPROCNAME[(st, 166.5)]] *
                            TWXSECS[mass] / TWXSECS[166.5])
                        ## Detach from the current file so ROOT does not
                        ## auto-delete/own this in-memory histogram
                        hsingletW.SetDirectory(0)
                    else:
                        hsingletW = masshistos[(tag, st, mass, 'tot',
                                                ntk)].Clone('%s_%s' %
                                                            (hname, st))
                        hsingletW.Scale(
                            LUMI * xsecweights[CHANMASSTOPROCNAME[(st, mass)]])
                    hfinal.Add(hsingletW)

                ## Save signal only shapes
                signalonly[(tag, mass,
                            ntk)] = hfinal.Clone('%s_sigonly' % hname)
                signalonly[(tag, mass,
                            ntk)].Scale(ntkWeights['inclusive'][ntk])
                if opt.rebin > 0:
                    signalonly[(tag, mass, ntk)].Rebin(opt.rebin)

                ## Add the combined backgrounds
                hfinal.Add(bghistos_added[(tag, ntk)])

                ## Rebin if requested
                if opt.rebin > 0:
                    hfinal.Rebin(opt.rebin)

                ## Scale by SV track multiplicity weights:
                hfinal.Scale(ntkWeights['inclusive'][ntk])

                ## Write out to file
                hfinal.Write(hname, ROOT.TObject.kOverwrite)

    ## Save the signal only shapes (tt+t+tW) as input for the combined plot
    ## NOTE(review): HIGHEST_PROTOCOL is a binary pickle protocol but the
    ## file is opened in text mode ('w'); this works on POSIX/Python 2,
    ## but 'wb' would be more robust -- confirm before changing.
    cachefile = open(".svlsignalshapes.pck", 'w')
    pickle.dump(signalonly, cachefile, pickle.HIGHEST_PROTOCOL)
    print '>>> Dumped signal only shapes to cache (.svlsignalshapes.pck)'
    cachefile.close()

    ## Write also data histos
    ofi.cd()
    odir = ofi.mkdir('data')
    odir.cd()
    for tag, _, _ in SELECTIONS:
        for ntk, _ in NTRKBINS:
            hname = "SVLMass_%s_data_%s" % (tag, ntk)
            datahistos_added[(tag, ntk)].Write(hname, ROOT.TObject.kOverwrite)

    print('>>> Wrote pseudo experiment inputs to file (%s)' %
          osp.join(opt.outDir, 'pe_inputs.root'))

    ofi.Write()
    ofi.Close()

    return 0
Exemplo n.º 9
0
 def single_template(self,
                     dir,
                     proc,
                     mass,
                     label,
                     scale,
                     MODE='MORPHED',
                     debug=False):
     """
     Return a single template histogram for a given dir, proc, mass and label. The histogram will be scaled by scale
     (corresponding to the cross section times BR of the corresponding Higgs boson). Scale will be modified by a linear
     interpolation scale taking into account differences in acceptance and reconstruction efficiency as a function of
     mass. Two modes exist to determine the template: MORPHED - will use horizontal template morphing (the morphing will
     always be applied from the pivotal, which is closest to mass to minimize uncertainties from the interpolation);
     NEAREST_NEIGHBOUR - will use the closest mass point in the list of pivotals w/o any additional horizontal inter-
     polation. Returns None if no pivotal mass window exists for dir; raises ValueError for an unknown MODE.
     """
     ## special case (Htaunu): templates without a mass label are cloned verbatim
     if self.ana_type == "Htaunu" and mass == "":
         single_template = self.load_hist(dir + '/' + proc +
                                          label).Clone(proc + label +
                                                       '_template')
         single_template.Scale(scale)
         return single_template
     ## window of closest pivotal masses below/above mass. Window can be None, if no pivotals exist for a given dir. In
     ## this case return None
     window = self.pivotal_mass_window(float(mass), self.pivotals[dir])
     if not window:
         return None
     ## determine whether a single pivotal can be cloned directly: exact match
     ## with a pivotal, or mass out of bounds of the pivotals (too small/large).
     ## 'pivot' is the mass string used to build the histogram path and name,
     ## preserved exactly as the caller/window provides it.
     pivot = None
     if float(window[0]) == float(mass) and float(mass) == float(window[1]):
         pivot = mass  # exact match with pivotal
     elif float(window[0]) > float(mass):
         pivot = window[0]  # mass below lowest pivotal
     elif float(window[1]) < float(mass):
         pivot = window[1]  # mass above highest pivotal
     if pivot is not None:
         single_template = self.load_hist(dir + '/' + proc + pivot +
                                          label).Clone(proc + pivot + label +
                                                       '_template')
         single_template.Scale(scale)
         return single_template
     ## mass somewhere between pivotals: masses is the tuple of the embracing pivotals, histos is the tuple of
     ## corresponding template histograms. The closest pivotal to mass is the first element in each of the tuples,
     ## the further away pivotal is second.
     if (float(mass) - float(window[0])) < (float(window[1]) -
                                            float(mass)):
         ## lower bound pivotal closer to mass
         masses = (float(window[0]), float(window[1]))
         histos = (self.load_hist(dir + '/' + proc + window[0] + label),
                   self.load_hist(dir + '/' + proc + window[1] + label))
     else:
         ## upper bound pivotal closer to mass
         masses = (float(window[1]), float(window[0]))
         histos = (self.load_hist(dir + '/' + proc + window[1] + label),
                   self.load_hist(dir + '/' + proc + window[0] + label))
     ## correct the normalisation by the acceptance/efficiency interpolation
     scale *= self.interpolation_scale(
         (float(masses[0]), histos[0].Integral()),
         (float(masses[1]), histos[1].Integral()), float(mass), debug)
     if MODE == 'MORPHED':
         ## horizontal morphing, starting from the closest pivotal
         single_template = th1fmorph(
             proc + str(mass) + label + '_template',
             proc + mass + label, histos[0], histos[1], masses[0],
             masses[1], float(mass), scale * histos[0].Integral(), 0)
     elif MODE == 'NEAREST_NEIGHBOUR':
         ## no interpolation: take the closest pivotal as-is
         single_template = histos[0].Clone(proc + str(mass) + label +
                                           '_template')
         single_template.Scale(scale)
     else:
         ## previously an unknown MODE fell through to an UnboundLocalError
         raise ValueError("single_template: unknown MODE '%s'" % MODE)
     return single_template