Code Example #1
def main(args, options):
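    # Build one task per input ROOT file (taken from a '/store' area or a
    # local directory), then run the single-top analysis over those tasks,
    # either in a multiprocessing pool or sequentially.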

    #prepare output directory
    if args[0] == options.outDir:
        options.outDir += '/singleTop'
        print 'Warning output directory is the same, renaming as %s' % options.outDir
    os.system('mkdir -p %s' % options.outDir)

    #prepare one task per file to process
    taskList = []
    try:

        treefiles = {}  # procname -> [filename1, filename2, ...]
        if args[0].find('/store') >= 0:
            for filename in fillFromStore(args[0]):
                if not os.path.splitext(filename)[1] == '.root': continue
                isData, pname, splitno = resolveFilename(
                    os.path.basename(filename))
                if not pname in treefiles: treefiles[pname] = []
                taskList.append((filename, isData, options.outDir))
        else:
            for filename in os.listdir(args[0]):
                if not os.path.splitext(filename)[1] == '.root': continue
                isData, pname, splitno = resolveFilename(filename)
                if not pname in treefiles: treefiles[pname] = []
                taskList.append((filename, isData, options.outDir))
    except IndexError:
        print "Please provide a valid input directory"
        return -1

    #submit tasks in parallel, if required, or run sequentially
    if options.jobs > 0:
        print ' Submitting jobs in %d threads' % options.jobs
        import multiprocessing as MP
        pool = MP.Pool(options.jobs)
        pool.map(runSingleTopAnalysisPacked, taskList)
    else:
        for filename, isData, outDir in taskList:
            runSingleTopAnalysis(filename=filename,
                                 isData=isData,
                                 outDir=outDir)

    #EDIT: Write current cuts to sig/bkg file
    sigbkg = open('sig_bkg.txt', 'a')
    sigbkg.write('#####################The Cuts:\n')
    sigbkg.write('Must be an e or mu Event\n')
    sigbkg.write('Exactly 1 Forward Jet with eta<3.2 or eta>4.7\n')
    sigbkg.write('1 < Num. Central Jets < 3\n')
    sigbkg.write('Exactly 1 Sec Vertex\n')
    sigbkg.write('MT > 50\n')
    sigbkg.close()

    return 0
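runSingleTopAnalysisPacked is not shown in this example. Since multiprocessing.Pool.map passes a single argument to the worker, it is presumably a thin wrapper that unpacks the (filename, isData, outDir) tuples built above; a minimal sketch under that assumption:

def runSingleTopAnalysisPacked(task):
    # Unpack the (filename, isData, outDir) tuple assembled in main() and
    # forward it as keyword arguments; Pool.map can only pass one argument.
    filename, isData, outDir = task
    return runSingleTopAnalysis(filename=filename, isData=isData, outDir=outDir)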
Code Example #2
def main(args, options):

	#prepare output directory 
	if args[0]==options.outDir:
		options.outDir += '/singleTop'
		print 'Warning output directory is the same, renaming as %s' % options.outDir
	os.system('mkdir -p %s'%options.outDir)
	
	#prepare one task per file to process
	taskList=[]
	try:
		
		treefiles = {} # procname -> [filename1, filename2, ...]
		if args[0].find('/store')>=0:
			for filename in fillFromStore(args[0]):
				if not os.path.splitext(filename)[1] == '.root': continue	
				isData, pname, splitno = resolveFilename(os.path.basename(filename))
				if not pname in treefiles: treefiles[pname] = []
				taskList.append((filename, isData,options.outDir))
		else:
			for filename in os.listdir(args[0]):
				if not os.path.splitext(filename)[1] == '.root': continue	
				isData, pname, splitno = resolveFilename(filename)
				if not pname in treefiles: treefiles[pname] = []
				taskList.append((filename, isData,options.outDir))
	except IndexError:
		print "Please provide a valid input directory"
		return -1
	
	#submit tasks in parallel, if required, or run sequentially
	if options.jobs>0:
		print ' Submitting jobs in %d threads' % options.jobs
		import multiprocessing as MP
		pool = MP.Pool(options.jobs)
		pool.map(runSingleTopAnalysisPacked,taskList)
	else:
		for filename,isData,outDir in taskList:
			runSingleTopAnalysis(filename=filename,isData=isData,outDir=outDir)

	#EDIT: Write current cuts to sig/bkg file
	sigbkg = open('sig_bkg.txt','a')
	sigbkg.write('#####################The Cuts:\n')
	sigbkg.write('Must be an e or mu Event\n')
	sigbkg.write('Exactly 1 Forward Jet with eta<3.2 or eta>4.7\n')
	sigbkg.write('1 < Num. Central Jets < 3\n')
	sigbkg.write('Exactly 1 Sec Vertex\n')
	sigbkg.write('MT > 50\n')
	sigbkg.close()
			
	return 0
Code Example #3
def main(args, opt):
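    # Open the analysis tree from every ROOT file in the input directory,
    # fill (or load from a pickle cache) generator-level m_lb histograms for
    # each selection, and draw ratio plots of the Q^2 scale variations
    # against the nominal TTJets sample.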
    trees = {}
    for filename in os.listdir(args[0]):
        if not os.path.splitext(filename)[1] == '.root': continue
        isdata, pname, splitno = resolveFilename(filename)
        treefile = os.path.join(args[0], filename)
        tfile = ROOT.TFile.Open(treefile, 'READ')
        trees[pname] = tfile.Get(TREENAME)

    try:
        cachefile = open('genmlbhists.pck', 'r')
        hists = pickle.load(cachefile)
        print '>>> Read gen mlb histos from cache (genmlbhists.pck)'
        cachefile.close()
    except IOError:
        hists = {}

        for tag, sel in MLBSELECTIONS:
            for pname in trees.keys():
                print '... processing', pname, tag
                hists[(pname, tag)] = getHistoFromTree(trees[pname],
                                                       sel=sel,
                                                       var='GenMlb',
                                                       hname="GenMlb_%s_%s" %
                                                       (pname, tag),
                                                       nbins=100,
                                                       xmin=0,
                                                       xmax=200,
                                                       titlex=MLBAXISTITLE)

        cachefile = open('genmlbhists.pck', 'w')
        pickle.dump(hists, cachefile, pickle.HIGHEST_PROTOCOL)
        print '>>> Dumped histos to cachefile (genmlbhists.pck)'
        cachefile.close()

    ROOT.gStyle.SetOptTitle(0)
    ROOT.gStyle.SetOptStat(0)
    ROOT.gROOT.SetBatch(1)

    for tag, _ in MLBSELECTIONS:
        plot = RatioPlot('genmlb')
        plot.normalized = False
        plot.add(hists[('TTJets_MSDecays_172v5', tag)], 'Nominal')
        plot.add(hists[('TTJets_MSDecays_scaleup', tag)], 'Q^{2} scale up')
        plot.add(hists[('TTJets_MSDecays_scaledown', tag)], 'Q^{2} scale down')
        plot.tag = "Generator level m_{lb} shape"
        if tag == 'cor':
            plot.subtag = "Correct combinations"
        else:
            plot.subtag = "Wrong combinations"
        plot.ratiotitle = 'Ratio wrt nominal'
        plot.titlex = MLBAXISTITLE
        plot.tagpos = (0.22, 0.85)
        plot.subtagpos = (0.22, 0.78)
        plot.legpos = (0.20, 0.55)
        plot.ratiorange = (0.85, 1.15)
        plot.colors = [ROOT.kBlue - 3, ROOT.kRed - 4, ROOT.kOrange - 3]
        plot.show("genmlb_scale_%s" % tag, opt.outDir)

    return 0
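getHistoFromTree is defined elsewhere in the project. A minimal sketch of such a helper, assuming the argument names used above and ROOT's standard TTree::Draw projection interface (the actual implementation may differ):

import ROOT

def getHistoFromTreeSketch(tree, sel='', var='GenMlb', hname='histo',
                           nbins=100, xmin=0., xmax=200., titlex=''):
    # Book the histogram explicitly so the binning and axis title are set here
    histo = ROOT.TH1D(hname, '', nbins, xmin, xmax)
    histo.Sumw2()
    histo.GetXaxis().SetTitle(titlex)
    # Project the tree variable into the named histogram; the selection string
    # (which may fold in event weights) is applied by TTree::Draw, and 'goff'
    # suppresses graphics output
    tree.Draw('%s>>%s' % (var, hname), sel, 'goff')
    histo.SetDirectory(0)  # detach from the current file so it outlives it
    return histo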
Code Example #4
def main(args, opt):
	trees = {}
	for filename in os.listdir(args[0]):
		if not os.path.splitext(filename)[1] == '.root': continue
		isdata, pname, splitno = resolveFilename(filename)
		treefile = os.path.join(args[0], filename)
		tfile = ROOT.TFile.Open(treefile,'READ')
		trees[pname] = tfile.Get(TREENAME)


	try:
		cachefile = open('genmlbhists.pck', 'r')
		hists = pickle.load(cachefile)
		print '>>> Read gen mlb histos from cache (genmlbhists.pck)'
		cachefile.close()
	except IOError:
		hists = {}

		for tag,sel in MLBSELECTIONS:
			for pname in trees.keys():
				print '... processing', pname, tag
				hists[(pname,tag)] = getHistoFromTree(trees[pname],
				                                sel=sel,
				                                var='GenMlb',
			                                    hname="GenMlb_%s_%s"%(pname,tag),
		                                        nbins=100,xmin=0,xmax=200,
		                                        titlex=MLBAXISTITLE)

		cachefile = open('genmlbhists.pck', 'w')
		pickle.dump(hists, cachefile, pickle.HIGHEST_PROTOCOL)
		print '>>> Dumped histos to cachefile (genmlbhists.pck)'
		cachefile.close()


	ROOT.gStyle.SetOptTitle(0)
	ROOT.gStyle.SetOptStat(0)
	ROOT.gROOT.SetBatch(1)


	for tag,_ in MLBSELECTIONS:
		plot = RatioPlot('genmlb')
		plot.normalized = False
		plot.add(hists[('TTJets_MSDecays_172v5', tag)],     'Nominal')
		plot.add(hists[('TTJets_MSDecays_scaleup', tag)],   'Q^{2} scale up')
		plot.add(hists[('TTJets_MSDecays_scaledown', tag)], 'Q^{2} scale down')
		plot.tag = "Generator level m_{lb} shape"
		if tag == 'cor':
			plot.subtag = "Correct combinations"
		else:
			plot.subtag = "Wrong combinations"
		plot.ratiotitle = 'Ratio wrt nominal'
		plot.titlex = MLBAXISTITLE
		plot.tagpos    = (0.22, 0.85)
		plot.subtagpos = (0.22, 0.78)
		plot.legpos = (0.20, 0.55)
		plot.ratiorange = (0.85, 1.15)
		plot.colors = [ROOT.kBlue-3, ROOT.kRed-4, ROOT.kOrange-3]
		plot.show("genmlb_scale_%s"%tag, opt.outDir)

	return 0
Code Example #5
def main(args, options):

	#prepare output directory 
	if args[0]==options.outDir:
		options.outDir += '/singleTop'
		print 'Warning output directory is the same, renaming as %s' % options.outDir
	os.system('mkdir -p %s'%options.outDir)
	
	#prepare one task per file to process
	taskList=[]
	try:
		
		treefiles = {} # procname -> [filename1, filename2, ...]
		if args[0].find('/store')>=0:
			for filename in fillFromStore(args[0]):
				if not os.path.splitext(filename)[1] == '.root': continue	
				isData, pname, splitno = resolveFilename(os.path.basename(filename))
				if not pname in treefiles: treefiles[pname] = []
				taskList.append((filename, isData,options.outDir))
		else:
			for filename in os.listdir(args[0]):
				if not os.path.splitext(filename)[1] == '.root': continue	
				isData, pname, splitno = resolveFilename(filename)
				if not pname in treefiles: treefiles[pname] = []
				taskList.append((filename, isData,options.outDir))
	except IndexError:
		print "Please provide a valid input directory"
		return -1
	
	#submit tasks in parallel, if required, or run sequentially
	if options.jobs>0:
		print ' Submitting jobs in %d threads' % options.jobs
		import multiprocessing as MP
		pool = MP.Pool(options.jobs)
		pool.map(runSingleTopAnalysisPacked,taskList)
	else:
		for filename,isData,outDir in taskList:
			runSingleTopAnalysis(filename=filename,isData=isData,outDir=outDir)
			
	return 0
Code Example #6
File: makeSVLSystPlots.py  Project: pfs/TopMassSecVtx
def main(args, opt):
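	# Collect the dedicated systematics samples from <input>/syst and the
	# split nominal TTJets chunks, build per-selection histogram-filling
	# tasks (both from separate samples and from event-weight variations),
	# cache the filled histograms with pickle, and draw the control and
	# systematics ratio plots.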
	os.system('mkdir -p %s'%opt.outDir)
	systfiles = {} # procname -> filename
	try:
		for fname in os.listdir(os.path.join(args[0],'syst')):
			if not os.path.splitext(fname)[1] == '.root': continue
			for syst,_,systfile,_ in SYSTSFROMFILES:
				if fname in systfile:
					systfiles[syst] = [os.path.join(args[0], 'syst', fname)]

		# Get the split nominal files
		systfiles['nominal'] = []
		for fname in os.listdir(os.path.join(args[0],'Chunks')):
			if not os.path.splitext(fname)[1] == '.root': continue
			isdata,procname,splitno = resolveFilename(fname)
			if not procname == 'TTJets_MSDecays_172v5': continue
			if not splitno: continue # skip files without a split number

			systfiles['nominal'].append(os.path.join(args[0],'Chunks',fname))
		if len(systfiles['nominal']) < 20:
			print "ERROR >>> Missing files for split nominal sample?"
			return -1

	except IndexError:
		print "Please provide a valid input directory"
		exit(-1)

	hname_to_keys = {} # hname -> (tag, syst, comb)
	tasklist = {} # treefile -> tasklist

	for fsyst in systfiles.keys():
		if not fsyst in tasklist: tasklist[fsyst] = []
		for tag,sel,_ in SELECTIONS:
			if fsyst == 'nominal':
				for syst,_,weight,combs in SYSTSFROMWEIGHTS:
					tasks = makeSystTask(tag, sel, syst,
						                 hname_to_keys, weight=weight,
						                 combs=combs)
					tasklist[fsyst] += tasks

				tasks = []
				for comb,combsel in COMBINATIONS.iteritems():
					for var,nbins,xmin,xmax,titlex in CONTROLVARS:
						hname = "%s_%s_%s" % (var, comb, tag)
						finalsel = "%s*(%s&&%s)"%(COMMONWEIGHT, sel, combsel)
						tasks.append((hname, var, finalsel,
							                 nbins, xmin, xmax, titlex))
						hname_to_keys[hname] = (tag, var, comb)

				tasklist[fsyst] += tasks

				tasks = []
				for name, nus in [('nu', 1), ('nonu', 0), ('nuunm', -1)]:
					hname = "SVLMass_%s_%s_%s" % ('tot', tag, name)
					finalsel = "%s*(%s&&BHadNeutrino==%d)"%(
						                 COMMONWEIGHT, sel, nus)
					tasks.append((hname, 'SVLMass', finalsel,
						          NBINS, XMIN, XMAX, MASSXAXISTITLE))
					hname_to_keys[hname] = (tag, name, 'tot')

				tasklist[fsyst] += tasks


			else:
				tasks = makeSystTask(tag, sel, fsyst, hname_to_keys)
				tasklist[fsyst] += tasks

	if not opt.cache:
		# print '  Will process the following tasks:'
		# for filename,tasks in sorted(tasklist.iteritems()):
		# 	print filename
		# 	for task in tasks:
		# 		print task
		# raw_input("Press any key to continue...")
		runTasks(systfiles, tasklist, opt, 'syst_histos')

		systhistos = {} # (tag, syst, comb) -> histo
		systhistos = gatherHistosFromFiles(tasklist, systfiles,
			                           os.path.join(opt.outDir, 'syst_histos'),
			                           hname_to_keys)


		cachefile = open(".svlsysthistos.pck", 'w')
		pickle.dump(systhistos, cachefile, pickle.HIGHEST_PROTOCOL)
		cachefile.close()


		# print "Wrote syst histos to cache file"
		# raw_input("press key")

	cachefile = open(".svlsysthistos.pck", 'r')
	systhistos = pickle.load(cachefile)
	print '>>> Read syst histos from cache (.svlsysthistos.pck)'
	cachefile.close()

	ROOT.gStyle.SetOptTitle(0)
	ROOT.gStyle.SetOptStat(0)
	ROOT.gROOT.SetBatch(1)


	for var,_,_,_,_ in CONTROLVARS:
		for sel,tag in [
			#('inclusive', 'Fully Inclusive'),
			#('inclusive_mrank1', 'Mass ranked, leading p_{T}'),
			#('inclusive_mrank1dr', 'Mass ranked, #DeltaR<2, leading p_{T}'),
			#('inclusive_drrank1dr', '#DeltaR ranked, #DeltaR<2, leading p_{T}'),
			('inclusive_optmrank', 'Optimized mass rank')]:
			try: makeControlPlot(systhistos, var, sel, tag, opt)
			except KeyError:
				print 'control plots for %s selection not found' % sel

	for tag,_,_ in SELECTIONS:
		if not 'inclusive' in tag : continue
		print "... processing %s"%tag

		# Make plot of mass with and without neutrino:
		# for comb in COMBINATIONS.keys():
		# plot = RatioPlot('neutrino_%s'%tag)
		# plot.rebin = 2
		# plot.add(systhistos[(tag,'nonu', 'tot')], 'Without neutrino')
		# plot.add(systhistos[(tag,'nu',   'tot')], 'With neutrino')
		# plot.add(systhistos[(tag,'nuunm','tot')], 'Unmatched')
		# plot.reference = systhistos[(tag,'nominal','tot')]
		# plot.tag = "Mass shape with and without neutrinos"
		# plot.subtag = SELNAMES[tag] + COMBNAMES['tot']
		# plot.ratiotitle = 'Ratio wrt Total'
		# plot.ratiorange = (0.7, 1.3)
		# plot.colors = [ROOT.kBlue-3, ROOT.kRed-4, ROOT.kOrange-3]
		# plot.show("neutrino_%s_%s"%(tag,'tot'),
		# 	      os.path.join(opt.outDir, 'syst_plots'))
		# plot.reset()

		for name, title, systs, colors, comb in SYSTPLOTS:
			print name, title, systs, colors, comb
			plot = RatioPlot('%s_%s'%(name,comb))
			plot.rebin = 2

			for syst in systs:
				try:
					plot.add(systhistos[(tag,syst,comb)], SYSTNAMES[syst])
				except:
					print 'failed to add',(tag,syst,comb),syst

			plot.tag = title
			subtag = SELNAMES[tag] + COMBNAMES[comb]
			plot.subtag = subtag
			plot.ratiotitle = 'Ratio wrt %s' % SYSTNAMES[systs[0]]
			plot.ratiorange = (0.85, 1.15)
			plot.colors = colors
			filename = "%s_%s"%(name,tag)
			if comb != 'tot': filename += '_%s'%comb
			plot.show(filename, os.path.join(opt.outDir,'syst_plots'))
			plot.reset()

		# Make top pt plot with both correct and wrong
		plot = RatioPlot('toppt_paper_cor_wro')
		plot.canvassize = (600,600)
		plot.tag = 'Top p_{T} mis-modeling'
		plot.rebin = 2
		plot.subtag = 'Inclusive channels'
		plot.ratiotitle = '1 / Nominal'
		plot.ratiorange = (0.85, 1.15)
		plot.legpos = (0.55, 0.30)
		plot.colors = [ROOT.kGreen+2, ROOT.kGreen-6, ROOT.kRed+2, ROOT.kRed-6]
		plot.add(systhistos[(tag,'nominal','cor')], 'Nominal (correct)',      includeInRatio=False)
		plot.add(systhistos[(tag,'toppt','cor')], 'p_{T} weighted (correct)', includeInRatio=True)
		plot.add(systhistos[(tag,'nominal','wro')], 'Nominal (wrong)',        includeInRatio=False)
		plot.add(systhistos[(tag,'toppt','wro')], 'p_{T} weighted (wrong)',   includeInRatio=True)
		plot.reference = [systhistos[(tag,'nominal','cor')], systhistos[(tag,'nominal','wro')]]

		plot.show('toppt_cor_wro_forpaper_%s'%tag, os.path.join(opt.outDir,'syst_plots'))
		plot.reset()

		# Make b fragmentation plot for paper
		plot = RatioPlot('bfrag_paper')
		plot.canvassize = (600,600)
		plot.tag = 'b fragmentation'
		plot.rebin = 2
		plot.subtag = 'Inclusive channels'
		plot.ratiotitle = '1 / Z2* rb LEP'
		plot.ratiorange = (0.85, 1.15)
		plot.legpos = (0.65, 0.15)
		plot.colors = [ROOT.kMagenta, ROOT.kMagenta+2, ROOT.kMagenta-9, ROOT.kAzure+7]
		plot.add(systhistos[(tag,'nominal', 'tot')], 'Z2* rb LEP', includeInRatio=False)
		plot.add(systhistos[(tag,'bfragdn', 'tot')], 'Z2* rb LEP soft')
		plot.add(systhistos[(tag,'bfragup', 'tot')], 'Z2* rb LEP hard')
		plot.add(systhistos[(tag,'bfragz2s','tot')], 'Z2* nominal')
		plot.reference = [systhistos[(tag,'nominal','tot')]]
		plot.show('bfrag_paper_%s'%tag, os.path.join(opt.outDir,'syst_plots'))
		plot.reset()


	# for tag,sel,seltag in SELECTIONS:
	# 	print 70*'-'
	# 	print '%-10s: %s' % (tag, sel)
	# 	fcor, fwro, funm = {}, {}, {}
	# 	for mass in sorted(massfiles.keys()):
	# 	# mass = 172.5
	# 		hists = masshistos[(tag, mass)]
	# 		n_tot, n_cor, n_wro, n_unm = (x.GetEntries() for x in hists)
	# 		fcor[mass] = 100.*(n_cor/float(n_tot))
	# 		fwro[mass] = 100.*(n_wro/float(n_tot))
	# 		funm[mass] = 100.*(n_unm/float(n_tot))
	# 		print ('  %5.1f GeV: %7d entries \t'
	# 			   '(%4.1f%% corr, %4.1f%% wrong, %4.1f%% unmatched)' %
	# 			   (mass, n_tot, fcor[mass], fwro[mass], funm[mass]))

	# 	oname = os.path.join(opt.outDir, 'fracvsmt_%s'%tag)
	# 	plotFracVsTopMass(fcor, fwro, funm, tag, seltag, oname)

	# print 112*'-'
	# print 'Estimated systematics (from a crude chi2 fit)'
	# print '%20s | %-15s | %-15s | %-15s | %-15s | %-15s' % (
	# 									 'selection', 'bfrag', 'scale',
	# 									 'toppt', 'matching', 'uecr')
	# for tag,_,_ in SELECTIONS:
	# 		sys.stdout.write("%20s | " % tag)
	# 		for syst in ['bfrag', 'scale', 'toppt', 'matching', 'uecr']:
	# 			err, chi2 = systematics[(tag,syst)]
	# 			sys.stdout.write('%4.1f (%4.1f GeV)' % (chi2*1e5, err))
	# 			# sys.stdout.write('%4.1f (%4.1f GeV)' % (chi2, err))
	# 			sys.stdout.write(' | ')
	# 		sys.stdout.write('\n')
	# print 112*'-'

	return 0
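makeSystTask is defined elsewhere in makeSVLSystPlots.py. Judging from the tuples appended to the same task lists above, it presumably returns one (hname, var, selection, nbins, xmin, xmax, titlex) entry per combination and registers each histogram name in hname_to_keys. A rough sketch under those assumptions, reusing the module-level constants seen above (COMBINATIONS, COMMONWEIGHT, NBINS, XMIN, XMAX, MASSXAXISTITLE); the histogram naming here is illustrative only:

def makeSystTaskSketch(tag, sel, syst, hname_to_keys, weight='1', combs=None):
    # One histogram per (selection tag, systematic, combination); for
    # weight-based systematics the event weight is folded into the selection.
    tasks = []
    for comb, combsel in COMBINATIONS.iteritems():
        if combs and comb not in combs: continue
        hname = "SVLMass_%s_%s_%s" % (comb, tag, syst)
        finalsel = "%s*%s*(%s&&%s)" % (COMMONWEIGHT, weight, sel, combsel)
        tasks.append((hname, 'SVLMass', finalsel,
                      NBINS, XMIN, XMAX, MASSXAXISTITLE))
        hname_to_keys[hname] = (tag, syst, comb)
    return tasks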
Code Example #7
def main(args, opt):
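	# Assemble the pseudo-experiment input templates: read the cached
	# background, systematics, and mass-scan histograms, scale them by the
	# luminosity times the per-process cross-section weights, add the single
	# top and background contributions, and write everything to pe_inputs.root.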
	os.system('mkdir -p %s'%opt.outDir)
	mcfiles = {}   # procname -> filename
	datafiles = {} # procname -> filename
	try:
		for fname in os.listdir(args[0]):
			if not osp.splitext(fname)[1] == '.root': continue
			isdata,procname,splitno = resolveFilename(fname)
			if isdata:
				if not procname in datafiles:
					datafiles[procname] = []
				datafiles[procname].append(osp.join(args[0],fname))
			else:
				if 'QCD' in procname:                   continue ## exclude QCD
				if procname == 'TTJets_MSDecays_172v5': continue ## have those already
				if 'SingleT' in procname:               continue ## have those already

				if not procname in mcfiles:
					mcfiles[procname] = []
				mcfiles[procname].append(osp.join(args[0],fname))

	except IndexError:
		print "Please provide a valid input directory"
		exit(-1)


	## Produce (or read) the histogram data
	bghistos = makeBackgroundHistos(mcfiles, opt)

	cachefile = open(".xsecweights.pck", 'r')
	xsecweights = pickle.load(cachefile)
	cachefile.close()
	print '>>> Read xsec weights from cache (.xsecweights.pck)'

	cachefile = open(".svldyscalefactors.pck", 'r')
	dySFs = pickle.load(cachefile)
	cachefile.close()
	print '>>> Read DY scale factors from cache (.svldyscalefactors.pck)'

	cachefile = open(".svlqcdtemplates.pck", 'r')
	qcdTemplates = pickle.load(cachefile)
	cachefile.close()
	print '>>> Read QCD templates from cache (.svlqcdtemplates.pck)'

	## Read SV Track multiplicity weights:
	from extractNtrkWeights import extractNTrkWeights
	ntkWeights = extractNTrkWeights()

	## Now add them up with proper scales
	mcprocesses = [k for k in mcfiles.keys() if not 'Data8TeV' in k]
	bghistos_added = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt)

	bghistos_added_dyup = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt,
		                         dyScale=1.3)
	bghistos_added_dydn = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt,
		                         dyScale=0.7)
	bghistos_added_qcdup = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt,
		                         qcdScale=1.1)
	bghistos_added_qcddn = sumBGHistos(processes=mcprocesses,
		                         bghistos=bghistos,
		                         xsecweights=xsecweights,
		                         ntkWeights=ntkWeights,
		                         dySFs=dySFs,
		                         qcdTemplates=qcdTemplates,
		                         opt=opt,
		                         qcdScale=0.9)

	## Produce data histograms
	datahistos = makeDataHistos(datafiles, opt)
	datahistos_added = sumDataHistos(datafiles.keys(), datahistos)
	# Rebin also data, if required:
	if opt.rebin>0:
		for hist in datahistos_added.values():
			hist.Rebin(opt.rebin)

	## Save the background only shapes separately as templates for the fit
	cachefile = open(".svlbgtemplates.pck", 'w')
	pickle.dump(bghistos_added, cachefile, pickle.HIGHEST_PROTOCOL)
	print '>>> Dumped bg templates to cache (.svlbgtemplates.pck)'
	cachefile.close()

	## Read syst histos:
	cachefile = open(".svlsysthistos.pck", 'r')
	systhistos = pickle.load(cachefile)
	print '>>> Read systematics histograms from cache (.svlsysthistos.pck)'
	cachefile.close()

	## Read mass scan histos:
	cachefile = open(".svlmasshistos.pck", 'r')
	masshistos = pickle.load(cachefile)
	print '>>> Read mass scan histograms from cache (.svlmasshistos.pck)'
	# (tag, chan, mass, comb)      -> histo
	# (tag, chan, mass, comb, ntk) -> histo
	cachefile.close()

	ofi = ROOT.TFile.Open(osp.join(opt.outDir,'pe_inputs.root'),'RECREATE')
	ofi.cd()

	#####################################################
	## Central mass point and syst samples
	for syst in ([s for s,_,_,_ in ALLSYSTS] +
	             ['dyup','dydown','qcdup','qcddown','ntkmult']):
		odir = ofi.mkdir(syst + '_172v5')
		odir.cd()
		for tag,_,_ in SELECTIONS:
			for ntk,_ in NTRKBINS:
				hname = "SVLMass_%s_%s_%s" % (tag,syst+'_172v5',ntk)
				if not syst in ['dyup','dydown','qcdup','qcddown','ntkmult',
				                'tchscaleup','tchscaledown',
				                'twchscaleup','twchscaledown']:
					hfinal = systhistos[(tag,syst,'tot',ntk)].Clone(hname)
				else:
					hfinal = systhistos[(tag,'nominal','tot',ntk)].Clone(hname)
				try:
					## Systs from separate samples
					if syst in ['tchscaleup','tchscaledown',
					            'twchscaleup','twchscaledown']:
						scale = LUMI*xsecweights[CHANMASSTOPROCNAME[('tt', 172.5)]]
					else:
						scale = LUMI*xsecweights[SYSTTOPROCNAME[syst][0]]
				except KeyError:
					## Systs from event weights
					scale = LUMI*xsecweights[CHANMASSTOPROCNAME[('tt', 172.5)]]
				hfinal.Scale(scale)

				## Renormalize some variations with event weights
				if syst in SYSTSTOBERENORMALIZED:
					normintegral = systhistos[(tag,'nominal','tot',ntk)].Integral()
					normintegral *= LUMI*xsecweights[CHANMASSTOPROCNAME[('tt', 172.5)]]
					normintegral /= hfinal.Integral()
					hfinal.Scale(normintegral)

				## Add single top
				stProcs=['t', 'tbar', 'tW', 'tbarW']
				stSystProcs=[]
				if 'tchscale' in syst:
					stProcs=['tW', 'tbarW']
					stSystProcs=['t', 'tbar']
				if 'twchscale' in syst:
					stProcs=['t', 'tbar']
					stSystProcs=['tW', 'tbarW']
				for st in stProcs:
					hsinglet = masshistos[(tag, st, 172.5,'tot',ntk)].Clone('%s_%s'%(hname,st))
					hsinglet.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, 172.5)]])
					hfinal.Add(hsinglet)
				for st in stSystProcs:
					hsinglet = systhistos[(tag, syst, 'tot', ntk)].Clone('%s_%s'%(hname,st))
					hsinglet.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, 172.5)]])
					hfinal.Add(hsinglet)


				## Add the backgrounds
				if not syst in ['dyup','dydown','qcdup','qcddown']:
					hfinal.Add(bghistos_added[(tag,ntk)])
				else: ## From the scaled bghistos if necessary
					bghistos_added_scaled = {
						'dyup'    : bghistos_added_dyup,
						'dydown'  : bghistos_added_dydn,
						'qcdup'   : bghistos_added_qcdup,
						'qcddown' : bghistos_added_qcddn,
					}[syst]
					hfinal.Add(bghistos_added_scaled[(tag,ntk)])

				## Rebin if requested
				if opt.rebin>0:
					hfinal.Rebin(opt.rebin)

				## Scale by SV track multiplicity weights:
				if not syst == 'ntkmult':
					hfinal.Scale(ntkWeights['inclusive'][ntk])

				## Write out to file
				hfinal.Write(hname, ROOT.TObject.kOverwrite)

	#####################################################
	## Non-central mass points
	ROOT.gSystem.Load('libUserCodeTopMassSecVtx.so')
	from ROOT import th1fmorph
	# extract mass points from dictionary
	mass_points = sorted(list(set([key[2] for key in masshistos.keys()])))
	mass_points = mass_points[1:-1] # remove outermost points
	debughistos = []
	for mass in mass_points:
		if mass == 172.5: continue
		mname = 'nominal_%s' % str(mass).replace('.','v')
		odir = ofi.mkdir(mname)
		odir.cd()
		for tag,_,_ in SELECTIONS:
			for ntk,_ in NTRKBINS:
				hname = "SVLMass_%s_%s_%s" % (tag,mname,ntk)
				hfinal = masshistos[(tag,'tt',mass,'tot',ntk)].Clone(hname)
				hfinal.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[('tt', mass)]])

				## Add single top (t-channel, for which we have the samples)
				for st in ['t', 'tbar']:
					hsinglet = masshistos[(tag, st, mass,'tot',ntk)].Clone('%s_%s'%(hname,st))
					hsinglet.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, mass)]])
					hfinal.Add(hsinglet)

				## Add single top (tW-channel, for which we don't have samples)
				## Morph between the two extreme mass points to get
				## the non existing ones
				for st in ['tW', 'tbarW']:
					if mass not in [166.5, 178.5]:
						hsingletW = th1fmorph('%s_%s_morph'%(hname,st),
							                  '%s_%s_morphed'%(hname,st),
							                   masshistos[(tag, 'tW', 166.5,'tot',ntk)],
							                   masshistos[(tag, 'tW', 178.5,'tot',ntk)],
							                   166.5, 178.5, mass,
							                   masshistos[(tag, 'tW', 166.5,'tot',ntk)].Integral())
						hsingletW.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, 166.5)]]
							                * TWXSECS[mass]/TWXSECS[166.5])
						hsingletW.SetDirectory(0)
					else:
						hsingletW = masshistos[(tag, st, mass,'tot',ntk)].Clone('%s_%s'%(hname,st))
						hsingletW.Scale(LUMI*xsecweights[CHANMASSTOPROCNAME[(st, mass)]])
					hfinal.Add(hsingletW)

				## Add the combined backgrounds
				hfinal.Add(bghistos_added[(tag,ntk)])

				## Rebin if requested
				if opt.rebin>0:
					hfinal.Rebin(opt.rebin)

				## Scale by SV track multiplicity weights:
				hfinal.Scale(ntkWeights['inclusive'][ntk])

				## Write out to file
				hfinal.Write(hname, ROOT.TObject.kOverwrite)

	## Write also data histos
	ofi.cd()
	odir = ofi.mkdir('data')
	odir.cd()
	for tag,_,_ in SELECTIONS:
		for ntk,_ in NTRKBINS:
			hname = "SVLMass_%s_data_%s" % (tag,ntk)
			datahistos_added[(tag,ntk)].Write(hname, ROOT.TObject.kOverwrite)


	print ('>>> Wrote pseudo experiment inputs to file (%s)' %
		                      osp.join(opt.outDir,'pe_inputs.root'))

	ofi.Write()
	ofi.Close()

	return 0
Code Example #8
def main(args, opt):
    os.system('mkdir -p %s' % opt.outDir)
    mcfiles = {}  # procname -> filename
    datafiles = {}  # procname -> filename
    try:
        for fname in os.listdir(args[0]):
            if not osp.splitext(fname)[1] == '.root': continue
            isdata, procname, splitno = resolveFilename(fname)
            if isdata:
                if not procname in datafiles:
                    datafiles[procname] = []
                datafiles[procname].append(osp.join(args[0], fname))
            else:
                if 'QCD' in procname: continue  ## exclude QCD
                if procname == 'TTJets_MSDecays_172v5':
                    continue  ## have those already
                if 'SingleT' in procname: continue  ## have those already

                if not procname in mcfiles:
                    mcfiles[procname] = []
                mcfiles[procname].append(osp.join(args[0], fname))

    except IndexError:
        print "Please provide a valid input directory"
        exit(-1)

    ## Produce (or read) the histogram data
    bghistos = makeBackgroundHistos(mcfiles, opt)

    cachefile = open(".xsecweights.pck", 'r')
    xsecweights = pickle.load(cachefile)
    cachefile.close()
    print '>>> Read xsec weights from cache (.xsecweights.pck)'

    cachefile = open(".svldyscalefactors.pck", 'r')
    dySFs = pickle.load(cachefile)
    cachefile.close()
    print '>>> Read DY scale factors from cache (.svldyscalefactors.pck)'

    cachefile = open(".svlqcdtemplates.pck", 'r')
    qcdTemplates = pickle.load(cachefile)
    cachefile.close()
    print '>>> Read QCD templates from cache (.svlqcdtemplates.pck)'

    ## Read SV Track multiplicity weights:
    from extractNtrkWeights import extractNTrkWeights
    ntkWeights = extractNTrkWeights()

    ## Now add them up with proper scales
    mcprocesses = [k for k in mcfiles.keys() if not 'Data8TeV' in k]
    bghistos_added = sumBGHistos(processes=mcprocesses,
                                 bghistos=bghistos,
                                 xsecweights=xsecweights,
                                 ntkWeights=ntkWeights,
                                 dySFs=dySFs,
                                 qcdTemplates=qcdTemplates,
                                 opt=opt)

    bghistos_added_dyup = sumBGHistos(processes=mcprocesses,
                                      bghistos=bghistos,
                                      xsecweights=xsecweights,
                                      ntkWeights=ntkWeights,
                                      dySFs=dySFs,
                                      qcdTemplates=qcdTemplates,
                                      opt=opt,
                                      dyScale=1.3)
    bghistos_added_dydn = sumBGHistos(processes=mcprocesses,
                                      bghistos=bghistos,
                                      xsecweights=xsecweights,
                                      ntkWeights=ntkWeights,
                                      dySFs=dySFs,
                                      qcdTemplates=qcdTemplates,
                                      opt=opt,
                                      dyScale=0.7)
    bghistos_added_qcdup = sumBGHistos(processes=mcprocesses,
                                       bghistos=bghistos,
                                       xsecweights=xsecweights,
                                       ntkWeights=ntkWeights,
                                       dySFs=dySFs,
                                       qcdTemplates=qcdTemplates,
                                       opt=opt,
                                       qcdScale=1.1)
    bghistos_added_qcddn = sumBGHistos(processes=mcprocesses,
                                       bghistos=bghistos,
                                       xsecweights=xsecweights,
                                       ntkWeights=ntkWeights,
                                       dySFs=dySFs,
                                       qcdTemplates=qcdTemplates,
                                       opt=opt,
                                       qcdScale=0.9)

    ## Produce data histograms
    datahistos = makeDataHistos(datafiles, opt)
    datahistos_added = sumDataHistos(datafiles.keys(), datahistos)
    # Rebin also data, if required:
    if opt.rebin > 0:
        for hist in datahistos_added.values():
            hist.Rebin(opt.rebin)

    ## Save the background only shapes separately as templates for the fit
    cachefile = open(".svlbgtemplates.pck", 'w')
    pickle.dump(bghistos_added, cachefile, pickle.HIGHEST_PROTOCOL)
    print '>>> Dumped bg templates to cache (.svlbgtemplates.pck)'
    cachefile.close()

    ## Read syst histos:
    cachefile = open(".svlsysthistos.pck", 'r')
    systhistos = pickle.load(cachefile)
    print '>>> Read systematics histograms from cache (.svlsysthistos.pck)'
    cachefile.close()

    ## Read mass scan histos:
    cachefile = open(".svlmasshistos.pck", 'r')
    masshistos = pickle.load(cachefile)
    print '>>> Read mass scan histograms from cache (.svlmasshistos.pck)'
    # (tag, chan, mass, comb)      -> histo
    # (tag, chan, mass, comb, ntk) -> histo
    cachefile.close()

    ## Signal only (tt+t+tW) shapes
    signalonly = {}

    ofi = ROOT.TFile.Open(osp.join(opt.outDir, 'pe_inputs.root'), 'RECREATE')
    ofi.cd()

    #####################################################
    ## Central mass point and syst samples
    to_be_processed = ([s for s, _, _, _ in ALLSYSTS] +
                       ['dyup', 'dydown', 'qcdup', 'qcddown', 'ntkmult'])
    if opt.skip_systs: to_be_processed = ['nominal']

    for syst in to_be_processed:
        odir = ofi.mkdir(syst + '_172v5')
        odir.cd()
        for tag, _, _ in SELECTIONS:
            for ntk, _ in NTRKBINS:
                hname = "SVLMass_%s_%s_%s" % (tag, syst + '_172v5', ntk)
                if not syst in [
                        'dyup', 'dydown', 'qcdup', 'qcddown', 'ntkmult',
                        'tchscaleup', 'tchscaledown', 'twchscaleup',
                        'twchscaledown'
                ]:
                    hfinal = systhistos[(tag, syst, 'tot', ntk)].Clone(hname)
                else:
                    hfinal = systhistos[(tag, 'nominal', 'tot',
                                         ntk)].Clone(hname)
                try:
                    ## Systs from separate samples
                    if syst in [
                            'tchscaleup', 'tchscaledown', 'twchscaleup',
                            'twchscaledown'
                    ]:
                        scale = LUMI * xsecweights[CHANMASSTOPROCNAME[('tt',
                                                                       172.5)]]
                    else:
                        scale = LUMI * xsecweights[SYSTTOPROCNAME[syst][0]]
                except KeyError:
                    ## Systs from event weights
                    scale = LUMI * xsecweights[CHANMASSTOPROCNAME[('tt',
                                                                   172.5)]]
                hfinal.Scale(scale)

                ## Renormalize some variations with event weights
                if syst in SYSTSTOBERENORMALIZED:
                    normintegral = systhistos[(tag, 'nominal', 'tot',
                                               ntk)].Integral()
                    normintegral *= LUMI * xsecweights[CHANMASSTOPROCNAME[
                        ('tt', 172.5)]]
                    normintegral /= hfinal.Integral()
                    hfinal.Scale(normintegral)

                ## Add single top
                stProcs = ['t', 'tbar', 'tW', 'tbarW']
                stSystProcs = []
                if 'tchscale' in syst:
                    stProcs = ['tW', 'tbarW']
                    stSystProcs = ['t', 'tbar']
                if 'twchscale' in syst:
                    stProcs = ['t', 'tbar']
                    stSystProcs = ['tW', 'tbarW']
                for st in stProcs:
                    hsinglet = masshistos[(tag, st, 172.5, 'tot',
                                           ntk)].Clone('%s_%s' % (hname, st))
                    hsinglet.Scale(
                        LUMI * xsecweights[CHANMASSTOPROCNAME[(st, 172.5)]])
                    hfinal.Add(hsinglet)
                for st in stSystProcs:
                    hsinglet = systhistos[(tag, syst, 'tot',
                                           ntk)].Clone('%s_%s' % (hname, st))
                    hsinglet.Scale(
                        LUMI * xsecweights[CHANMASSTOPROCNAME[(st, 172.5)]])
                    hfinal.Add(hsinglet)

                ## Save signal only shapes
                if syst == 'nominal':
                    signalonly[(tag, 172.5,
                                ntk)] = hfinal.Clone('%s_sigonly' % hname)
                    signalonly[(tag, 172.5,
                                ntk)].Scale(ntkWeights['inclusive'][ntk])
                    if opt.rebin > 0:
                        signalonly[(tag, 172.5, ntk)].Rebin(opt.rebin)

                ## Add the backgrounds
                if not syst in ['dyup', 'dydown', 'qcdup', 'qcddown']:
                    hfinal.Add(bghistos_added[(tag, ntk)])
                else:  ## From the scaled bghistos if necessary
                    bghistos_added_scaled = {
                        'dyup': bghistos_added_dyup,
                        'dydown': bghistos_added_dydn,
                        'qcdup': bghistos_added_qcdup,
                        'qcddown': bghistos_added_qcddn,
                    }[syst]
                    hfinal.Add(bghistos_added_scaled[(tag, ntk)])

                ## Rebin if requested
                if opt.rebin > 0:
                    hfinal.Rebin(opt.rebin)

                ## Scale by SV track multiplicity weights:
                if not syst == 'ntkmult':
                    hfinal.Scale(ntkWeights['inclusive'][ntk])

                ## Write out to file
                hfinal.Write(hname, ROOT.TObject.kOverwrite)

    #####################################################
    ## Non-central mass points
    ROOT.gSystem.Load('libUserCodeTopMassSecVtx.so')
    from ROOT import th1fmorph
    # extract mass points from dictionary
    mass_points = sorted(list(set([key[2] for key in masshistos.keys()])))
    mass_points = mass_points[1:-1]  # remove outermost points
    debughistos = []
    for mass in mass_points:
        if mass == 172.5: continue
        mname = 'nominal_%s' % str(mass).replace('.', 'v')
        odir = ofi.mkdir(mname)
        odir.cd()
        for tag, _, _ in SELECTIONS:
            for ntk, _ in NTRKBINS:
                hname = "SVLMass_%s_%s_%s" % (tag, mname, ntk)
                hfinal = masshistos[(tag, 'tt', mass, 'tot', ntk)].Clone(hname)
                hfinal.Scale(LUMI *
                             xsecweights[CHANMASSTOPROCNAME[('tt', mass)]])

                ## Add single top (t-channel, for which we have the samples)
                for st in ['t', 'tbar']:
                    hsinglet = masshistos[(tag, st, mass, 'tot',
                                           ntk)].Clone('%s_%s' % (hname, st))
                    hsinglet.Scale(LUMI *
                                   xsecweights[CHANMASSTOPROCNAME[(st, mass)]])
                    hfinal.Add(hsinglet)

                ## Add single top (tW-channel, for which we don't have samples)
                ## Morph between the two extreme mass points to get
                ## the non existing ones
                for st in ['tW', 'tbarW']:
                    if mass not in [166.5, 178.5]:
                        hsingletW = th1fmorph(
                            '%s_%s_morph' % (hname, st),
                            '%s_%s_morphed' % (hname, st),
                            masshistos[(tag, 'tW', 166.5, 'tot', ntk)],
                            masshistos[(tag, 'tW', 178.5, 'tot', ntk)], 166.5,
                            178.5, mass, masshistos[(tag, 'tW', 166.5, 'tot',
                                                     ntk)].Integral())
                        hsingletW.Scale(
                            LUMI *
                            xsecweights[CHANMASSTOPROCNAME[(st, 166.5)]] *
                            TWXSECS[mass] / TWXSECS[166.5])
                        hsingletW.SetDirectory(0)
                    else:
                        hsingletW = masshistos[(tag, st, mass, 'tot',
                                                ntk)].Clone('%s_%s' %
                                                            (hname, st))
                        hsingletW.Scale(
                            LUMI * xsecweights[CHANMASSTOPROCNAME[(st, mass)]])
                    hfinal.Add(hsingletW)

                ## Save signal only shapes
                signalonly[(tag, mass,
                            ntk)] = hfinal.Clone('%s_sigonly' % hname)
                signalonly[(tag, mass,
                            ntk)].Scale(ntkWeights['inclusive'][ntk])
                if opt.rebin > 0:
                    signalonly[(tag, mass, ntk)].Rebin(opt.rebin)

                ## Add the combined backgrounds
                hfinal.Add(bghistos_added[(tag, ntk)])

                ## Rebin if requested
                if opt.rebin > 0:
                    hfinal.Rebin(opt.rebin)

                ## Scale by SV track multiplicity weights:
                hfinal.Scale(ntkWeights['inclusive'][ntk])

                ## Write out to file
                hfinal.Write(hname, ROOT.TObject.kOverwrite)

    ## Save the signal only shapes (tt+t+tW) as input for the combined plot
    cachefile = open(".svlsignalshapes.pck", 'w')
    pickle.dump(signalonly, cachefile, pickle.HIGHEST_PROTOCOL)
    print '>>> Dumped signal only shapes to cache (.svlsignalshapes.pck)'
    cachefile.close()

    ## Write also data histos
    ofi.cd()
    odir = ofi.mkdir('data')
    odir.cd()
    for tag, _, _ in SELECTIONS:
        for ntk, _ in NTRKBINS:
            hname = "SVLMass_%s_data_%s" % (tag, ntk)
            datahistos_added[(tag, ntk)].Write(hname, ROOT.TObject.kOverwrite)

    print('>>> Wrote pseudo experiment inputs to file (%s)' %
          osp.join(opt.outDir, 'pe_inputs.root'))

    ofi.Write()
    ofi.Close()

    return 0
Code Example #9
File: plotMtopvsPtFrac.py  Project: pfs/TopMassSecVtx
def main(args, opt):
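	# Collect the data files and the split nominal TTJets chunks, fill the
	# histograms produced by makeHistos (in parallel when opt.jobs > 1),
	# merge them per sample, variable and track-multiplicity bin, cache the
	# result with pickle, and draw the SV mass plots with makeSVMassPlots.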
	os.system('mkdir -p %s'%opt.outDir)
	treefiles = {} # procname -> filename
	try:
		# Get the data files
		treefiles['data'] = []
		for fname in os.listdir(args[0]):
			if not osp.splitext(fname)[1] == '.root': continue
			isdata,procname,splitno = resolveFilename(fname)
			if not isdata: continue

			treefiles['data'].append(osp.join(args[0],fname))

		# Get the split nominal files
		treefiles['nominal'] = []
		for fname in os.listdir(osp.join(args[0],'Chunks')):
			if not osp.splitext(fname)[1] == '.root': continue
			isdata,procname,splitno = resolveFilename(fname)
			if not procname == 'TTJets_MSDecays_172v5': continue
			if not splitno: continue # skip files without a split number

			treefiles['nominal'].append(osp.join(args[0],'Chunks',fname))
		if len(treefiles['nominal']) < 20:
			print "ERROR >>> Missing files for split nominal sample?"
			return -1

	except IndexError:
		print "Please provide a valid input directory"
		exit(-1)

	if not opt.cache:

		tasks = []
		for tag,files in treefiles.iteritems():
			for n,fname in enumerate(files):
				tasks.append((fname, '%s_%d'%(tag,n)))

		frachistos = {}
		if not opt.jobs>1:
			for url, name in tasks:
				_,frachistos[name] = makeHistos((url, name))
		else:
			allhistos = []
			from multiprocessing import Pool
			p = Pool(opt.jobs)
			allhistos = p.map(makeHistos, tasks)
			p.close()
			p.join()
			for name,hists in allhistos:
				frachistos[name] = hists

		# Merge the histos
		frachistos_merged = {}
		for tag in treefiles.keys():
			for n in range(len(treefiles[tag])):
				name = '%s_%d'%(tag,n)
				for ntk,_ in NTRKBINS:
					for var in [v[0] for v in VARS]:
						hist = frachistos[name][(var, ntk)].Clone()
						if not (tag,var,ntk) in frachistos_merged:
							frachistos_merged[(tag,var,ntk)] = hist
						else:
							frachistos_merged[(tag,var,ntk)].Add(hist)

		cachefile = open(".svptfrachistos.pck", 'w')
		pickle.dump(frachistos_merged, cachefile, pickle.HIGHEST_PROTOCOL)
		print ">>> Wrote frachistos to cache (.svptfrachistos.pck)"
		cachefile.close()

	cachefile = open(".svptfrachistos.pck", 'r')
	frachistos = pickle.load(cachefile)
	print '>>> Read frachistos from cache (.svptfrachistos.pck)'
	cachefile.close()

	ROOT.gStyle.SetOptTitle(0)
	ROOT.gStyle.SetOptStat(0)
	ROOT.gROOT.SetBatch(1)

	makeSVMassPlots(frachistos)

	return 0
Code Example #10
def main(args, opt):
    os.system('mkdir -p %s' % opt.outDir)
    systfiles = {}  # procname -> filename
    try:
        for fname in os.listdir(os.path.join(args[0], 'syst')):
            if not os.path.splitext(fname)[1] == '.root': continue
            for syst, _, systfile, _ in SYSTSFROMFILES:
                if fname in systfile:
                    systfiles[syst] = [os.path.join(args[0], 'syst', fname)]

        # Get the split nominal files
        systfiles['nominal'] = []
        for fname in os.listdir(os.path.join(args[0], 'Chunks')):
            if not os.path.splitext(fname)[1] == '.root': continue
            isdata, procname, splitno = resolveFilename(fname)
            if not procname == 'TTJets_MSDecays_172v5': continue
            if not splitno: continue  # skip files without a split number

            systfiles['nominal'].append(os.path.join(args[0], 'Chunks', fname))
        if len(systfiles['nominal']) < 20:
            print "ERROR >>> Missing files for split nominal sample?"
            return -1

    except IndexError:
        print "Please provide a valid input directory"
        exit(-1)

    hname_to_keys = {}  # hname -> (tag, syst, comb)
    tasklist = {}  # treefile -> tasklist

    for fsyst in systfiles.keys():
        if not fsyst in tasklist: tasklist[fsyst] = []
        for tag, sel, _ in SELECTIONS:
            if fsyst == 'nominal':
                for syst, _, weight, combs in SYSTSFROMWEIGHTS:
                    tasks = makeSystTask(tag,
                                         sel,
                                         syst,
                                         hname_to_keys,
                                         weight=weight,
                                         combs=combs)
                    tasklist[fsyst] += tasks

                tasks = []
                for comb, combsel in COMBINATIONS.iteritems():
                    for var, nbins, xmin, xmax, titlex in CONTROLVARS:
                        hname = "%s_%s_%s" % (var, comb, tag)
                        finalsel = "%s*(%s&&%s)" % (COMMONWEIGHT, sel, combsel)
                        tasks.append(
                            (hname, var, finalsel, nbins, xmin, xmax, titlex))
                        hname_to_keys[hname] = (tag, var, comb)

                tasklist[fsyst] += tasks

                tasks = []
                for name, nus in [('nu', 1), ('nonu', 0), ('nuunm', -1)]:
                    hname = "SVLMass_%s_%s_%s" % ('tot', tag, name)
                    finalsel = "%s*(%s&&BHadNeutrino==%d)" % (COMMONWEIGHT,
                                                              sel, nus)
                    tasks.append((hname, 'SVLMass', finalsel, NBINS, XMIN,
                                  XMAX, MASSXAXISTITLE))
                    hname_to_keys[hname] = (tag, name, 'tot')

                tasklist[fsyst] += tasks

            else:
                tasks = makeSystTask(tag, sel, fsyst, hname_to_keys)
                tasklist[fsyst] += tasks

    if not opt.cache:
        # print '  Will process the following tasks:'
        # for filename,tasks in sorted(tasklist.iteritems()):
        # 	print filename
        # 	for task in tasks:
        # 		print task
        # raw_input("Press any key to continue...")
        runTasks(systfiles, tasklist, opt, 'syst_histos')

        systhistos = {}  # (tag, syst, comb) -> histo
        systhistos = gatherHistosFromFiles(
            tasklist, systfiles, os.path.join(opt.outDir, 'syst_histos'),
            hname_to_keys)

        cachefile = open(".svlsysthistos.pck", 'w')
        pickle.dump(systhistos, cachefile, pickle.HIGHEST_PROTOCOL)
        cachefile.close()

        # print "Wrote syst histos to cache file"
        # raw_input("press key")

    cachefile = open(".svlsysthistos.pck", 'r')
    systhistos = pickle.load(cachefile)
    print '>>> Read syst histos from cache (.svlsysthistos.pck)'
    cachefile.close()

    ROOT.gStyle.SetOptTitle(0)
    ROOT.gStyle.SetOptStat(0)
    ROOT.gROOT.SetBatch(1)

    for var, _, _, _, _ in CONTROLVARS:
        for sel, tag in [
                #('inclusive', 'Fully Inclusive'),
                #('inclusive_mrank1', 'Mass ranked, leading p_{T}'),
                #('inclusive_mrank1dr', 'Mass ranked, #DeltaR<2, leading p_{T}'),
                #('inclusive_drrank1dr', '#DeltaR ranked, #DeltaR<2, leading p_{T}'),
            ('inclusive_optmrank', 'Optimized mass rank')
        ]:
            try:
                makeControlPlot(systhistos, var, sel, tag, opt)
            except KeyError:
                print 'control plots for %s selection not found' % sel

    for tag, _, _ in SELECTIONS:
        if not 'inclusive' in tag: continue
        print "... processing %s" % tag

        # Make plot of mass with and without neutrino:
        # for comb in COMBINATIONS.keys():
        # plot = RatioPlot('neutrino_%s'%tag)
        # plot.rebin = 2
        # plot.add(systhistos[(tag,'nonu', 'tot')], 'Without neutrino')
        # plot.add(systhistos[(tag,'nu',   'tot')], 'With neutrino')
        # plot.add(systhistos[(tag,'nuunm','tot')], 'Unmatched')
        # plot.reference = systhistos[(tag,'nominal','tot')]
        # plot.tag = "Mass shape with and without neutrinos"
        # plot.subtag = SELNAMES[tag] + COMBNAMES['tot']
        # plot.ratiotitle = 'Ratio wrt Total'
        # plot.ratiorange = (0.7, 1.3)
        # plot.colors = [ROOT.kBlue-3, ROOT.kRed-4, ROOT.kOrange-3]
        # plot.show("neutrino_%s_%s"%(tag,'tot'),
        # 	      os.path.join(opt.outDir, 'syst_plots'))
        # plot.reset()

        for name, title, systs, colors, comb in SYSTPLOTS:
            print name, title, systs, colors, comb
            plot = RatioPlot('%s_%s' % (name, comb))
            plot.rebin = 2

            for syst in systs:
                try:
                    plot.add(systhistos[(tag, syst, comb)], SYSTNAMES[syst])
                except:
                    print 'failed to add', (tag, syst, comb), syst

            plot.tag = title
            subtag = SELNAMES[tag] + COMBNAMES[comb]
            plot.subtag = subtag
            plot.ratiotitle = 'Ratio wrt %s' % SYSTNAMES[systs[0]]
            plot.ratiorange = (0.85, 1.15)
            plot.colors = colors
            filename = "%s_%s" % (name, tag)
            if comb != 'tot': filename += '_%s' % comb
            plot.show(filename, os.path.join(opt.outDir, 'syst_plots'))
            plot.reset()

        # Make top pt plot with both correct and wrong
        plot = RatioPlot('toppt_paper_cor_wro')
        plot.canvassize = (600, 600)
        plot.tag = 'Top quark p_{T} mismodeling'
        plot.rebin = 2
        plot.subtag = 'Inclusive channels'
        plot.tagpos = (0.92, 0.85)
        plot.subtagpos = (0.92, 0.78)
        plot.titlex = 'm_{svl} [GeV]'
        plot.ratiotitle = '1 / Nominal'
        # plot.ratiorange = (0.85, 1.15)
        plot.ratiorange = (0.92, 1.08)
        plot.legpos = (0.55, 0.38)
        plot.ratioydivisions = 405
        plot.colors = [
            ROOT.kGreen + 2, ROOT.kGreen - 6, ROOT.kRed + 2, ROOT.kRed - 6
        ]
        plot.add(systhistos[(tag, 'nominal', 'cor')],
                 'Nominal (correct)',
                 includeInRatio=False)
        plot.add(systhistos[(tag, 'toppt', 'cor')],
                 'p_{T} weighted (correct)',
                 includeInRatio=True)
        plot.add(systhistos[(tag, 'nominal', 'wro')],
                 'Nominal (wrong)',
                 includeInRatio=False)
        plot.add(systhistos[(tag, 'toppt', 'wro')],
                 'p_{T} weighted (wrong)',
                 includeInRatio=True)
        plot.reference = [
            systhistos[(tag, 'nominal', 'cor')],
            systhistos[(tag, 'nominal', 'wro')]
        ]

        plot.show('toppt_cor_wro_forpaper_%s' % tag,
                  os.path.join(opt.outDir, 'syst_plots'))
        plot.reset()

        # Make b fragmentation plot for paper
        plot = RatioPlot('bfrag_paper')
        plot.canvassize = (600, 600)
        plot.tag = 'b fragmentation'
        plot.titlex = 'm_{svl} [GeV]'
        plot.rebin = 2
        plot.subtag = 'Inclusive channels'
        plot.tagpos = (0.92, 0.85)
        plot.subtagpos = (0.92, 0.78)
        plot.ratiotitle = '1 / Z2* #it{r}_{b} LEP'
        # plot.ratiorange = (0.85, 1.15)
        plot.ratiorange = (0.92, 1.08)
        plot.legpos = (0.65, 0.20)
        plot.ratioydivisions = 405
        plot.colors = [
            ROOT.kMagenta, ROOT.kMagenta + 2, ROOT.kMagenta - 9,
            ROOT.kAzure + 7
        ]
        plot.add(systhistos[(tag, 'nominal', 'tot')],
                 'Z2* #it{r}_{b} LEP',
                 includeInRatio=False)
        plot.add(systhistos[(tag, 'bfragdn', 'tot')],
                 'Z2* #it{r}_{b} LEP soft')
        plot.add(systhistos[(tag, 'bfragup', 'tot')],
                 'Z2* #it{r}_{b} LEP hard')
        plot.add(systhistos[(tag, 'bfragz2s', 'tot')], 'Z2* nominal')
        plot.reference = [systhistos[(tag, 'nominal', 'tot')]]
        plot.show('bfrag_paper_%s' % tag, os.path.join(opt.outDir,
                                                       'syst_plots'))
        plot.reset()

    # for tag,sel,seltag in SELECTIONS:
    # 	print 70*'-'
    # 	print '%-10s: %s' % (tag, sel)
    # 	fcor, fwro, funm = {}, {}, {}
    # 	for mass in sorted(massfiles.keys()):
    # 	# mass = 172.5
    # 		hists = masshistos[(tag, mass)]
    # 		n_tot, n_cor, n_wro, n_unm = (x.GetEntries() for x in hists)
    # 		fcor[mass] = 100.*(n_cor/float(n_tot))
    # 		fwro[mass] = 100.*(n_wro/float(n_tot))
    # 		funm[mass] = 100.*(n_unm/float(n_tot))
    # 		print ('  %5.1f GeV: %7d entries \t'
    # 			   '(%4.1f%% corr, %4.1f%% wrong, %4.1f%% unmatched)' %
    # 			   (mass, n_tot, fcor[mass], fwro[mass], funm[mass]))

    # 	oname = os.path.join(opt.outDir, 'fracvsmt_%s'%tag)
    # 	plotFracVsTopMass(fcor, fwro, funm, tag, seltag, oname)

    # print 112*'-'
    # print 'Estimated systematics (from a crude chi2 fit)'
    # print '%20s | %-15s | %-15s | %-15s | %-15s | %-15s' % (
    # 									 'selection', 'bfrag', 'scale',
    # 									 'toppt', 'matching', 'uecr')
    # for tag,_,_ in SELECTIONS:
    # 		sys.stdout.write("%20s | " % tag)
    # 		for syst in ['bfrag', 'scale', 'toppt', 'matching', 'uecr']:
    # 			err, chi2 = systematics[(tag,syst)]
    # 			sys.stdout.write('%4.1f (%4.1f GeV)' % (chi2*1e5, err))
    # 			# sys.stdout.write('%4.1f (%4.1f GeV)' % (chi2, err))
    # 			sys.stdout.write(' | ')
    # 		sys.stdout.write('\n')
    # print 112*'-'

    return 0
Code Example #11
def main(args, opt):
    os.system('mkdir -p %s' % opt.outDir)
    treefiles = {}  # procname -> filename
    try:
        # Get the data files
        treefiles['data'] = []
        for fname in os.listdir(args[0]):
            if not osp.splitext(fname)[1] == '.root': continue
            isdata, procname, splitno = resolveFilename(fname)
            if not isdata: continue

            treefiles['data'].append(osp.join(args[0], fname))

        # Get the split nominal files
        treefiles['nominal'] = []
        for fname in os.listdir(osp.join(args[0], 'Chunks')):
            if not osp.splitext(fname)[1] == '.root': continue
            isdata, procname, splitno = resolveFilename(fname)
            if not procname == 'TTJets_MSDecays_172v5': continue
            if not splitno: continue  # skip files without a split number

            treefiles['nominal'].append(osp.join(args[0], 'Chunks', fname))
        if len(treefiles['nominal']) < 20:
            print "ERROR >>> Missing files for split nominal sample?"
            return -1

    except IndexError:
        print "Please provide a valid input directory"
        exit(-1)

    if not opt.cache:

        tasks = []
        for tag, files in treefiles.iteritems():
            for n, fname in enumerate(files):
                tasks.append((fname, '%s_%d' % (tag, n)))

        frachistos = {}
        if not opt.jobs > 1:
            for url, name in tasks:
                _, frachistos[name] = makeHistos((url, name))
        else:
            allhistos = []
            from multiprocessing import Pool
            p = Pool(opt.jobs)
            allhistos = p.map(makeHistos, tasks)
            p.close()
            p.join()
            for name, hists in allhistos:
                frachistos[name] = hists

        # Merge the histos
        frachistos_merged = {}
        for tag in treefiles.keys():
            for n in range(len(treefiles[tag])):
                name = '%s_%d' % (tag, n)
                for ntk, _ in NTRKBINS:
                    for var in [v[0] for v in VARS]:
                        hist = frachistos[name][(var, ntk)].Clone()
                        if not (tag, var, ntk) in frachistos_merged:
                            frachistos_merged[(tag, var, ntk)] = hist
                        else:
                            frachistos_merged[(tag, var, ntk)].Add(hist)

        cachefile = open(".svptfrachistos.pck", 'w')
        pickle.dump(frachistos_merged, cachefile, pickle.HIGHEST_PROTOCOL)
        print ">>> Wrote frachistos to cache (.svptfrachistos.pck)"
        cachefile.close()

    cachefile = open(".svptfrachistos.pck", 'r')
    frachistos = pickle.load(cachefile)
    print '>>> Read frachistos from cache (.svptfrachistos.pck)'
    cachefile.close()

    ROOT.gStyle.SetOptTitle(0)
    ROOT.gStyle.SetOptStat(0)
    ROOT.gROOT.SetBatch(1)

    makeSVMassPlots(frachistos)

    return 0