Example #1
xsec = sample.xsec
nEvents = sample.nEvents
lumiweight1fb = xsec * 1000. / nEvents

# output directory
output_directory = os.path.join(skim_output_directory, 'gen', args.targetDir,
                                sample.name)

if not os.path.exists(output_directory):
    os.makedirs(output_directory)
    logger.info("Created output directory %s", output_directory)

# Load the reweight pickle file if weights are to be kept.
extra_variables = []
if args.addReweights:
    weightInfo = WeightInfo(sample.reweight_pkl)
    # Determine the coefficients to store in the vector
    # Sort IDs with respect to their position in the card file

    # weights for the ntuple
    rw_vector = TreeVariable.fromString("rw[w/F," + ",".join(
        w + '/F' for w in weightInfo.variables) + "]")
    rw_vector.nMax = weightInfo.nid
    extra_variables.append(rw_vector)

    # coefficients for the weight parametrization
    param_vector = TreeVariable.fromString("p[C/F]")
    param_vector.nMax = HyperPoly.get_ndof(weightInfo.nvar,
                                           args.interpolationOrder)
    hyperPoly = HyperPoly(args.interpolationOrder)
    extra_variables.append(param_vector)
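
# Standalone cross-check of the vector size reserved above. The connection to
# HyperPoly.get_ndof is an assumption (not quoted from the repo): a polynomial of
# order <= d in n variables has C(n+d, d) monomials, e.g. 6 for n=2, d=2
# (1, x, y, x^2, xy, y^2), which is the basis size used in the examples below.
from math import factorial

def n_polynomial_coefficients(n_variables, order):
    """Number of monomials of total degree <= order in n_variables variables."""
    return factorial(n_variables + order) // (factorial(n_variables) * factorial(order))

assert n_polynomial_coefficients(2, 2) == 6
assert n_polynomial_coefficients(3, 2) == 10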
Example #2
        'lepSel2-gammapt40-njet2p-nbjet1p-relIso0to0.12-met40-leptonIso2')

    ttZSample.setSelectionString(ttZSelectionString)
    ttgamma1lSample.setSelectionString(ttgamma1lSelectionString)
    ttgamma2lSample.setSelectionString(ttgamma2lSelectionString)

    # file reduction has to happen before the configuration loop below,
    # so that GetEntries() already reflects the reduced file list
    if args.small:
        for s in signal + ttZBg + ttgammaBg:
            s.reduceFiles(to=20)

    # configure samples
    for s in signal + ttZBg + ttgammaBg:

        s.event_factor = s.nEvents / float(s.chain.GetEntries())
        s.weightInfo = WeightInfo(s.reweight_pkl)
        s.weightInfo.set_order(args.order)

        if checkReferencePoint(s):
            s.setWeightString('ref_lumiweight1fb*(%s)*(%s)' %
                              (str(args.luminosity), str(s.event_factor)))
        else:
            s.setWeightString('lumiweight1fb*(%s)*(%s)' %
                              (str(args.luminosity), str(s.event_factor)))
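
    # Illustration with hypothetical numbers (not from the repo): with
    # args.luminosity = 150 and s.event_factor = 1.05 the string set above becomes
    example_weight_string = 'ref_lumiweight1fb*(%s)*(%s)' % (str(150.0), str(1.05))
    # -> 'ref_lumiweight1fb*(150.0)*(1.05)', i.e. the per-event 1/fb weight scaled
    # to the target luminosity and corrected for the fraction of files actually read.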

    observation = {}

    signal_jec_uncertainty = {}
    signal_fakerate_uncertainty = {}

    ttZ_SM_rate = {}
Example #3
        ttgammaFakeSample, tWSample, tWZSample, tZqSample, ZgammaSample
    ]
elif args.processFile == 'ttgamma_2l':
    bg = [
        ttgammaIsrSample, ttgammaLepSample, ttgammabSample, ttSample,
        ttgammaFakeSample, tWSample, tWZSample, tZqSample, ZgammaSample
    ]
else:
    bg = [
        ttgammaIsrSample, ttgammaLepSample, ttgammabSample, ttSample,
        ttgammaFakeSample, tWSample, tWZSample, tZqSample, ZgammaSample
    ]

# Polynomial parametrization
# ATTENTION: adjust this if you use more than one signal sample!
w = WeightInfo(ttXSample.reweight_pkl)
w.set_order(int(args.order))
if len(args.variables) == 0: args.variables = w.variables


def checkReferencePoint(sample):
    ''' check if sample is simulated with a reference point
    '''
    return pickle.load(file(sample.reweight_pkl))['ref_point'] != {}
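
# Hypothetical usage illustration: the 'ref_point' key is read by the function
# above; the example values here are invented. A sample generated at a reference
# point stores a non-empty dict such as {'ref_point': {'ctZ': 2.0}}, while a
# SM-like sample stores {'ref_point': {}}.
samples_with_ref_point = [s for s in [ttXSample] + bg if checkReferencePoint(s)]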


# file reduction has to be done first, before the configuration loop,
# so that GetEntries() already reflects the reduced file list
if args.small:
    for s in [ttXSample] + bg:
        s.reduceFiles(to=10)
Example #4
    # ttSample #select tt events with non-isolated gamma or gamma from bottom (cat c1 + c2)
    # signal: ttgamma events with isolated gamma from gluon or top (cat a1)

elif args.processFile.split('_')[0] == 'ttZ':
    ttZISRSample = copy.deepcopy(ttXSample)  # select ttZ events where the Z is not radiated from the top (cat a2)
    ttZISRSample.name = 'fwlite_ttZ_(non-t)_LO_order2_15weights_ref'

if args.processFile == 'ttZ_3l':
    bg = [ttZISRSample, WZSample, tWZSample, tZqSample, ttgammaSample]
elif args.processFile == 'ttZ_4l':
    bg = [ttZISRSample, WZSample, tWZSample, tZqSample, ttgammaSample]
elif args.processFile == 'ttgamma_1l':
    bg = [
        ttgammaIsrSample, ttgammaLepSample, ttgammabSample, ttSample,
        ttgammaFakeSample, tWSample, tWZSample, tZqSample, ZgammaSample
    ]
elif args.processFile == 'ttgamma_2l':
    bg = [
        ttgammaIsrSample, ttgammaLepSample, ttgammabSample, ttSample,
        ttgammaFakeSample, tWSample, tWZSample, tZqSample, ZgammaSample
    ]
else:
    bg = [
        ttgammaIsrSample, ttgammaLepSample, ttgammabSample, ttSample,
        ttgammaFakeSample, tWSample, tWZSample, tZqSample, ZgammaSample
    ]

# Polynomial parametrization
# ATTENTION: adjust this if you use more than one signal sample!
w = WeightInfo(ttXSample.reweight_pkl)
w.set_order(int(args.order))
if len(args.variables) == 0: args.variables = w.variables

def checkReferencePoint( sample ):
    ''' check if sample is simulated with a reference point
    '''
    return pickle.load(file(sample.reweight_pkl))['ref_point'] != {}

# file reduction has to be done first, before the configuration loop,
# so that GetEntries() already reflects the reduced file list
if args.small:
    for s in [ttXSample] + bg:
        s.reduceFiles( to = 1 )


# configure samples
Example #5
#w.get_total_weight_yield( coeffLists, x=1) # 1+1**2 = 2
#w.get_total_weight_yield( coeffLists, x=2)   # 1+2**2 = 5
#w.get_diff_weight_yield( 'x', coeffLists[0], x=3) # 2*3 = 6
#w.get_fisherInformation_matrix( coeffLists[0], x=2.) # -> 1/5.*(2*2)**2
#w.get_christoffels( coeffLists )
#
## christoffel
#christoffels = w.get_christoffels( coeffLists )
#christoffels(0, (3.) ) # 0.5*1/(1/(10.)*(6.)**2)*(1/10.*(6)**3+2/10.**2*6*2) = 3.0333333333333337

# Make a 2nd order dummy parametrization
#./make_reweight_card.py --overwrite --filename out_xy.dat --couplings 2 x 1 y 1

# Load parametrization
from TTXPheno.Tools.WeightInfo import WeightInfo
w = WeightInfo(
    "/afs/hephy.at/data/rschoefbeck01/TTXPheno/test_param/out_xy.pkl")
w.set_order(2)

variables = ('x', 'y')

# Specify a parametrization 1/x/y/x^2/xy/y^2
#1+x**2
coeffLists = [[0, 1, 1, 0, 1, 0]]

# expected results:
w.get_total_weight_yield(coeffLists, x=1, y=1)  # 1+1**2 = 2
w.get_total_weight_yield(coeffLists, x=2)  # 1+2**2 = 5
w.get_diff_weight_yield('x', coeffLists[0], x=3)  # 2*3 = 6
w.get_fisherInformation_matrix(coeffLists[0], x=2.)  # -> 1/5.*(2*2)**2
w.get_christoffels(coeffLists)
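
# Standalone cross-check of the numbers quoted above for a sigma(x) = 1 + x**2
# parametrization, in plain Python and independent of WeightInfo. The single-bin
# Fisher-information expression 1/sigma * (dsigma/dx)**2 is inferred from the
# comment above and should be read as an assumption.
def sigma(x):
    return 1. + x**2

def dsigma_dx(x):
    return 2. * x

def fisher_information(x):
    return (1. / sigma(x)) * dsigma_dx(x)**2

assert sigma(1.) == 2.        # get_total_weight_yield(..., x=1)
assert sigma(2.) == 5.        # get_total_weight_yield(..., x=2)
assert dsigma_dx(3.) == 6.    # get_diff_weight_yield('x', ..., x=3)
assert abs(fisher_information(2.) - 1/5. * (2*2)**2) < 1e-12  # get_fisherInformation_matrix(..., x=2.)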
Example #6
# Import samples
sample_file = "$CMSSW_BASE/python/TTXPheno/samples/benchmarks.py"
samples = imp.load_source("samples", os.path.expandvars(sample_file))
sample = getattr(samples, args.sample)

# Scale the plots by the number of events used (implemented in ref_lumiweight1fb)
event_factor = 1.
if args.small:
    sample.reduceFiles(to=20)
    event_factor = sample.nEvents / float(sample.chain.GetEntries())

#print(sample.chain.GetEntries())
#exit()

# Polynomial parametrization
w = WeightInfo(sample.reweight_pkl)
w.set_order(int(args.order))

colors = [
    ROOT.kMagenta + 1, ROOT.kOrange, ROOT.kBlue, ROOT.kCyan + 1,
    ROOT.kGreen + 1, ROOT.kRed, ROOT.kViolet, ROOT.kYellow + 2
]

coeffs = args.parameters[::2]
str_vals = args.parameters[1::2]
vals = list(map(float, str_vals))
params = []
for i_param, (coeff, val, str_val) in enumerate(zip(coeffs, vals, str_vals)):
    params.append({
        'legendText': ' '.join([coeff, str_val]),
        'WC': {
Example #7
args = argParser.parse_args()

#
# Logger
#
import TTXPheno.Tools.logger as logger
import RootTools.core.logger as logger_rt
logger    = logger.get_logger(   args.logLevel, logFile = None)
logger_rt = logger_rt.get_logger(args.logLevel, logFile = None)

if args.small: args.plot_directory += "_small"

# Import samples
from TTXPheno.samples.benchmarks import *
sample = fwlite_ttZ_ll_LO_order3_8weights 
w = WeightInfo(sample.reweight_pkl)
w.set_order(3)

params = [
    {'legendText': 'SM', 'color': ROOT.kBlue, 'WC': {}},
]
params += [{'legendText': 'ctZI %i' % ctZI, 'color': ROOT.kRed, 'WC': {'ctZI': ctZI}}
           for ctZI in range(-6, 7)]

#samples = []
#for i_param, param in enumerate(params):
#    samples.append(sample)
#    samples[-1].name+='_%i'%i_param
#    print param
#    samples[-1].style = styles.lineStyle( param['color'] )
#    samples[-1].texName = param['legendText']
#    #samples[-1].ttreeFormula = ROOT.TTreeFormula("weightstring_%i"%i_param,  w.get_weight_string(**param['WC']), samples[-1].chain )
# Output directory and making the dataset small
if args.small:
    args.version += '_small'
    sample.reduceFiles( to = 10 )


output_dir = os.path.join(tmp_directory, args.version)
if not os.path.exists(output_dir):
    os.makedirs( output_dir )

# lumi
lumi = 150

# Polynomial parametrization
w = WeightInfo(sample.reweight_pkl)
w.set_order(2)

# function that evaluates the weight of the SM hypothesis
sm_weight = w.get_weight_func()
# function that evaluates the weight of the BSM hypothesis
bsm_weight = w.get_weight_func(ctGI=10)
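
# Toy illustration of what these two functions evaluate (the coefficient ordering
# is an assumption, not quoted from WeightInfo): at order 2 with a single coupling
# the per-event weight stored in the p_C vector is a polynomial in the Wilson
# coefficient, w(ctGI) = p[0] + p[1]*ctGI + p[2]*ctGI**2, so the SM weight is the
# value at ctGI = 0 and the BSM weight here is the value at ctGI = 10.
def toy_weight(p, ctGI):
    return p[0] + p[1] * ctGI + p[2] * ctGI**2

p_example = [0.8, 0.05, 0.002]        # hypothetical per-event coefficients
toy_sm = toy_weight(p_example, 0.)    # 0.8
toy_bsm = toy_weight(p_example, 10.)  # 0.8 + 0.5 + 0.2 = 1.5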

selectionString = (
    "Sum$(genLep_pt>10&&(abs(genLep_pdgId)==11||abs(genLep_pdgId)==13)&&abs(genLep_eta)<2.5)==3"
    "&&Sum$(genLep_pt>20&&(abs(genLep_pdgId)==11||abs(genLep_pdgId)==13)&&abs(genLep_eta)<2.5)>=2"
    "&&Sum$(genLep_pt>40&&(abs(genLep_pdgId)==11||abs(genLep_pdgId)==13)&&abs(genLep_eta)<2.5)>=1"
    "&&abs(genZ_mass-91.2)<=10"
    "&&Sum$(genJet_pt>30&&abs(genJet_eta)<2.4)>=3"
    "&&Sum$(genJet_pt>30&&genJet_matchBParton>=1&&abs(genJet_eta)<2.4)>=1"
    "&&genZ_pt>=0")
sample.setSelectionString(selectionString)

# Define variables
file_read_variables = [
    "genZ_pt/F", "genZ_eta/F", "genZ_phi/F", "genZ_mass/F",
    "genZ_cosThetaStar/F", "ref_lumiweight1fb/F"
]
read_variables = list(map(TreeVariable.fromString, file_read_variables))
read_variables.append(VectorTreeVariable.fromString('p[C/F]', nMax=2000))
subDirectory = '_'.join(subDirectory)

# Plot directory
plot_directory_ = os.path.join(
    plot_directory,
    args.plot_directory,
    sample.name,
    subDirectory)

if not os.path.isdir(plot_directory_): os.makedirs(plot_directory_)

# get TChain
#c = sample.chain

# Polynomial parametrization
w = WeightInfo(sample.reweight_pkl)
w.set_order(int(args.order))

colors = [
    ROOT.kMagenta + 1, ROOT.kOrange, ROOT.kBlue, ROOT.kCyan + 1,
    ROOT.kGreen + 1, ROOT.kRed, ROOT.kViolet, ROOT.kYellow + 2
]

# selection
selection_string = cutInterpreter.cutString(args.selection)

# sample weight
#weight_string = '150*lumiweight1fb'
weight_string = None

# coefficient list (all weights)
Example #10
# TTXPheno
from TTXPheno.Tools.cutInterpreterGen import cutInterpreter
from TTXPheno.samples.benchmarks import *
from TTXPheno.Tools.user import plot_directory

# Sample
ttZ_sample = fwlite_ttZ_ll_LO_order2_15weights_ref

# reduce dataset
#ttZ_sample.reduceFiles( to = 1 )
# approximately compensate reduction of files
#ttZ_sample.setWeightString("200")

# get the reweighting function
from TTXPheno.Tools.WeightInfo import WeightInfo
ttZ_sample.weightInfo = WeightInfo(ttZ_sample.reweight_pkl)
ttZ_sample.weightInfo.set_order(2)

# Load the analysis regions
from TTXPheno.Analysis.regions import regions


class FakeBackground:
    ''' Just a fake background number producer. Ignores everything, returns a number.
    '''
    def __init__(self, name, relative_fraction):
        self.name = name
        self.relative_fraction = relative_fraction

    def setSelectionString(self, *args, **kwargs):
        pass
Example #11
textsize = 0.04
#lumi = 77 #36+41
#sigmaxBR =
#Nsim = 5000

# sample = test
sample = dim6top_ttZ_ll_LO_highStat_scan
#sample = dim6top_ttZ_ll_LO_currentplane_highStat_scan

# just 1 file
sample.files = sample.files[:1]

# get TChain
c = sample.chain

w = WeightInfo(sample.reweight_pkl)
w.set_order(2)

c.Draw("Z_pt>>h_Zpt(50,0,550)")  # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("Z_pt>>h_Zpt1(50,0,550)", '(' + w.get_weight_string(cpt=vcpt, cpQM=vcpQM) + ')/p_C[0]')
c.Draw(
    "Z_pt>>h_Zptarg(50,0,550)",
    '(' + w.get_weight_string(cpt=vcpt, cpQM=vcpQM, ctZ=vctZ, ctZI=vctZI) +
    ')/p_C[0]')
c.Draw(
    "Z_pt>>h_Zptargcut(50,0,550)",
    '(' + w.get_weight_string(cpt=vcpt, cpQM=vcpQM, ctZ=vctZ, ctZI=vctZI) +
    ')/p_C[0]*(p_C[5]<' + pc5thresh + ')')

#num = []
#num_arg = []
Example #12
# Legend
legend = ROOT.TLegend(0.62, 0.78, .9, .9)
legend.SetTextSize(textsize)
legend.SetTextFont(1)
legend.AddEntry(ROOT.h_weights, "no cut", "l")
legend.AddEntry(ROOT.h_weights_Zpt, "pT(Z) > 300 GeV", "l")


#labelling
label0 = ROOT.TText()
label0.SetTextAngle(90)
label0.SetNDC()
label0.SetTextFont(1)
label0.SetTextSize(textsize)
label0.SetTextColor(ROOT.kBlack)


ROOT.h_weights.Draw('HIST')
ROOT.h_weights_Zpt.Draw('HIST SAME')


w = WeightInfo(sample.reweight_pkl)
w.set_order( 2 )
labels = ['c0'] + ['*'.join(item.split('rw_')[1] for item in term.split('*')[1:])
                   for term in w.weight_string().split('+')][1:]
for k, label in enumerate(labels):
    label0.DrawText(0.13 + k * .053, 0.35, label)
#label0.Draw('SAME')
line.Draw('SAME')
legend.Draw()

c1.Print(os.path.join(plot_directory, 'weight.png'))
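
# Demo of the label extraction above with a hypothetical weight string; the exact
# format of WeightInfo.weight_string() is assumed here (a '+'-separated sum whose
# terms look like p_C[i]*rw_<var>*...), not quoted from the repo.
toy_weight_string = "p_C[0]+p_C[1]*rw_cpt+p_C[2]*rw_cpt*rw_cpt+p_C[3]*rw_cpQM"
toy_labels = ['c0'] + ['*'.join(item.split('rw_')[1] for item in term.split('*')[1:])
                       for term in toy_weight_string.split('+')][1:]
# -> ['c0', 'cpt', 'cpt*cpt', 'cpQM']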
Example #13
# Import samples
sample_file = "$CMSSW_BASE/python/TTXPheno/samples/benchmarks.py"
samples = imp.load_source("samples", os.path.expandvars(sample_file))
sample = getattr(samples, args.sample + '_%s' % args.detector)

# Scale the plots by the number of events used (implemented in ref_lumiweight1fb)
fisher_directory = 'fisher_information'
if args.small:
    sample.reduceFiles(to=1)
    fisher_directory += '_small'

event_factor = sample.nEvents / float(sample.chain.GetEntries())

# Polynomial parametrization
w = WeightInfo(sample.reweight_pkl)
w.set_order(int(args.order))

if len(args.variables) == 0:
    inputvariables = w.variables
else:
    # keep the list in the same order as w.variables (necessary for the EV and EVec calculation)
    inputvariables = [item for item in w.variables if item in args.variables]
# Format input parameters to dict
WC_string = 'SM'
WC = {}
if args.parameters is not None:
    coeffs = args.parameters[::2]
    str_vals = args.parameters[1::2]
    vals = list(map(float, str_vals))
    WC = {coeff: vals[i] for i, coeff in enumerate(coeffs)}
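
# Worked example of the parsing above with hypothetical command-line values:
# --parameters ctZ 2 cpt 1 arrives as ['ctZ', '2', 'cpt', '1'] and becomes
# {'ctZ': 2.0, 'cpt': 1.0}.
example_parameters = ['ctZ', '2', 'cpt', '1']
example_WC = {coeff: float(val) for coeff, val in
              zip(example_parameters[::2], example_parameters[1::2])}
# -> {'ctZ': 2.0, 'cpt': 1.0}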
Example #14
sample = fwlite_ttW_LO_order3_8weights
#sample = fwlite_ttZ_ll_LO_order3_8weights

# Background
TTBar = fwlite_tt_full_LO_order2_15weights_CMS
TW = fwlite_tW_LO_order2_15weights_CMS
TTZ = fwlite_ttZ_ll_LO_order2_15weights_ref_ext_phase2_CMS 
WZ = fwlite_WZ_lep_LO_order2_15weights_CMS 
#TTW = fwlite_ttW_LO_order3_8weights

bg = [ TTBar, TW, TTZ, WZ ]
#bg = [ TTBar, TW, TTW, WZ ]

# Polynomial parametrization
w = WeightInfo( sample.reweight_pkl )
w.set_order(int(args.order))

def checkReferencePoint( sample ):
    ''' check if sample is simulated with a reference point
    '''
    return pickle.load(file(sample.reweight_pkl))['ref_point'] != {}

if args.small:
    for s in [sample] + bg:
        s.reduceFiles( to = 10 )

# configure samples
for s in [sample] + bg:
    # Scale the plots by the number of events used (implemented in ref_lumiweight1fb)
    s.event_factor = s.nEvents / float( s.chain.GetEntries() )
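
    # Numeric illustration with hypothetical numbers: if s.nEvents = 1000000 but
    # the (possibly reduced) chain only contains 200000 entries, the factor
    # computed above restores the original normalisation when multiplied into
    # the event weight.
    assert 1000000 / float(200000) == 5.0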
Example #15
index = 5
#lumi = 77 #36+41
#sigmaxBR = 
#Nsim = 5000

# sample = test
sample = dim6top_ttZ_ll_LO_highStat_scan
#sample = dim6top_ttZ_ll_LO_currentplane_highStat_scan

# just 1 file
sample.files = sample.files[:1]

# get TChain
c = sample.chain 

w = WeightInfo(sample.reweight_pkl)
w.set_order( 2 )
minrange = str(-5)
maxrange = str(5)
c.Draw("p_C[" + str(index) + "]>>h_pc(50,0.,.0003)",'(Z_pt>300)*(p_C[5]<0.002)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[" + str(index) + "]>>h_pcptZ(50,0.158,.16)",'(Z_pt>345)*(Z_pt<355)') # "weight*(%s)" % weightString(cpt=0.2)
c.Draw("p_C[" + str(index) + "]>>h_pcptZ(50,-1,1)",'(Z_pt>345)*(Z_pt<355)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[0]>>h_pc0ptZ(50," + minrange + "," + maxrange + ")",'(Z_pt>340)*(Z_pt<360)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[1]>>h_pc1ptZ(50," + minrange + "," + maxrange + ")",'(Z_pt>340)*(Z_pt<360)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[2]>>h_pc2ptZ(50," + minrange + "," + maxrange + ")",'(Z_pt>340)*(Z_pt<360)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[3]>>h_pc3ptZ(50," + minrange + "," + maxrange + ")",'(Z_pt>340)*(Z_pt<360)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[4]>>h_pc4ptZ(50," + minrange + "," + maxrange + ")",'(Z_pt>340)*(Z_pt<360)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[5]>>h_pc5ptZ(50," + minrange + "," + maxrange + ")",'(Z_pt>340)*(Z_pt<360)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[6]>>h_pc6ptZ(50," + minrange + "," + maxrange + ")",'(Z_pt>340)*(Z_pt<360)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[7]>>h_pc7ptZ(50," + minrange + "," + maxrange + ")",'(Z_pt>340)*(Z_pt<360)') # "weight*(%s)" % weightString(cpt=0.2)
#c.Draw("p_C[8]>>h_pc8ptZ(50," + minrange + "," + maxrange + ")",'(Z_pt>340)*(Z_pt<360)') # "weight*(%s)" % weightString(cpt=0.2)