Example #1
# Standard modules needed by this snippet; plot_directory is assumed to be
# defined elsewhere in the repository (e.g. the user configuration).
import os
import pickle

# Logging
import StopsDilepton.tools.logger as logger
logger = logger.get_logger('INFO', logFile = None )
histos = {}
for m in ['doubleMu', 'doubleEle', 'muEle']:
    plot_path = "png25ns_2l_mAODv2_2100_noPU_new/%s_offZ_standard_isOS-leadingLepIsTight-njet2-nbtag1-met80-metSig5-dPhiJet0-dPhiJet1/" % m
    for fh in ["leadingLepIso"]:
        for swap in ["L1", "L2"]:
            for fs in ["mm","me","em","ee"]:
                ofile = os.path.join(plot_directory, plot_path,  "dl_mt2ll_%s_swap%s_%s.pkl"%(fh, swap, fs))
                if os.path.isfile(ofile):
                    logger.info( "Loading %s", ofile )
                    histos["%s_mt2ll_%s_swap%s_%s"%(m, fh, swap, fs)] = pickle.load( file( ofile) )
                else:
                    logger.warning( "File not found: %s", ofile)

def transpose(l):
    # transpose a list of lists (rows <-> columns)
    return list(map(list, zip(*l)))

def add_histos( l ):
    # sum a list of ROOT histograms into a clone of the first one
    res = l[0].Clone()
    for h in l[1:]: res.Add(h)
    return res

for name, fss in [
    ['doubleMu', ['mm']], 
    ['muEle', ['me', 'em']], 
    ['doubleEle', ['ee']]
    ]:
    to_be_added = []
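    # Hypothetical continuation of the truncated loop above: collect the
    # histograms of the final states belonging to this channel and merge them
    # with add_histos. Only histos, add_histos and the key format come from the
    # snippet; the merged key name is an assumption.
    for fh in ["leadingLepIso"]:
        for swap in ["L1", "L2"]:
            keys = ["%s_mt2ll_%s_swap%s_%s" % (name, fh, swap, fs) for fs in fss]
            to_be_added = [histos[k] for k in keys if k in histos]
            if to_be_added:
                histos["%s_mt2ll_%s_swap%s" % (name, fh, swap)] = add_histos(to_be_added)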
Example #2
        else:
            sig, fac = s, 1
        try:
            # look up the sample object by name and put it on the stack
            stack.append([eval(sig)])
            # apply the scale factor, creating the attribute if the sample has none
            if hasattr(stack[-1][0], "scale"):
                stack[-1][0].scale *= fac
            elif fac != 1:
                stack[-1][0].scale = fac

            if fac != 1:
                stack[-1][0].name += " x" + str(fac)
            logger.info("Adding sample %s with factor %3.2f", sig, fac)
        except NameError:
            logger.warning("Could not add signal %s", s)

sequence = []

rev = reversed if args.reversed else lambda x: x
for i_comb in rev(range(len(cuts) + 1)):
    #for i_comb in [len(cuts)]:
    for comb in itertools.combinations(cuts, i_comb):

        if not args.noData:
            data_sample.setSelectionString([dataFilterCut, trigger])
        for sample in mc:
            sample.setSelectionString([mcFilterCut, trigger])

        presel = preselection + list(comb)
Example #3
            sig, fac = split[0], int(split[1])
        else:
            sig, fac = s, 1
        try:
            # look up the sample object by name and put it on the stack
            stack.append( [eval(sig)] )
            # apply the scale factor, creating the attribute if the sample has none
            if hasattr(stack[-1][0], "scale"):
                stack[-1][0].scale *= fac
            elif fac != 1:
                stack[-1][0].scale = fac

            if fac != 1:
                stack[-1][0].name += " x" + str(fac)
            logger.info( "Adding sample %s with factor %3.2f", sig, fac)
        except NameError:
            logger.warning( "Could not add signal %s", s)

sequence = []
#if args.zMode == 'onZ':
#
#    def makeUParaUPerp( data ):
#         
#        qx = data.dl_pt*cos(data.dl_phi)  
#        qy = data.dl_pt*sin(data.dl_phi)
#
#        ux = -data.met_pt*cos(data.met_phi) - qx 
#        uy = -data.met_pt*sin(data.met_phi) - qy
#        data.upara = (ux*qx+uy*qy)/data.dl_pt
#        data.uperp = (ux*qy-uy*qx)/data.dl_pt
#
#    sequence.append( makeUParaUPerp )
Example #4
        else:
            sig, fac = s, 1
        try:
            # look up the sample object by name and put it on the stack
            stack.append([eval(sig)])
            # apply the scale factor, creating the attribute if the sample has none
            if hasattr(stack[-1][0], "scale"):
                stack[-1][0].scale *= fac
            elif fac != 1:
                stack[-1][0].scale = fac

            if fac != 1:
                stack[-1][0].name += " x" + str(fac)
            logger.info("Adding sample %s with factor %3.2f", sig, fac)
        except NameError:
            logger.warning("Could not add signal %s", s)


def fromTau(gen_lepton):
    # lepton produced in a tau decay
    return gen_lepton['n_tau'] > 0


def prompt(gen_lepton):
    # not from a tau and without B- or D-hadron ancestors
    return not fromTau(gen_lepton) and gen_lepton['n_B'] == 0 and gen_lepton['n_D'] == 0


def nonPrompt(gen_lepton):
    # not from a tau but with B- or D-hadron ancestors
    return not fromTau(gen_lepton) and not (gen_lepton['n_B'] == 0 and gen_lepton['n_D'] == 0)
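

# Illustration (not from the repository): classify hand-made gen_lepton
# dictionaries with the counters used by the helpers above; values are invented.
example_leptons = [
    {'n_tau': 1, 'n_B': 0, 'n_D': 0},   # produced in a tau decay
    {'n_tau': 0, 'n_B': 0, 'n_D': 0},   # prompt
    {'n_tau': 0, 'n_B': 1, 'n_D': 0},   # from heavy-flavour decay
]
for lep in example_leptons:
    print("%s %s %s" % (fromTau(lep), prompt(lep), nonPrompt(lep)))
# prints: True False False / False True False / False False True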
Example #5
    raise ValueError( "Need at least one sample. Got %r" % samples )

lumiScaleFactor = xSection*targetLumi/float(sample.normalization) if xSection is not None else None

from StopsDilepton.tools.puReweighting import getReweightingFunction
puRW        = getReweightingFunction(data="PU_2100_XSecCentral", mc="Fall15")
puRWDown    = getReweightingFunction(data="PU_2100_XSecDown", mc="Fall15")
puRWUp      = getReweightingFunction(data="PU_2100_XSecUp", mc="Fall15")


# output directory
outDir = os.path.join(options.targetDir, options.processingEra, "gen", sample.name)

if os.path.exists(outDir) and options.overwrite:
    if options.nJobs > 1:
        logger.warning( "NOT removing directory %s because nJobs = %i", outDir, options.nJobs )
    else:
        logger.info( "Output directory %s exists. Deleting.", outDir )
        shutil.rmtree(outDir)

try:    # Avoid trouble with race conditions in multithreading
    os.makedirs(outDir)
    logger.info( "Created output directory %s.", outDir )
except OSError:
    # the directory may already exist or have been created by another job
    pass

new_variables = [ 'weight/F']
read_variables = [] 
read_variables+= [Variable.fromString('nTrueInt/F')]
# reading gen particles for top pt reweighting
read_variables.append( Variable.fromString('ngenPartAll/I') )
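
# Worked example (invented numbers, not from the repository): the lumiScaleFactor
# above is the per-event luminosity weight xSection * targetLumi / normalization,
# here assuming cross sections in pb and luminosities in pb^-1.
example_xSection      = 100.     # cross section in pb
example_targetLumi    = 10000.   # target luminosity in pb^-1, i.e. 10 fb^-1
example_normalization = 1e6      # sum of processed event weights
print(example_xSection * example_targetLumi / example_normalization)   # -> 1.0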
Example #6
         if not '_reHadd_' in f:
             to_skip = False
             for skip in options.skip:
                 if skip in f:
                     logger.info("Found skip string %s in %s. Skipping.",
                                 skip, f)
                     to_skip = True
                     break
             if to_skip: continue
             isOK =  checkRootFile( full_filename, checkForObjects = [options.treeName]) \
                     if options.treeName is not None else checkRootFile( full_filename )
             if isOK:
                 rootFiles.append(f)
             else:
                 logger.warning(
                     "File %s does not look OK. Checked for tree: %r",
                     full_filename, options.treeName)
         else:
             logger.info("Found '_reHadd_' in file %s in %s. Skipping.",
                         full_filename, dirName)
 job = []
 jobsize = 0
 for fname in rootFiles:
     filename, file_extension = os.path.splitext(fname)
     n_str = filename.split('_')[-1]
     if n_str.isdigit():
         full_filename = os.path.join(dirName, fname)
         jobsize += os.path.getsize(full_filename)
         job.append(full_filename)
         if jobsize > 1024**3 * options.sizeGB:
             jobs.append(job)
Example #7
    #Calculating systematics
    h_summed = {k: plot_mc[k].histos_added[0][0].Clone() for k in plot_mc.keys()}

    #Normalize systematic shapes
    for k in h_summed.keys():
        if k is None: continue
        try:
            bin_low  = h_summed[None].FindBin(x_norm[0])
            bin_high = h_summed[None].FindBin(x_norm[1])
            # if 'njet' in plot_mc[k].name.lower(): print "before", k,h_summed[k], bin_low, bin_high, h_summed[k].Integral(bin_low, bin_high), [ h_summed[k][i] for i in range(10) ]
            h_summed[k].Scale(
                h_summed[None].Integral(bin_low, bin_high) / h_summed[k].Integral(bin_low, bin_high)
            )
            # if 'njet' in plot_mc[k].name.lower(): print "after", k,h_summed[k], bin_low, bin_high, h_summed[k].Integral(bin_low, bin_high), [ h_summed[k][i] for i in range(10) ]
        except ZeroDivisionError:
            logger.warning( "Found zero for variation %s of variable %s", k, plot_mc[k].name )


    h_rel_err = h_summed[None].Clone()
    h_rel_err.Reset()

    h_sys = {}
    for k, s1, s2 in sys_pairs:
        h_sys[k] = h_summed[s1].Clone()
        h_sys[k].Scale(-1)
        h_sys[k].Add(h_summed[s2])
        # h_sys[k].Divide(h_summed[None])

    h_rel_err = h_summed[None].Clone()
    h_rel_err.Reset()
Example #8
lumiScaleFactor = xSection * targetLumi / float(
    sample.normalization) if xSection is not None else None

from StopsDilepton.tools.puReweighting import getReweightingFunction
puRW = getReweightingFunction(data="PU_2100_XSecCentral", mc="Fall15")
puRWDown = getReweightingFunction(data="PU_2100_XSecDown", mc="Fall15")
puRWUp = getReweightingFunction(data="PU_2100_XSecUp", mc="Fall15")

# output directory
outDir = os.path.join(options.targetDir, options.processingEra, "gen",
                      sample.name)

if os.path.exists(outDir) and options.overwrite:
    if options.nJobs > 1:
        logger.warning("NOT removing directory %s because nJobs = %i", outDir,
                       options.nJobs)
    else:
        logger.info("Output directory %s exists. Deleting.", outDir)
        shutil.rmtree(outDir)

try:  # Avoid trouble with race conditions in multithreading
    os.makedirs(outDir)
    logger.info("Created output directory %s.", outDir)
except OSError:
    # the directory may already exist or have been created by another job
    pass

new_variables = ['weight/F']
read_variables = []
read_variables += [Variable.fromString('nTrueInt/F')]
# reading gen particles for top pt reweighting
read_variables.append(Variable.fromString('ngenPartAll/I'))
Example #9
     if f.endswith('.root'):
         full_filename = os.path.join(dirName, f)
         if not '_reHadd_' in f:
             to_skip = False
             for skip in options.skip:
                 if skip in f:
                     logger.info( "Found skip string %s in %s. Skipping.", skip, f )
                     to_skip = True
                     break
             if to_skip: continue
             isOK =  checkRootFile( full_filename, checkForObjects = [options.treeName]) \
                     if options.treeName is not None else checkRootFile( full_filename )
             if isOK:
                 rootFiles.append( f )
             else:
                 logger.warning( "File %s does not look OK. Checked for tree: %r", full_filename, options.treeName )
         else:
             logger.info( "Found '_reHadd_' in file %s in %s. Skipping.", full_filename, dirName )
 job = []
 jobsize = 0
 for fname in rootFiles:
     filename, file_extension = os.path.splitext(fname)
     n_str = filename.split('_')[-1]
     if n_str.isdigit():
         full_filename = os.path.join(dirName, fname)
         jobsize += os.path.getsize( full_filename  )
         job.append( full_filename )
         if jobsize>1024**3*options.sizeGB:
             jobs.append(job)
             job = []
             jobsize = 0
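
# Self-contained sketch (names invented) of the same size-based chunking idea as
# the loop above: pack files into jobs of at most roughly max_bytes each.
def chunk_by_size(files, max_bytes):
    # files: list of (path, size_in_bytes) pairs
    jobs, job, jobsize = [], [], 0
    for path, size in files:
        job.append(path)
        jobsize += size
        if jobsize > max_bytes:
            jobs.append(job)
            job, jobsize = [], 0
    if job:  # keep the last, partially filled job
        jobs.append(job)
    return jobs

print(chunk_by_size([("a.root", 6e8), ("b.root", 5e8), ("c.root", 3e8)], 1024**3))
# -> [['a.root', 'b.root'], ['c.root']]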
Example #10
            sig, fac = split[0], int(split[1])
        else:
            sig, fac = s, 1
        try:
            # look up the sample object by name and put it on the stack
            stack.append( [eval(sig)] )
            # apply the scale factor, creating the attribute if the sample has none
            if hasattr(stack[-1][0], "scale"):
                stack[-1][0].scale *= fac
            elif fac != 1:
                stack[-1][0].scale = fac

            if fac != 1:
                stack[-1][0].name += " x" + str(fac)
            logger.info( "Adding sample %s with factor %3.2f", sig, fac)
        except NameError:
            logger.warning( "Could not add signal %s", s)

def fromTau( gen_lepton ):
    return gen_lepton['n_tau']>0
def prompt( gen_lepton ):
    return not fromTau( gen_lepton) and gen_lepton['n_B']==0 and gen_lepton['n_D']==0
def nonPrompt( gen_lepton ):
    return not fromTau( gen_lepton) and not ( gen_lepton['n_B']==0 and gen_lepton['n_D']==0 ) 

gen_ttbar_sequence = []

# Match l1 and l2
def matchLeptons( data ):
    # Get Gen leptons
    gen_leps = getCollection(data, "GenLep", ["pt", "eta", "phi", "n_t", "n_W", "n_B", "n_D", "n_tau", "pdgId"], "nGenLep" )
    non_prompt = filter(lambda l: nonPrompt(l), gen_leps )
Example #11
    logger.info( "MET %3.2f genMET %3.2f pt(neutrinos from W) %s pt(neutrinos from tau from W) %s pt(other neutrinos) %s", \
        reader.data.met_pt, reader.data.met_genPt, str_neutrinos_fromW, str_neutrinos_fromTau, str_neutrinos_other )

    # Missing energy related
    met_pt = reader.data.met_pt
    met_phi = reader.data.met_phi
    met_genPt = reader.data.met_genPt
    met_genPhi = reader.data.met_genPhi
    delta_met = sqrt( ( met_pt*cos(met_phi)-met_genPt*cos(met_genPhi) )**2 + ( met_pt*sin(met_phi)-met_genPt*sin(met_genPhi) )**2 )

    mt2Calc.reset()
    mt2Calc.setMet( met_pt, met_phi )
    mt2Calc.setLeptons( reader.data.l1_pt, reader.data.l1_eta, reader.data.l1_phi, reader.data.l2_pt, reader.data.l2_eta, reader.data.l2_phi )
    if mt2Calc.mt2ll() != reader.data.dl_mt2ll:
        logger.warning( "MT2 inconsistency!" )
    mt2Calc.setMet( met_genPt, met_genPhi )
    mt2ll_genMet = mt2Calc.mt2ll()
    if mt2ll_genMet<140:
        logger.info(bold("MET mismeasurement"))

    jets = [ getObjDict(cmg_reader.data, "Jet_", ["pt","eta","phi","mcPt","id"], i) for i in range(cmg_reader.data.nJet) ] 
    dx, dy = 0., 0.
    for j in jets:
        if j['mcPt']>0:
            dx+=(j['pt']-j['mcPt'])*cos(j['phi'])
            dy+=(j['pt']-j['mcPt'])*sin(j['phi'])
        if abs(j['pt'] - j['mcPt'])>50:
            logger.info( "Mismeasured jet pt %3.2f mcPt %3.2f diff %3.2f, eta %3.2f phi %3.2f id %i", j['pt'], j['mcPt'], j['pt']-j['mcPt'], j['eta'], j['phi'], j['id'] )
        corr_pt = localJetResponse.corrected_jet_pt(j['pt'], j['eta'], j['phi'])
        local_mism = corr_pt - j['pt']
Example #12
    h_summed = {
        k: plot_mc[k].histos_added[0][0].Clone()
        for k in plot_mc.keys()
    }

    #Normalize systematic shapes
    for k in h_summed.keys():
        if k is None: continue
        try:
            bin_low = h_summed[None].FindBin(x_norm[0])
            bin_high = h_summed[None].FindBin(x_norm[1])
            # if 'njet' in plot_mc[k].name.lower(): print "before", k,h_summed[k], bin_low, bin_high, h_summed[k].Integral(bin_low, bin_high), [ h_summed[k][i] for i in range(10) ]
            h_summed[k].Scale(h_summed[None].Integral(bin_low, bin_high) /
                              h_summed[k].Integral(bin_low, bin_high))
            # if 'njet' in plot_mc[k].name.lower(): print "after", k,h_summed[k], bin_low, bin_high, h_summed[k].Integral(bin_low, bin_high), [ h_summed[k][i] for i in range(10) ]
        except ZeroDivisionError:
            logger.warning("Found zero for variation %s of variable %s", k,
                           plot_mc[k].name)

    h_rel_err = h_summed[None].Clone()
    h_rel_err.Reset()

    h_sys = {}
    for k, s1, s2 in sys_pairs:
        h_sys[k] = h_summed[s1].Clone()
        h_sys[k].Scale(-1)
        h_sys[k].Add(h_summed[s2])
        # h_sys[k].Divide(h_summed[None])

    h_rel_err = h_summed[None].Clone()
    h_rel_err.Reset()

    # Adding in quadrature
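
    # Hypothetical continuation (not from the repository): add the systematic
    # differences collected in h_sys in quadrature, bin by bin, and store them
    # relative to the nominal histogram h_summed[None] in h_rel_err.
    from math import sqrt
    for i_bin in range(1, h_rel_err.GetNbinsX() + 1):
        nominal = h_summed[None].GetBinContent(i_bin)
        if nominal == 0: continue
        variance = sum(h_sys[k].GetBinContent(i_bin)**2 for k in h_sys)
        h_rel_err.SetBinContent(i_bin, sqrt(variance) / nominal)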