Ejemplo n.º 1
0
from RootTools.core.standard import *

# Samples
#directory = "/afs/hephy.at/data/rschoefbeck02/postProcessed/flat_jet_trees/v6/"
#sample = Sample.fromDirectory( "DoubleMuon", os.path.join( directory, "DoubleMuon_Run2018C-17Sep2018-v1_MINIAOD" ), isData = True)
from JetMET.diagnosis.pu2018.samples import *
from JetMET.tools.user import plot_directory
# RelVal reference sample for the 2018D DoubleMuon data set —
# presumably provided by the star import above; confirm against samples module.
sample = RelVal_DoubleMuon_Run2018D_Ref

#
# Logger
#
# Fixed INFO level, console only (no log file) for both the script and RootTools.
import JetMET.tools.logger as logger
import RootTools.core.logger as logger_rt
logger    = logger.get_logger(   'INFO', logFile = None)
logger_rt = logger_rt.get_logger('INFO', logFile = None)

# Bunch-crossing binning for the TProfiles.
# The commented-out variant covers the full orbit (~3000 BX); the active one
# zooms into the first 500 bunch crossings.
#bx_thresholds  = [i for i in range(3000)]
#prefix = ''
bx_thresholds = list(range(500))
prefix = 'zoomed_'

# Text drawn on every plot: (x, y, label) in NDC coordinates.
# NOTE(review): label says Run2018C while the sample above is Run2018D — confirm.
common = [(0.15, 0.95, 'Run2018C (13 TeV)')]

variables = [ 
    [ "ChBarrelSumPt",  "ch_m1p5_1p5_sumPt",                      [(0.7, 0.85, "sumPt(PF ch) |#eta|<1.5")]],
    [ "NhEnBarrelSumPt",  "nh_m1p5_1p5_sumPt",                      [(0.7, 0.85, "sumPt(PF nh) |#eta|<1.5")]],
    [ "PhEnEC1SumPt",  "ga_m2p5_m1p5_sumPt+ga_1p5_2p5_sumPt",                      [(0.7, 0.85, "sumPt(PF #gamma) 1.5<|#eta|<2.5")]],
Ejemplo n.º 2
0
        action='store',
        nargs='?',
        type=str,
        default='postProcessed_Fall15_mAODv2',
        help="Name of the processing era"
        )

    return argParser

options = get_parser().parse_args()

# Logging
# Log level is taken from the command line; console only (no log file).
import StopsDilepton.tools.logger as logger
logger = logger.get_logger(options.logLevel, logFile = None )
import RootTools.core.logger as logger_rt
logger_rt = logger_rt.get_logger(options.logLevel, logFile = None )

#Samples: Load samples
from StopsDilepton.samples.helpers import fromHeppySample
# One sample object per requested name; maxN = None means no file limit here.
samples = [ fromHeppySample(s, data_path = options.dataDir, maxN = None) for s in options.samples ]

# Cross section read off the first sample's heppy component.
# NOTE(review): raises IndexError if options.samples is empty — confirm upstream validation.
xSection = samples[0].heppy.xSection

#Samples: combine if more than one
# Merges all loaded samples into a single Sample named "<first>_comb".
if len(samples)>1:
    sample_name =  samples[0].name+"_comb"
    logger.info( "Combining samples %s to %s.", ",".join(s.name for s in samples), sample_name )
    # NOTE(review): maxN is not defined in this excerpt — presumably set earlier in the file.
    sample = Sample.combine(sample_name, samples, maxN = maxN)
    # Clean up: release each *source* sample.
    # Fix: the loop previously called sample.clear() — clearing the freshly
    # combined sample len(samples) times and never clearing the inputs s.
    for s in samples:
        s.clear()
Ejemplo n.º 3
0
# Command-line options for this FWLite job.
# argParser is created earlier in the file (outside this excerpt).
# maxEvents/maxFiles = -1 means "no limit"; nJobs/job split the input for batch running.
argParser.add_argument('--maxEvents',          action='store',      type=int, default=-1, help='Maximum number of events')
argParser.add_argument('--maxFiles',           action='store',      type=int, default=-1, help='Maximum number of files')
argParser.add_argument('--targetDir',          action='store',      default='hem/v2')
argParser.add_argument('--sample',             action='store',      default='/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15-v1/MINIAODSIM')
argParser.add_argument('--nJobs',              action='store',      nargs='?', type=int, default=1,  help="Maximum number of simultaneous jobs.")
argParser.add_argument('--job',                action='store',      nargs='?', type=int, default=0,  help="Run only job i")
argParser.add_argument('--overwrite',          action='store_true', help='overwrite?')#, default = True)
args = argParser.parse_args()
 
#
# Logger
#
import JetMET.tools.logger as logger
import RootTools.core.logger as logger_rt
logger    = logger.get_logger(   args.logLevel, logFile = None)
logger_rt = logger_rt.get_logger(args.logLevel, logFile = None)

# Test runs get a separate output directory.
if args.small: 
    args.targetDir += "_small"

# Flatten the DAS path into a file-system friendly name, e.g.
# "/A/B/C" -> "A_B_C".
sample_name = args.sample.lstrip('/').replace('/','_')
# Number of files to read: 1 in test mode, --maxFiles if given, otherwise all (-1).
if args.small:
    maxN = 1
elif args.maxFiles>0:
    maxN = args.maxFiles
else:
    maxN = -1 

# NOTE(review): cache_directory and FWLiteSample are not defined in this
# excerpt — presumably imported earlier in the file; confirm.
dbFile = os.path.join( cache_directory, 'JME_fwlite_cache.db' )
logger.info( "Using sample cache %s", dbFile )
sample = FWLiteSample.fromDAS( sample_name, args.sample, maxN = maxN, dbFile = dbFile)  
        type=int,
        help="Which year?"
        )


    return argParser

options = get_parser().parse_args()

# Logging
import StopsCompressed.Tools.logger as _logger
# One log file per (skim, samples, user, job) so parallel batch jobs do not clobber each other.
logFile = '/tmp/%s_%s_%s_njob%s.txt'%(options.skim, '_'.join(options.samples), os.environ['USER'], str(0 if options.nJobs==1 else options.job))
logger  = _logger.get_logger(options.logLevel, logFile = logFile)

import RootTools.core.logger as logger_rt
logger_rt = logger_rt.get_logger(options.logLevel, logFile = None )

# flags (I think string searching is slow, so let's not do it in the filler function)
# NOTE(review): the .count() flags below are ints (0/1/...) while the 'in'
# flags are bools; both appear to be truth-tested only — confirm no caller
# relies on the integer value.
isDiLep     =   options.skim.lower().startswith('dilep')
isTriLep     =   options.skim.lower().startswith('trilep')
isSingleLep =   options.skim.lower().startswith('singlelep')
isTiny      =   options.skim.lower().count('tiny') 
isSmall      =   options.skim.lower().count('small')
isInclusive  = options.skim.lower().count('inclusive') 
# NOTE(review): isVeryLoosePt10 always implies isVeryLoose, since
# 'veryloosept10' contains 'veryloose'; isLoose deliberately excludes both.
isVeryLoose =  'veryloose' in options.skim.lower()
isVeryLoosePt10 =  'veryloosept10' in options.skim.lower()
isLoose     =  'loose' in options.skim.lower() and not isVeryLoose
isJet250    = 'jet250' in options.skim.lower()

# Skim condition
# Cut strings are appended to this list further down (outside this excerpt).
skimConds = []
Ejemplo n.º 5
0
#Helper
from JetMET.tools.helpers import deltaR2

# argParser
import argparse
argParser = argparse.ArgumentParser(description = "Argument parser")
argParser.add_argument('--logLevel',
      action='store',
      nargs='?',
      choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'TRACE', 'NOTSET'],
      default='INFO',
      help="Log level for logging"
)

args = argParser.parse_args()
# NOTE(review): get_logger is not imported in this excerpt — presumably a
# star import or an earlier import provides it; confirm.
logger = get_logger(args.logLevel, logFile = None)

# Make samples. The samples are statistically compatible.
# "_M2_5_500" variants are the reprocessed (HCal method-2) copies of the same data/MC.
jetHT_M2_5_500  = Sample.fromCMGOutput("jetHT_M2_5_500", baseDirectory = "/data/rschoefbeck/cmgTuples/JetHT_HCal/", chunkString = "JetHT_schoef-crab_JetHT_Run2015D_M2_5_500", treeFilename='tree.root')
jetHT           = Sample.fromCMGOutput("JetHT", baseDirectory = "/data/rschoefbeck/cmgTuples/JetHT_HCal/", chunkString = "JetHT_schoef-crab_JetHT_Run2015D", treeFilename='tree.root')
QCD_Pt_15to3000_M2_5_500 = Sample.fromCMGOutput("QCD_Pt_15to3000_M2_5_500", baseDirectory = "/data/rschoefbeck/cmgTuples/QCD_HCal/QCD_Pt_15to3000_M2_5_500", treeFilename='tree.root')
QCD_Pt_15to3000 = Sample.fromCMGOutput("QCD_Pt_15to3000", baseDirectory = "/data/rschoefbeck/cmgTuples/QCD_HCal/QCD_Pt_15to3000", treeFilename='tree.root')

# Branches to read; map() returns a list here (Python 2 era code).
variables =  map( Variable.fromString, ['evt/l','run/I', 'lumi/I', 'Jet[pt/F,rawPt/F,phi/F,eta/F]', 'nJet/I'] )

# Cap on the number of processed events.
maxN = 10000

# define TProfiles: logarithmically spaced pT bin edges,
# 10^1.1 ... 10^3.5 in steps of 0.1 dex (25 edges -> 24 bins).
thresholds = [10 ** (exponent / 10.) for exponent in range(11, 36)]
# Profile of the jet response in data, binned in the log-spaced thresholds above.
jetResponse_data = ROOT.TProfile("response_data", "response_data", len(thresholds)-1, array.array('d', thresholds) )
# texName: display label — presumably consumed by the plotting code; run 260431 of JetHT.
jetResponse_data.texName = "JetHT 260431" 
Ejemplo n.º 6
0
def get_parser():
    """Build and return the CLI parser for this samples file.

    The only option is --overwrite, a flag requesting that the existing
    database entry be replaced.
    """
    import argparse
    p = argparse.ArgumentParser(description="Argument parser for samples file")
    p.add_argument(
        '--overwrite',
        action='store_true',
        help="Overwrite current entry in db?",
    )
    return p


# Logging
if __name__ == "__main__":
    # Run as a script: project loggers at INFO, honour --overwrite.
    import Samples.Tools.logger as logger
    logger = logger.get_logger("INFO", logFile=None)
    import RootTools.core.logger as logger_rt
    logger_rt = logger_rt.get_logger("INFO", logFile=None)
    options = get_parser().parse_args()
    ov = options.overwrite

else:
    # Imported as a module: plain stdlib logger, never overwrite db entries.
    import logging
    logger = logging.getLogger(__name__)
    ov = False

from Samples.Tools.config import dbDir, redirector_BE, redirector
# NOTE(review): assumes dbDir ends with a path separator — confirm in config.
dbFile = dbDir + "Autumn18_private.sql"

logger.info("Using db file: %s", dbFile)

## TTGamma private new samples
TTGamma_dilep_LO_A18_private = FWLiteSample.fromDPMDirectory(
Ejemplo n.º 7
0
# Plotting options; argParser is created earlier in the file (outside this excerpt).
argParser.add_argument('--plot_directory', action='store', default='gen')
#argParser.add_argument('--selection',          action='store',      default='njet2p-btag1p-relIso0.12-looseLeptonVeto-mll20-met80-metSig5-dPhiJet0-dPhiJet1')
argParser.add_argument(
    '--small',
    action='store_true',
    help='Run only on a small subset of the data?',
)
args = argParser.parse_args()

#
# Logger
#
import TopEFT.Tools.logger as logger
import RootTools.core.logger as logger_rt
logger = logger.get_logger(args.logLevel, logFile=None)
logger_rt = logger_rt.get_logger(args.logLevel, logFile=None)

# Test runs get a separate plot directory.
if args.small: args.plot_directory += "_small"

# Import samples
from TopEFT.samples.skim_benchmarks import *

# Resolve sample names to the objects star-imported above.
# SECURITY: eval() on command-line input — acceptable only for trusted,
# interactive use; never expose this to untrusted callers.
# NOTE(review): Python 2 era — map() returns a list here; under Python 3 it
# would be a lazy iterator.
samples = map(eval, args.samples)


##
## Text on the plots
##
def drawObjects(hasData=False):
    tex = ROOT.TLatex()
    tex.SetNDC()
Ejemplo n.º 8
0
def get_parser():
    """Return the argument parser used when this samples file runs as a script."""
    import argparse
    parser = argparse.ArgumentParser(description="Argument parser for samples file")
    parser.add_argument('--overwrite', action='store_true',
                        help="Overwrite current entry in db?")
    return parser


# Logging
if __name__ == "__main__":
    # Run as a script: project loggers at DEBUG, honour --overwrite.
    import Analysis.Tools.logger as logger
    logger = logger.get_logger("DEBUG", logFile=None)
    import RootTools.core.logger as logger_rt
    logger_rt = logger_rt.get_logger("DEBUG", logFile=None)
    options = get_parser().parse_args()
    ov = options.overwrite
else:
    # Imported as a module: plain stdlib logger, never overwrite db entries.
    import logging
    logger = logging.getLogger(__name__)
    ov = False

# NOTE(review): cache_directory is not defined in this excerpt — presumably
# imported earlier in the file; confirm.
dbFile = cache_directory + "/samples/DB_TTZ_GEN.sql"

logger.info("Using db file: %s", dbFile)

ttZ_ll_LO_order2_15weights_ref = FWLiteSample.fromDAS(
    "ttZ_ll_LO_order2_15weights_ref",
    "/ttZ0j_rwgt_slc6_amd64_gcc630_CMSSW_9_3_0_tarball/llechner-ttZ0j_order2_15weights_18052018_ref-7a5fde3f5bf89006ee3acec926ca87d8/USER",
    "phys03",
Ejemplo n.º 9
0
from math import sqrt
# turn off graphics
# NOTE(review): ROOT is not imported in this excerpt — presumably imported earlier.
ROOT.gROOT.SetBatch( True )

# RootTools
from RootTools.core.standard import *

from plot_helpers import getUncertaintyValue, getObservationValue
from multiprocessing import Pool

# Logger
# Script logger at DEBUG, RootTools logger at INFO; console only.
import TTXPheno.Tools.logger as logger
import RootTools.core.logger as logger_rt
logger    = logger.get_logger(   'DEBUG', logFile = None)
logger_rt = logger_rt.get_logger('INFO', logFile = None)

# TTXPheno
from TTXPheno.samples.benchmarks import * 
from TTXPheno.Tools.user import plot_directory, cardfileLocation

# get the reweighting function
from TTXPheno.Tools.WeightInfo import WeightInfo

# Smooth colour gradient for 2D palettes.
ROOT.gStyle.SetNumberContours(255)

# Arguments
import argparse

argParser = argparse.ArgumentParser(description = "Argument parser")
argParser.add_argument('--version',            action='store',     default='test', help='Appendix to plot directory')
    help="Chance the Target Labels of SM and BSM Hypothesis")
argParser.add_argument(
    '--random_state',
    action='store',
    default=0,
    type=int,
    nargs='?',
    help="The random state, which is given to the train_test_split method")
args = argParser.parse_args()

#Set the version of the script
vversion = 'v1'

#Logger
import RootTools.core.logger as Logger
logger = Logger.get_logger(args.logLevel, logFile=None)

#Kule algorithm
# NOTE(review): np is not imported in this excerpt — presumably numpy,
# imported earlier in the file; confirm.
from kullback_leibler_divergence_criterion import KullbackLeiblerCriterion
kldc = KullbackLeiblerCriterion(1, np.array([2], dtype='int64'))

#setting up the file save name
# Encode the training configuration into the output name, e.g.
# "v1_small_swap_maxDepth..._EstNum..._BoostAlg..._RandState...".
version = vversion
if args.small:
    args.data_version += '_small'
    version += '_small'
if args.swap_hypothesis:
    version += '_swap'
version += '_maxDepth' + str(args.max_depth) + '_EstNum' + str(
    args.est_num) + '_BoostAlg' + str(
        args.boost_algorithm) + '_RandState' + str(args.random_state)
Ejemplo n.º 11
0
import os, sys
import ROOT
# RootTools
from RootTools.core.standard import *

# Location of the fastSim FWLite signal production on AFS.
results_directory = '/afs/hephy.at/data/cms07/StopsCompressed/fwlite_signals_fastSim/'

# sqlite3 sample cache file inside that directory.
dbFile = os.path.join(results_directory, 'sample_cache', 'fwlite_benchmarks.db')

# Keep existing cache entries by default.
overwrite = False

# Logging
# Only enable the RootTools debug logger when run as a script.
if __name__ == "__main__":
    import RootTools.core.logger as logger_rt
    logger_rt = logger_rt.get_logger('DEBUG')

# Stops2l fastSim signal sample, resolved from DAS (phys03 instance) and
# cached in dbFile; files are read through the hephy xrootd redirector.
fwlite_signals_fastSim_Stops2l_200k = FWLiteSample.fromDAS(
    "fwlite_signals_fastSim_Stops2l_200k",
    "/Stops2l/schoef-Stops2l-393b4278a04aeb4c6106d6aae1db462e/USER",
    "phys03",
    dbFile=dbFile,
    overwrite=overwrite,
    prefix='root://hephyse.oeaw.ac.at/')

fwlite_signals_DisplacedStops_500_200 = FWLiteSample.fromDAS(
    "fwlite_signals_DisplacedStops_500_200",
    "/DisplacedStops-mstop-500-ctau-200/schoef-Stops2l-2514d262db8aee2ee8f9b68a132535de/USER",
    "phys03",
    dbFile=dbFile,
    overwrite=overwrite,
Ejemplo n.º 12
0
#!/usr/bin/env python
# Compute the data-driven Drell-Yan estimate per channel (Python 2 script —
# see the print statement and iteritems() below).
from StopsDilepton.analysis.Region import Region
from StopsDilepton.analysis.estimators import setup, DataDrivenDYEstimate
from StopsDilepton.samples.cmgTuples_Data25ns_mAODv2_postProcessed import *
import StopsDilepton.tools.logger as logger
logger = logger.get_logger("INFO", logFile = None )
import RootTools.core.logger as logger_rt
logger_rt = logger_rt.get_logger("INFO", logFile = None )


estimateDY = DataDrivenDYEstimate(name='DY-DD', cacheDir=None)
regionDY = Region('dl_mt2ll', (0,-1))

for channel, sample in setup.sample['Data'].iteritems():
    res = estimateDY.cachedEstimate(regionDY,channel,setup)
    print "\n Result in ", channel," for estimate ", estimateDY.name, regionDY,":", res#, 'jer',jer, 'jec', jec