Example #1
                       action='store',
                       default='102X_TTG_ppv1_v7')
argParser.add_argument('--selection',
                       action='store',
                       default='dilepOS-nLepVeto2')
argParser.add_argument(
    '--small',
    action='store_true',
    help='Run only on a small subset of the data?',
)
args = argParser.parse_args()

# Logger
import Analysis.Tools.logger as logger
import RootTools.core.logger as logger_rt
logger = logger.get_logger(args.logLevel, logFile=None)
logger_rt = logger_rt.get_logger(args.logLevel, logFile=None)

if args.small: args.plot_directory += "_small"


# Text on the plots
def drawObjects(lumi_scale):
    tex = ROOT.TLatex()
    tex.SetNDC()
    tex.SetTextSize(0.04)
    tex.SetTextAlign(11)  # align left, bottom
    lines = [
        (0.15, 0.95, 'CMS #bf{#it{Preliminary}}'),
        (0.68, 0.95, '%3.1f fb^{-1} (13 TeV)' % lumi_scale),
    ]
    # assumed completion of the truncated snippet: draw the prepared lines
    return [tex.DrawLatex(*line) for line in lines]

try:
    fromDPM = sys.modules['__main__'].fromEOS != "True"
except AttributeError:
    fromDPM = "clip" not in os.getenv("HOSTNAME", "").lower()

print(postprocessing_directory_)
# Redirector
try:
    redirector = sys.modules["__main__"].redirector
except AttributeError:
    from TTGammaEFT.Tools.user import redirector

# Logging
if __name__ == "__main__":
    import Analysis.Tools.logger as logger
    logger = logger.get_logger("INFO", logFile=None)
    import RootTools.core.logger as logger_rt
    logger_rt = logger_rt.get_logger("INFO", logFile=None)
else:
    import logging
    logger = logging.getLogger(__name__)

logger.info("Loading MC samples from directory %s",
            os.path.join(data_directory_, postprocessing_directory_))

# Directories
dirs = {}
dirs["DY_LO"] = ["DYJetsToLL_M50_LO_ext1_comb"]
dirs["DY_NLO"] = ["DYJetsToLL_M50_ext2"]
dirs["TT_pow"] = ["TTLep_pow_CP5"]
dirs["ZG_lowMLL"] = ["ZGToLLG_lowMLL"]
Example #3
        '''
        return os.path.exists( os.path.join( self.directory, self.__get_filename(key) ) )

    def add(self, key, data, overwrite=False):

        filename = os.path.join( self.directory, self.__get_filename(key)) 
        try: # directory creation can race when jobs run in parallel
            if not os.path.isdir(os.path.dirname( filename )):
                os.makedirs( os.path.dirname( filename ) )
        except OSError:
            pass

        if not overwrite and os.path.exists( filename ):
            logger.warning( "Already found key %r. Do not store data.", key )
            return data

        with open( filename, 'w' ) as f:
            pickle.dump( data, f )
        return data
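    # Hypothetical counterpart to add() (a sketch, not part of the original snippet):
    # read a cached object back from disk, returning None when the key is absent.
    def get(self, key):
        filename = os.path.join( self.directory, self.__get_filename(key) )
        if not os.path.exists( filename ):
            return None
        with open( filename ) as f:
            return pickle.load( f )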

if __name__ == "__main__":
    import Analysis.Tools.logger as logger
    logger    = logger.get_logger( "DEBUG", logFile = None)

    import ROOT

    dirDB = DirDB("./test")

    dirDB.add('y',1)
    dirDB.add(3,1)
    dirDB.add((2,3),ROOT.TH1F('x','x',100,0,1))
Example #4
                           nargs="?",
                           type=int,
                           default=2,
                           help="Interpolation order for EFT weights.")

    return argParser


options = get_parser().parse_args()

# Logging
import Analysis.Tools.logger as logger
logFile = "/tmp/%s_%s_njob%s.txt" % (
    "_".join(options.samples), os.environ["USER"],
    str(0 if options.nJobs == 1 else options.job))
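# Worked example of the format above (sample name "TTGJets" is just a placeholder):
#   --samples TTGJets --nJobs 5 --job 2  ->  /tmp/TTGJets_<USER>_njob2.txt
#   a single-job run (--nJobs 1)         ->  /tmp/TTGJets_<USER>_njob0.txt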
logger = logger.get_logger(options.logLevel, logFile=logFile)

import RootTools.core.logger as logger_rt
logger_rt = logger_rt.get_logger(options.logLevel, logFile=None)

#Samples: Load samples
maxN = None
if options.small:
    maxN = 1000
    options.job = 1
    options.nJobs = 200
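# Illustration only (assumption about the job-splitting convention implied above):
# with nJobs jobs, job i processes every nJobs-th input file, capped at maxN in --small mode.
def _files_for_job(files, job, nJobs, maxN=None):
    selected = files[job::nJobs]
    return selected[:maxN] if maxN is not None else selected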

# Load all samples to be post processed
from TTGammaEFT.Samples.genTuples_TTGamma_EFT import *
#from TTGammaEFT.Samples.genTuples_TTGamma_Herwig import *
samples = map(eval, options.samples)
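# Note (assumption): the star-import above places every sample object in the module
# namespace, so under Python 3 the lazy map() and eval() could be replaced by an
# explicit, safer lookup:
#   samples = [globals()[name] for name in options.samples]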
Example #5
                       type=str,
                       default='trilepM-onZ1')
argParser.add_argument('--trainingFraction',
                       action='store',
                       type=float,
                       default=0.5)
argParser.add_argument('--small', action='store_true')
argParser.add_argument('--overwrite', action='store_true')

args = argParser.parse_args()

#Logger
import tWZ.Tools.logger as logger
logger = logger.get_logger("INFO", logFile=None)
import Analysis.Tools.logger as logger_an
logger_an = logger_an.get_logger("INFO", logFile=None)

if args.plot_directory is None:
    args.plot_directory = plot_directory

if args.selection is None:
    selectionString = "(1)"
else:
    selectionString = cutInterpreter.cutString(args.selection)
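# Example usage (sketch; the extra requirement "nJet>=1" is a hypothetical placeholder):
# the selection is a plain TTree-style cut string, so further cuts can be AND-ed on.
fullSelection = "(%s)&&(nJet>=1)" % selectionString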

# Samples
#from tWZ.samples.nanoTuples_RunII_nanoAODv6_private_postProcessed    import *
from tWZ.samples.nanoTuples_Summer16_nanoAODv6_private_postProcessed import *

signal = WZ
#signal.reduceFiles(factor=20)
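# Sketch (assumption): thin the signal sample in --small mode, mirroring the
# commented reduceFiles line above, to speed up test runs.
if args.small:
    signal.reduceFiles(factor=20)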
Example #6
#Also works the other way around!
import Analysis.Tools.syncer
import os

import Analysis.Tools.logger as logger

logger = logger.get_logger('DEBUG', logFile=None)

if not os.path.isdir('www'):
    os.makedirs('www')

# ROOT example
import ROOT

c1 = ROOT.TCanvas()
c1.Print('www/x.png')
c1.Print('www/y.pdf')

#pickle example
import pickle

x = {}
with open('www/z.pkl', 'w') as f:
    pickle.dump(x, f)
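# Quick sanity check (sketch): the three outputs should now exist locally under www/,
# ready for whatever synchronisation Analysis.Tools.syncer performs on them.
for name in ('x.png', 'y.pdf', 'z.pkl'):
    assert os.path.isfile(os.path.join('www', name))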
Example #7
                       help="Log level for logging")
argParser.add_argument('--sample', action='store', type=str)
argParser.add_argument('--config', action='store', type=str)
argParser.add_argument('--output_directory',
                       action='store',
                       type=str,
                       default='.')
argParser.add_argument('--small', action='store_true')

args = argParser.parse_args()

#Logger
import tWZ.Tools.logger as logger
logger = logger.get_logger(args.logLevel, logFile=None)
import Analysis.Tools.logger as logger_an
logger_an = logger_an.get_logger(args.logLevel, logFile=None)
import RootTools.core.logger as logger_rt
logger_rt = logger_rt.get_logger(args.logLevel, logFile=None)

subDir = args.config

#config
config = getattr(configs, args.config)

sample_names = []
found = False
for sample in config.training_samples:
    if args.sample == sample.name:
        found = True
        break  # found it
    else: