Example 1
    def xsec( self, modified_couplings=None, overwrite=False, skip=False ):

        key = self.getKey( modified_couplings )
        # Do we have the x-sec?
        if self.xsecDB.contains(key) and not overwrite:
            logger.debug( "Found x-sec %s for key %r. Do nothing.", self.xsecDB.get(key), key )
            return self.xsecDB.get(key)
        elif skip:
            return u_float(0)
        else:
            print "Trying to get xsec"
            self.__initialize( modified_couplings ) 
            logger.info( "Calculating x-sec" )
            # rerun MG to obtain the correct x-sec (with more events)
            with open( os.path.join( self.processTmpDir, 'Cards/run_card.dat' ), 'a' ) as f:
                f.write( ".false. =  gridpack\n" )
            logger.info( "Calculate x-sec: Calling bin/generate_events" )
            m = None
            for i in range(10):
                try:
                    output = subprocess.check_output( [ os.path.join( self.processTmpDir, 'bin/generate_events' ), '-f' ] )
                    m = re.search( r"Cross-section :\s*(.*) pb", output )
                    if m is None:
                        raise ValueError( "No cross-section found in MG output" )
                    logger.info( "x-sec: {} pb".format(m.group(1)) )
                    break
                except (subprocess.CalledProcessError, ValueError):
                    logger.info( "Encountered problem during the MG run. Restarting." )

            if m is None:
                raise RuntimeError( "Could not extract x-sec after 10 attempts" )
            xsec_ = u_float.fromString( m.group(1) )
            
            self.xsecDB.add( key, xsec_, overwrite=True )

            logger.info( "Done!" )

            return xsec_
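Note that the retry only works if the exceptions that can actually occur are caught. A standalone sketch of the same retry-and-parse pattern, using only the standard library (the command and the parse_xsec name are illustrative, not part of the original API):

import re
import subprocess

def parse_xsec(cmd, retries=10):
    # Run cmd repeatedly until its output contains a cross-section.
    for attempt in range(retries):
        try:
            output = subprocess.check_output(cmd).decode()  # bytes under Python 3
            m = re.search(r"Cross-section :\s*(\S+) pb", output)
            if m is None:
                raise ValueError("no cross-section in output")
            return float(m.group(1))
        except (subprocess.CalledProcessError, ValueError):
            pass  # transient failure: retry
    raise RuntimeError("no cross-section after %i attempts" % retries)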
Example 2
    def observation(self, region, channel, setup, overwrite):

        if setup.nJet == "3p":
            setup4p = setup.sysClone(parameters={"nJet": (4, -1)})
            setup3 = setup.sysClone(parameters={"nJet": (3, 3)})
            return sum([
                self.cachedEstimate(region, channel, s, overwrite=overwrite)
                for s in [setup3, setup4p]
            ])

        if channel == "all":
            return sum([
                self.cachedEstimate(region, c, setup, overwrite=overwrite)
                for c in lepChannels
            ])

        elif channel == "SFtight":
            return sum([
                self.cachedEstimate(region, c, setup, overwrite=overwrite)
                for c in dilepChannels
            ])

        else:
            preSelection = setup.preselection("Data", channel=channel)
            #            cut = "&&".join([region.cutString(setup.sys['selectionModifier']), preSelection['cut']])
            cut = "&&".join([region.cutString(), preSelection['cut']])

            logger.debug("Using cut %s" % cut)

            weight = preSelection['weightStr']
            if hasattr(setup, "blinding") and setup.blinding:
                weight += "*" + setup.blinding

            return u_float(**self.process.getYieldFromDraw(
                selectionString=cut, weightString=weight))
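u_float is the analysis' value-with-uncertainty container; it is imported from the repository's tools and not shown in these examples. A minimal stand-in with the behaviour the snippets rely on (construction from val/sigma, fromString, summation, comparison); the quadrature error propagation and the string format are assumptions:

import math

class u_float(object):
    # Minimal stand-in: a value with a symmetric uncertainty.
    def __init__(self, val, sigma=0.):
        self.val, self.sigma = float(val), float(sigma)
    @classmethod
    def fromString(cls, s):
        # "1.23 +- 0.04" -> u_float(1.23, 0.04); the exact format is an assumption
        parts = s.replace("+/-", " ").replace("+-", " ").split()
        return cls(*[float(p) for p in parts[:2]])
    def __add__(self, other):
        if isinstance(other, (int, float)):   # lets sum() start from 0
            other = u_float(other)
        return u_float(self.val + other.val, math.hypot(self.sigma, other.sigma))
    __radd__ = __add__
    def __ge__(self, other):
        return self.val >= (other.val if isinstance(other, u_float) else other)
    def __repr__(self):
        return "%s +/- %s" % (self.val, self.sigma)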
Example 3
 def cachedTemplate(self,
                    selection,
                    weight='(1)',
                    save=True,
                    overwrite=False):
     key = {
         "selection": selection,
         "weight": weight,
         "source": self.source_sample.name
     }
     if (self.cache and self.cache.contains(key)) and not overwrite:
         result = self.cache.get(key)
         logger.info("Loaded MC PU profile from %s" %
                     (self.cache.database_file))
         logger.debug("Key used: %s result: %r" % (key, result))
     elif self.cache:
         logger.info("Obtain PU profile for %s" % (key, ))
         result = self.makeTemplate(selection=selection, weight=weight)
         if result:
             result = self.cache.addData(key, result, overwrite=save)
             logger.info("Adding PU profile to cache for %s : %r" %
                         (key, result))
         else:
             logger.warning(
                 "Couldn't create PU profile to cache for %s : %r" %
                 (key, result))
     else:
         result = self.makeTemplate(selection=selection, weight=weight)
     return result
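The cache objects used throughout (contains/get/add, plus addData and a database_file attribute here) come from the repository's caching tools and are likewise not shown. A dict-backed stand-in of the assumed interface, for illustration only:

class ResultCache(object):
    # Dict-backed stand-in for the contains/get/add cache interface.
    def __init__(self, database_file=":memory:"):
        self.database_file = database_file
        self._store = {}
    def _freeze(self, key):
        # dict keys used in these snippets must be made hashable
        return tuple(sorted(key.items())) if isinstance(key, dict) else key
    def contains(self, key):
        return self._freeze(key) in self._store
    def get(self, key):
        return self._store[self._freeze(key)]
    def add(self, key, value, overwrite=False):
        k = self._freeze(key)
        if k in self._store and not overwrite:
            raise ValueError("key %r exists and overwrite is False" % (key,))
        self._store[k] = value
        return value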
Example 4
def wrapper(arg, y):
    r, channel, est, estimate = arg
    setup = setups[y][est]
    logger.debug(
        "Running transfer factor for region %s, channel %s, estimate %s for QCD-DD"
        % (r, channel, est))
    estimate.initCache(setup.defaultCacheDir())
    res = estimate.cachedTransferFactor(r, channel, setup, checkOnly=True)
    return (arg, res)
Example 5
 def preselection(self, dataMC, channel="all", processCut=None):
     """Get preselection  cutstring."""
     cut = self.selection(dataMC,
                          channel=channel,
                          processCut=processCut,
                          **self.parameters)
     logger.debug("Using cut-string: %s", cut)
     if processCut:
         logger.info("Adding process specific cut: %s" % processCut)
     return cut
Example 6
    def writeToCache(self, region, channel, setup, value, signalAddon=None, save=True, overwrite=False, checkOnly=False):
        key =  self.uniqueKey(region, channel, setup, signalAddon=signalAddon)
        if (self.cache and self.cache.contains(key)) and not overwrite:
            res = self.cache.get(key)
#            if res.val != value.val: print "Warning, caches estimate not equal to input value: have %s, got %s"%(res, value)
#            logger.debug( "Loading cached %s result for %r : %r"%(self.name, key, res) )
        elif self.cache and not checkOnly:
            _res = self.cache.add( key, value, overwrite=True )
            res = value
            logger.debug( "Adding cached %s result for %r : %r" %(self.name, key, res) )
        else:
            res = u_float(-1,0)
        return res if res >= 0 or checkOnly else u_float(0,0)
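With checkOnly=True nothing is computed or written: u_float(-1, 0) serves as a "not cached" sentinel. A caller can therefore probe the cache, e.g. (hypothetical usage):

res = estimate.writeToCache(region, channel, setup, value, checkOnly=True)
if res.val < 0:
    logger.info("No cached value for %r", (region, channel))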
Example 7
    def __initialize( self, modified_couplings = None ):

        # Initialize setup
        self.config.initialize( modified_couplings )

        # Write process card
        self.__writeProcessCard()
        
        logger.info( "Running MG executable: python %s -f %s", self.config.MG5_tmpdir + '/bin/mg5_aMC', self.tmpProcessCard )
        subprocess.check_output( [ "python", self.config.MG5_tmpdir + '/bin/mg5_aMC', '-f', self.tmpProcessCard ] )
        logger.info( "Done with MG executable" ) 

        # copy files from central gridpack
        for filename in [ 
            'grid_card.dat',
            'me5_configuration.txt',
            ]:
            logger.info( "Copying files from GP directory to temporary process directory: %s", filename )
            source = os.path.join( self.config.GP_tmpdir, 'process/madevent/Cards', filename )
            target = os.path.join( self.processTmpDir, 'Cards', filename )
            shutil.copyfile( source, target )
            logger.debug( "Done with %s -> %s", source, target )
        
        if self.run_card: 
            logger.info( "Copying provided run card: %s", self.run_card )
            target = os.path.join( self.processTmpDir, 'Cards', "run_card.dat" )
            shutil.copyfile( self.run_card, target )
            logger.debug( "Done with %s -> %s", source, target )
        
        else: 
            filename = "run_card.dat"
            logger.info( "Copying files from GP directory to temporary process directory: %s", filename )
            source = os.path.join( self.config.GP_tmpdir, 'process/madevent/Cards', filename )
            target = os.path.join( self.processTmpDir, 'Cards', filename )
            shutil.copyfile( source, target )
            logger.debug( "Done with %s -> %s", source, target )

        # copy reweight cards
        if self.reweight:
            source = os.path.join( self.config.data_path, 'template', 'template_reweight_card_' + self.config.model_name + '.dat' )
            target = os.path.join( self.processTmpDir, 'Cards', 'reweight_card.dat' )
            logger.debug( "Reweight card target: %s", target )
            shutil.copyfile( source, target )
            if os.path.isfile( target ):
                logger.debug( "Done with %s -> %s", source, target )
            else:
                logger.info( "File copy failed. WTF!" )
        

        # Append to me5_configuration.txt 
        with open( os.path.join( self.processTmpDir, 'Cards/me5_configuration.txt' ), 'a') as f:
            f.write( "run_mode = 2\n" )
            f.write( "nb_core = 4\n" )
            f.write( "lhapdf = /cvmfs/cms.cern.ch/%s/external/lhapdf/6.1.6/share/LHAPDF/../../bin/lhapdf-config\n" % os.environ["SCRAM_ARCH"] )
            f.write( "automatic_html_opening = False\n" )

        # Append to run_card.dat
        with open( os.path.join( self.processTmpDir, 'Cards/run_card.dat' ), 'a' ) as f:
            f.write( "{}  =  nevents\n".format(self.nEvents) )
Example 8
    def initialize(self, modified_couplings=None):
        ''' Update the restriction card
        '''
        logger.info("#################### Model Setup ######################")

        self.__pre_initialize()

        # couplings
        modified_couplings = modified_couplings if modified_couplings is not None else {}

        # Check whether couplings are in the model
        for coup in modified_couplings.keys():
            if coup not in self.all_model_couplings:
                logger.error(
                    "Coupling %s not found in model %s. All available couplings: %s",
                    coup, self.model_name, ",".join(self.all_model_couplings))
                raise RuntimeError("Unknown coupling: %s" % coup)

        logger.debug('Creating restriction file based on template %s',
                     self.restrictCardTemplate)
        # make block strings to be inserted into template file
        block_strings = {}
        for block in self.model.keys():

            # copy defaults
            couplings = copy.deepcopy(self.model[block])

            # make modifications & build string for the template file
            block_strings[block + '_template_string'] = ""
            for i_coupling, coupling in enumerate(couplings):
                # coupling is a pair (name, value)
                if coupling[0] in modified_couplings:
                    coupling[1] = modified_couplings[coupling[0]]
                block_strings[block + '_template_string'] += "%6i %8.6f # %s\n" % (
                    i_coupling + 1, coupling[1], coupling[0])

        # read template file
        with open(self.restrictCardTemplate, 'r') as f:
            template_string = f.read()

        with open(self.restrictCard, 'w') as out:
            out.write(template_string.format(**block_strings))

        logger.info('Written restriction file %s', self.restrictCard)
        logger.info("################# Done: Model Setup ###################")
Example 9
def wrapper(arg):
    #        r,channel,set = arg
    channel, set = arg
    logger.debug("Running transfer factor, channel %s in setup %s for QCD-DD" %
                 (channel, args.controlRegion))
    res = estimate.cachedTransferFactor(channel,
                                        setup,
                                        qcdUpdates=args.customQCDUpdates,
                                        save=True,
                                        overwrite=args.overwrite,
                                        checkOnly=args.checkOnly)
    _ = estimate.cachedQCDMCTransferFactor(channel,
                                           setup,
                                           qcdUpdates=args.customQCDUpdates,
                                           save=True,
                                           overwrite=args.overwrite,
                                           checkOnly=args.checkOnly)
    return (arg, res)
Example 10
    def __pre_initialize(self):
        ''' Create temporary directories and unzip GP. Time consuming. '''

        if self.__isPreInitialized:
            logger.debug("Already pre-initialized in %s. Do nothing.",
                         self.uniquePath)
            return

        # Now begin with the work
        logger.info("############# pre-initialize Configuration ############")
        os.makedirs(self.uniquePath)

        # create new directories
        os.makedirs(self.GP_tmpdir)
        os.makedirs(self.MG5_tmpdir)

        # unzip MG tarball
        logger.info("Extracting madgraph")
        subprocess.call(
            ['tar', 'xaf', self.MG5_tarball, '--directory', self.uniquePath])

        # unzip gridpack for central config files
        logger.info("Extracting central gridpack")
        subprocess.call(
            ['tar', 'xaf', self.GP_tarball, '--directory', self.GP_tmpdir])

        # copy private UFO files from models in repository
        if not os.path.isdir(self.MG5_tmpdir + "/Models/" + self.model_name):
            logger.info("Copying UFO from private Model database for model %s",
                        self.model_name)
            src_dir = os.path.expandvars(
                '$CMSSW_BASE/src/TTGammaEFT/Generation/data/models/%s/UFO' %
                self.model_name)
            target_dir = os.path.join(self.MG5_tmpdir, "models",
                                      self.model_name)
            logger.debug("Copying UFO from %s to %s", src_dir, target_dir)
            shutil.copytree(src_dir, target_dir)
        else:
            logger.info("Using UFO from MG5 for model %s", self.model_name)

        self.__isPreInitialized = True

        logger.info("########## Done: pre-initialize Configuration #########")
Example 11
 def cachedObservation(self,
                       region,
                       channel,
                       setup,
                       save=True,
                       overwrite=False,
                       checkOnly=False):
     key = self.uniqueKey(region, channel, setup)
     if (self.cache and self.cache.contains(key)) and not overwrite:
         res = self.cache.get(key)
         logger.debug("Loading cached %s result for %r : %r" %
                      (self.name, key, res))
     elif self.cache and not checkOnly:
         res = self.observation(region, channel, setup, overwrite)
         _res = self.cache.add(key, res, overwrite=True)
         logger.debug("Adding cached %s result for %r" % (self.name, key))
     elif not checkOnly:
         res = self.observation(region, channel, setup, overwrite)
     else:
         res = u_float(-1, 0)
     return res if res >= 0 or checkOnly else u_float(0, 0)
Example 12
def wrapper(arg):
    r, c, s, inclusive = arg
    print r, c, s.sys["reweight"], inclusive
    logger.debug("Calculating estimate for %s in region %s and channel %s" %
                 (args.selectEstimator, r, c))
    if inclusive:
        res = estimateIncl.cachedEstimate(r,
                                          c,
                                          s,
                                          save=True,
                                          overwrite=args.overwrite,
                                          checkOnly=args.checkOnly)
        return (estimateIncl.uniqueKey(r, c, s), res)
    else:
        res = estimate.cachedEstimate(r,
                                      c,
                                      s,
                                      save=True,
                                      overwrite=args.overwrite,
                                      checkOnly=args.checkOnly)
        return (estimate.uniqueKey(r, c, s), res)
Example 13
    def weightString(self, dataMC, photon="PhotonGood0", addMisIDSF=False):
        lumiString = "(35.92*(year==2016)+41.53*(year==2017)+59.74*(year==2018))"
        _weightString = {}
        _weightString["Data"] = "weight"
        _weightString["MC"] = "*".join([self.sys["weight"]] + (
            self.sys["reweight"] if self.sys["reweight"] else []))
        _weightString["MC"] += "*%s" % lumiString

        if addMisIDSF and photon:
            if self.nJet == "2p": misIDSF_val = misID2pSF_val
            elif self.nJet == "3p": misIDSF_val = misID3pSF_val
            elif self.nJet == "4p": misIDSF_val = misID4pSF_val
            elif self.nJet == "2": misIDSF_val = misID2SF_val
            elif self.nJet == "3": misIDSF_val = misID3SF_val
            elif self.nJet == "4": misIDSF_val = misID4SF_val
            elif self.nJet == "5": misIDSF_val = misID5SF_val

            if self.year == "RunII":
                ws = "(%s)" % _weightString["MC"]
                ws16 = "+(%s*(%s_photonCatMagic==2)*(%f-1)*(year==2016))" % (
                    _weightString["MC"], photon, misIDSF_val[2016].val)
                ws17 = "+(%s*(%s_photonCatMagic==2)*(%f-1)*(year==2017))" % (
                    _weightString["MC"], photon, misIDSF_val[2017].val)
                ws18 = "+(%s*(%s_photonCatMagic==2)*(%f-1)*(year==2018))" % (
                    _weightString["MC"], photon, misIDSF_val[2018].val)
                _weightString["MC"] = ws + ws16 + ws17 + ws18
            else:
                _weightString["MC"] += "+%s*(%s_photonCatMagic==2)*(%f-1)" % (
                    _weightString["MC"], photon, misIDSF_val[self.year].val)

        if dataMC == "DataMC": return _weightString

        if dataMC == "Data": _weightString = _weightString["Data"]
        elif (dataMC == "MC" or dataMC == "MCpTincl"):
            _weightString = _weightString["MC"]
        logger.debug("Using weight-string: %s", _weightString)

        return _weightString
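For the RunII branch the MC weight string becomes the base weight plus one misID term per year. A compact sketch of that construction (the function name and the scale-factor values below are hypothetical):

def misid_weight(base, photon, sf_by_year):
    # Extend an MC weight string with per-year misID scale factors.
    pieces = ["(%s)" % base]
    for year, sf in sorted(sf_by_year.items()):
        pieces.append("(%s*(%s_photonCatMagic==2)*(%f-1)*(year==%i))"
                      % (base, photon, sf, year))
    return "+".join(pieces)

# e.g. misid_weight("weight*lumi", "PhotonGood0", {2016: 2.25, 2017: 2.0, 2018: 2.0})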
Example 14
 def cachedFakeFactor(self, region, channel, setup, overwrite=False, checkOnly=False):
     key =  self.uniqueKey(region, channel, setup)
     if (self.helperCache and self.helperCache.contains(key)) and not overwrite:
         res = self.helperCache.get(key)
         logger.debug( "Loading cached %s result for %r : %r"%(self.name, key, res) )
     elif self.helperCache and not checkOnly:
         logger.debug( "Calculating %s result for %r"%(self.name, key) )
         res = self._dataDrivenFakeCorrectionFactor( region, channel, setup, overwrite=overwrite )
         _res = self.helperCache.add( key, res, overwrite=True )
         logger.debug( "Adding cached transfer factor for %r : %r" %(key, res) )
     elif not checkOnly:
         res = self._dataDrivenFakeCorrectionFactor( region, channel, setup, overwrite=overwrite )
     else:
         res = u_float(-1,0)
     return res if res > 0 or checkOnly else u_float(0,0)
Example 15
 def cachedEstimate(self, region, channel, setup, signalAddon=None, save=True, overwrite=False, checkOnly=False):
     key =  self.uniqueKey(region, channel, setup, signalAddon=signalAddon)
     if (self.cache and self.cache.contains(key)) and not overwrite:
         res = self.cache.get(key)
         logger.debug( "Loading cached %s result for %r : %r"%(self.name, key, res) )
     elif self.cache and not checkOnly:
         logger.debug( "Calculating %s result for %r"%(self.name, key) )
         res = self._estimate( region, channel, setup, signalAddon=signalAddon, overwrite=overwrite )
         _res = self.cache.add( key, res, overwrite=True )
         logger.debug( "Adding cached %s result for %r : %r" %(self.name, key, res) )
     elif not checkOnly:
         res = self._estimate( region, channel, setup, signalAddon=signalAddon, overwrite=overwrite)
     else:
         res = u_float(-1,0)
     return res if res >= 0 or checkOnly else u_float(0,0)
Example 16
    def cachedQCDMCTransferFactor(self, channel, setup, qcdUpdates=None, save=True, overwrite=False, checkOnly=False):
        key =  self.uniqueKey("regionQCDMC", channel, setup, qcdUpdates=qcdUpdates)
        if (self.tfCache and self.tfCache.contains(key)) and not overwrite:
            res = self.tfCache.get(key)
            logger.debug( "Loading cached %s result for %r : %r"%(self.name, key, res) )
        elif self.tfCache and not checkOnly:
            logger.debug( "Calculating %s result for %r"%(self.name, key) )
#            res = self._dataDrivenTransferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
            res = self._transferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
            _res = self.tfCache.add( key, res, overwrite=True )
            logger.debug( "Adding cached transfer factor for %r : %r" %(key, res) )
        elif not checkOnly:
#            res = self._dataDrivenTransferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
            res = self._transferFactor( channel, setup, qcdUpdates=qcdUpdates, overwrite=overwrite )
        else:
            res = u_float(-1,0)
        return res if res > 0 or checkOnly else u_float(0,0)
Example 17
                       default=0,
                       help="Run only job i")
argParser.add_argument('--sample',
                       action='store',
                       default='TTG_4WC_ref',
                       type=str,
                       help="Which sample to plot")
args = argParser.parse_args()

# Logger
import Analysis.Tools.logger as logger
import RootTools.core.logger as logger_rt
logger = logger.get_logger(args.logLevel, logFile=None)
logger_rt = logger_rt.get_logger(args.logLevel, logFile=None)

logger.debug("Start run_estimate.py")

if args.checkOnly: args.overwrite = False

if not args.controlRegion:
    logger.warning("ControlRegion not known")
    sys.exit(0)

# load and define the EFT sample
from TTGammaEFT.Samples.genTuples_TTGamma_EFT_postProcessed import *
eftSample = eval(args.sample)

# Settings for EFT reweighting
w = WeightInfo(eftSample.reweight_pkl)
w.set_order(args.order)
variables = w.variables
Example 18
    def _estimate(self,
                  region,
                  channel,
                  setup,
                  signalAddon=None,
                  overwrite=False):
        ''' Concrete implementation of abstract method 'estimate' as defined in Systematic
        '''

        logger.debug("MC prediction for %s channel %s" % (self.name, channel))

        if setup.nJet == "3p":
            setup4p = setup.sysClone(parameters={"nJet": (4, -1)})
            setup3 = setup.sysClone(parameters={"nJet": (3, 3)})
            return sum([
                self.cachedEstimate(region,
                                    channel,
                                    s,
                                    signalAddon=signalAddon,
                                    overwrite=overwrite)
                for s in [setup3, setup4p]
            ])

        if channel == 'all':
            # 'all' is the total of all contributions
            return sum([
                self.cachedEstimate(region,
                                    c,
                                    setup,
                                    signalAddon=signalAddon,
                                    overwrite=overwrite) for c in lepChannels
            ])

        elif channel == 'SFtight':
            # 'SFtight' is the total of mumutight and eetight contributions
            return sum([
                self.cachedEstimate(region,
                                    c,
                                    setup,
                                    signalAddon=signalAddon,
                                    overwrite=overwrite) for c in dilepChannels
            ])

        else:
            # change the sample processed if there is a signal addon like TuneUp
            if signalAddon:
                if self.name.split("_")[-1] in [
                        "gen", "misID", "had", "hp", "fake", "PU"
                ]:
                    name = "_".join(
                        self.name.split("_")[:-1] +
                        [signalAddon, self.name.split("_")[-1]])
                else:
                    name = "_".join([self.name, signalAddon])
                setattr(self, "process" + signalAddon, setup.processes[name])


#            preSelection = setup.preselection('MC' if not signalAddon else "MCpTincl", channel=channel, processCut=self.processCut)
            preSelection = setup.preselection('MC',
                                              channel=channel,
                                              processCut=self.processCut)
            cuts = [
                region.cutString(setup.sys['selectionModifier']),
                preSelection['cut']
            ]
            #            if setup.parameters["photonIso"] and setup.parameters["photonIso"] != "lowChgIsolowSieie":
            #                self.processCut = self.processCut.replace("photoncat", "photonhadcat")
            #            if self.processCut:
            #                cuts.append( cutInterpreter.cutString(self.processCut) )
            #                logger.info( "Adding process specific cut %s"%self.processCut )
            cut = "&&".join(cuts)
            weight = preSelection['weightStr']

            logger.debug("Using cut %s and weight %s" % (cut, weight))

            #            return setup.lumi/1000.*u_float(**getattr(self,"".join(["process",signalAddon if signalAddon else ""])).getYieldFromDraw(selectionString = cut, weightString = weight) )
            #            print cut, weight
            return u_float(**getattr(
                self, "".join(["process", signalAddon if signalAddon else ""])
            ).getYieldFromDraw(selectionString=cut, weightString=weight))
Example 19
    try:    os.makedirs( targetPath )
    except OSError: pass
nExistingFiles    = len(os.listdir(targetPath))
if nExistingFiles > options.nJobs:
    raise Exception("Error: More files exist in target directory as should be processed! Check your nJobs input! Got nJobs %i, existing files: %i"%(options.nJobs, nExistingFiles))
logger.info("%i files exist in target directory! Processing %i files."%(nExistingFiles, options.nJobs))

# Single file post processing
if options.fileBasedSplitting or options.nJobs > 1:
    len_orig = len(sample.files)
    sample = sample.split( n=options.nJobs, nSub=options.job)
    if sample is None:  
        logger.info( "No such sample. nJobs %i, job %i numer of files %i", options.nJobs, options.job, len_orig )
        sys.exit(0)
    logger.info(  "fileBasedSplitting: Run over %i/%i files for job %i/%i."%(len(sample.files), len_orig, options.job, options.nJobs))
    logger.debug( "fileBasedSplitting: Files to be run over:\n%s", "\n".join(sample.files) )

# Directories
outputFilePath    = os.path.join( output_directory, sample.name + '.root' )
targetFilePath    = os.path.join( targetPath, sample.name + '.root' )
filename, ext = os.path.splitext( outputFilePath )

if os.path.exists( output_directory ) and options.overwrite:
    if options.nJobs > 1:
        logger.warning( "NOT removing directory %s because nJobs = %i", output_directory, options.nJobs )
    else:
        logger.info( "Output directory %s exists. Deleting.", outputFilePath )
        shutil.rmtree( outputFilePath, ignore_errors=True )

if not os.path.exists( output_directory ):
    try:
Example 20
    if os.path.exists( targetFileName ):
        if options.overwrite:
            logger.info( "Found file %s. Overwriting.", targetFileName )
            cmd.append( '-f' )
        else:
            logger.info( "File %s already exists. Skipping.", targetFileName )
            cmd = None
   
    from subprocess import call

    if cmd is not None: call(cmd + [targetFileName] + job) 
    
    isOK =  checkRootFile( targetFileName, checkForObjects = [options.treeName]) \
        if options.treeName is not None else checkRootFile( targetFileName )

    if options.delete:
        if isOK:
            for f in job:
                os.remove( f )
                logger.debug( "Deleted input %s", f)
        else:
            logger.warning( "File %s does not look OK. Checked for tree: %r. Did not delete input.", targetFileName, options.treeName )
    else:
        if not isOK: logger.warning( "File %s does not look OK. Checked for tree: %r.", targetFileName, options.treeName )
            

    if options.really:
        logger.info("Done.")
    else:
        logger.info("Done with nothing. Use --really to hadd and --delete to delete the input.")
 
Example 21
                       help="Number of Events")
argParser.add_argument('--logLevel',
                       action='store',
                       default='INFO',
                       nargs='?',
                       choices=defaultLogger,
                       help="Log level for logging")
argParser.add_argument('--makeGridpack',
                       action='store_true',
                       help="make gridPack?")
argParser.add_argument('--calcXSec',
                       action='store_true',
                       help="calculate x-sec?")
args = argParser.parse_args()

logger.debug("Coupling arguments: %r", args.couplings)

if not args.runCard.endswith(".dat"): args.runCard += ".dat"
runCardPath = os.path.join(run_path, args.runCard)

# Single argument -> interpret as file
if len(args.couplings) == 1 and os.path.isfile(args.couplings[0]):
    with open(args.couplings[0], 'r') as f:
        param_points = [line.rstrip().split() for line in f.readlines()]

# Interpret couplings
elif len(args.couplings) != 1:
    # make a list of the form [ ['c1', v1, v2, ...], ['c2', ...] ] so we can recurse in the couplings c1,c2,...
    coupling_list = []
    for a in args.couplings:
        try:
Example 22
        })

    return dictList


# Load File
logger.info("Now running on pp file %s and checking in directory %s",
            args.file, args.data_directory)
file = os.path.expandvars("$CMSSW_BASE/src/tWZ/postProcessing/%s.sh" %
                          args.file)
dictList = getDataDictList(file)
isData = "Run" in args.file
execCommand = []
for ppEntry in dictList:
    sample = ppEntry['sample']
    logger.debug("Checking sample %s" % sample)

    postfix = ""
    # check whether we do reduction:
    if "--reduceSizeBy" in ppEntry["command"]:
        postfix += "_redBy%i" % int(ppEntry["command"].split('--reduceSizeBy')
                                    [1].lstrip().split(' ')[0])
    dirPath = os.path.join(args.data_directory, ppEntry["dir"],
                           ppEntry["skim"], sample + postfix)
    # check whether we have an LHE HT cut:
    if "--LHEHTCut" in ppEntry["command"]:
        postfix += "_lheHT%i" % int(
            ppEntry["command"].split('--LHEHTCut')[1].lstrip().split(' ')[0])
    dirPath = os.path.join(args.data_directory, ppEntry["dir"],
                           str(ppEntry["year"]), ppEntry["skim"],
                           sample + postfix)
Example 23
    logger = logging.getLogger(__name__)

if args.file.endswith(".sh"):
    args.file = args.file.rstrip(".sh")

# Load File
logger.info("Now running on pp file %s" % args.file)
file = os.path.expandvars("$CMSSW_BASE/src/TTGammaEFT/postprocessing/%s.sh" %
                          args.file)
dictList = getDataDictList(file)
isData = "Run" in args.file
execCommand = []
allFiles = []
for ppEntry in dictList:
    sample = ppEntry['sample']
    logger.debug("Checking sample %s" % sample)

    # Check whether file exists on DPM, no check if root file is ok implemented for now
    if "clip" in hostname.lower():
        #        dirPath   = os.path.join( postprocessing_output_directory, ppEntry["dir"], ppEntry["skim"], sample  )
        dirPath = os.path.join(dpm_directory, "postprocessed", ppEntry["dir"],
                               ppEntry["skim"], sample)
        files = os.listdir(dirPath) if os.path.exists(dirPath) else []
        rootFiles = filter(
            lambda file: file.endswith(".root") and not file.startswith(
                "nanoAOD"), files)
        rootFiles = [item.split(".root")[0] for item in rootFiles]
        allFiles += [os.path.join(dirPath, r + ".root") for r in rootFiles]

    else:
        dirPath = os.path.join(dpm_directory, 'postprocessed', ppEntry["dir"],
Example 24
def make_batch_job( batch_job_file, batch_job_title, batch_output_dir , command ):
    # If X509_USER_PROXY is set, use existing proxy.
    if options.dpm:
        if host == 'lxplus':
            from StopsDilepton.Tools.user import cern_proxy_certificate
            proxy_location = cern_proxy_certificate
        else:
            proxy_location = None

        from RootTools.core.helpers import renew_proxy
        proxy = renew_proxy( proxy_location )

        logger.info( "Using proxy certificate %s", proxy )
        proxy_cmd = "export X509_USER_PROXY=%s"%proxy
    else:
        proxy_cmd = ""            

    import subprocess

    if host == 'heplx':
        template =\
"""\
#!/bin/sh -x
#SBATCH -J {batch_job_title}
#SBATCH -D {pwd}
#SBATCH -o {batch_output_dir}batch-test.%j.out

{proxy_cmd}
voms-proxy-info -all
eval \`scram runtime -sh\`
echo CMSSW_BASE: {cmssw_base} 
echo Executing user command  
echo "{command}"
{command} 

voms-proxy-info -all

""".format(\
                command          = command,
                cmssw_base       = os.getenv("CMSSW_BASE"),
                batch_output_dir = batch_output_dir,
                batch_job_title  = batch_job_title,
                pwd              = os.getenv("PWD"),
                proxy_cmd = proxy_cmd
              )
    elif host == 'lxplus':
        template =\
"""\
#!/bin/bash
export CMSSW_PROJECT_SRC={cmssw_base}/src

cd $CMSSW_PROJECT_SRC
eval `scramv1 ru -sh`

alias python={python_release}
which python
python --version

{proxy_cmd}
voms-proxy-info -all
echo CMSSW_BASE: $CMSSW_BASE
cd {pwd}
echo Executing user command while in $PWD
echo "{command}"
{command} 

voms-proxy-info -all

""".format(\
                command          = command,
                cmssw_base       = os.getenv("CMSSW_BASE"),
                #batch_output_dir = batch_output_dir,
                #batch_job_title  = batch_job_title,
                pwd              = os.getenv("PWD"),
                proxy_cmd = proxy_cmd,
                python_release = subprocess.check_output(['which', 'python']).rstrip(), 
              )

    batch_job = file(batch_job_file, "w")
    batch_job.write(template)
    batch_job.close()

    logger.debug("Local batch job file: %s", batch_job_file)
    logger.debug("Batch job:\n%s", template)

    return
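A hypothetical call, only to show the expected arguments (paths and command are made up):

make_batch_job(
    batch_job_file   = "job.sh",
    batch_job_title  = "run_estimate",
    batch_output_dir = "/tmp/batch_logs/",
    command          = "python run_estimate.py --year 2017",
)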
Example 25
                                          checkOnly=args.checkOnly)
        return (estimateIncl.uniqueKey(r, c, s), res)
    else:
        res = estimate.cachedEstimate(r,
                                      c,
                                      s,
                                      save=True,
                                      overwrite=args.overwrite,
                                      checkOnly=args.checkOnly)
        return (estimate.uniqueKey(r, c, s), res)


jobs = []

for region in regions:
    logger.debug("Queuing jobs for region %s", region)
    for c in channels:
        logger.debug("Queuing jobs for channel %s", c)
        jobs.append((region, c, setup, False))
        for var in variations:
            jobs.append(
                (region, c, setup.sysClone(sys={"reweight": [var]}), False))

logger.debug("Queuing jobs for channel %s", c)
jobs.append((noRegions[0], "all", setupIncl, True))
for var in variations:
    jobs.append((noRegions[0], "all",
                 setupIncl.sysClone(sys={"reweight": [var]}), True))

logger.info("Created %s jobs", len(jobs))
Example 26
def loopOverLogs( directory, filterCommand=None, addon=None, ignore="" ):

    logger.info("Checking log files in directory %s"%directory )

    files = list(filter(os.path.isfile, glob.glob(directory + "/*")))
#    files = list(filter(os.path.isfile, glob.glob(directory + "/*.err")))
    # sort logs by latest one
    files.sort(key=lambda x: -os.path.getmtime(x))

    # filter logs by latest cmd
    filteredFiles = []
    filteredCmds  = []
    for file in files:
        cmd = getCommandFromLog(file.replace(".err",".out"))

        if not cmd:
            logger.debug( "Could not find command for file %s"%file )
            continue

        # filter commands
        if filterCommand and not all( [c in cmd for c in filterCommand.split("*")] ): continue
        # check if a later version of the cmd has already been executed
        if cmd.replace(ignore,"").replace(" ","") not in [f.replace(ignore,"").replace(" ","") for f in filteredCmds]:
            filteredFiles.append(file)
            filteredCmds.append(cmd)

    logger.info("Loop over %i file%s"%(len(filteredFiles),"s" if len(filteredFiles)>1 else "") )

    # Loop over error filteredFiles
    for i, file in enumerate(filteredFiles):
        # filter for scripts like nanopostprocessing.py
        cmd = filteredCmds[i]

        with open( file, "r" ) as f:
            logs = f.readlines()

        logger.debug("Loop over %i entries in file"%len(logs))

        # Loop over error lines
        for log in logs:

            if any( word in log.lower() for word in trigger_words ):
                # Found error!
                logger.debug("Found error '%s' in file %s"%(log.split("\n")[0], file) )

                if any( expression.lower() in log.lower() for expression in accepted_errors ):
                    # Error is ok!
                    logger.debug("Error is accepted!")
                    continue

                # Found uncaught error!
                logger.info("Error in file %s:\t\t%s"%(file, log[:80].replace("\n"," ")))

                # logging error files and commands
                with open( "errorCommands.sh", "a" ) as ferr:
                    ferr.write( " ".join([cmd, addon if addon else "", "\n"]) )
                with open( "errorLogs.log", "a" ) as ferr:
                    ferr.write( file + "\n" )

                # Skipping rest of the log file
                break
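trigger_words and accepted_errors are module-level lists this snippet relies on but does not define; plausible placeholders (their real contents are not shown in the source):

trigger_words   = ["error", "traceback", "segmentation violation", "exception"]
accepted_errors = ["error is accepted", "known transient error"]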
Example 27
argParser.add_argument("--checkOnly",
                       action="store_true",
                       help="check values?")
#argParser.add_argument('--nJobs',             action='store',  default=1,      type=int,                             help="Maximum number of simultaneous jobs.")
#argParser.add_argument('--job',               action='store',  default=0,      type=int,                             help="Run only job i")
args = argParser.parse_args()

if args.year != "RunII": args.year = int(args.year)

# Logging
import Analysis.Tools.logger as logger
logger = logger.get_logger(args.logLevel, logFile=None)
import RootTools.core.logger as logger_rt
logger_rt = logger_rt.get_logger(args.logLevel, logFile=None)

logger.debug("Start run_transferFactor.py")

if args.customQCDUpdates:
    args.customQCDUpdates = customQCDTF_updates[args.customQCDUpdates]

if not args.controlRegion:
    logger.warning("ControlRegion not known")
    sys.exit(0)

parameters = allRegions[args.controlRegion]["parameters"]
channels = [args.mode] if args.mode != "all" else [
    "e", "mu"
]  #allRegions[args.controlRegion]["channels"]
#regions          = allRegions[args.controlRegion]["inclRegion"] #if allRegions[args.controlRegion]["noPhotonCR"] else allRegions[args.controlRegion]["inclRegion"] + allRegions[args.controlRegion]["regions"] + regionsTTG20To120 + regionsTTG120To220 + regionsTTG220
setup = Setup(year=args.year,
              photonSelection=False,