def initCache(self, cacheDir):
    """Initialise the pickle-backed result cache for this estimate.

    Parameters
    ----------
    cacheDir : str or None
        Directory in which the cache file ``<self.name>.pkl`` is created.
        If falsy, caching is disabled and ``self.cache`` is set to None.
    """
    if cacheDir:
        self.cacheDir = cacheDir
        cacheFileName = os.path.join(cacheDir, self.name + '.pkl')
        # Create the parent directory race-free: with concurrent batch jobs,
        # another process may create it between an exists() check and
        # makedirs(), which would raise. Catch the EEXIST case instead
        # (py2-compatible; on py3 this is equivalent to exist_ok=True).
        targetDir = os.path.dirname(cacheFileName)
        try:
            os.makedirs(targetDir)
        except OSError:
            if not os.path.isdir(targetDir):
                raise
        self.cache = Cache(cacheFileName, verbosity=2)
    else:
        # No cache directory given -> caching disabled.
        self.cache = None
def __init__(self, name, cacheDir=None, useTop16009=False):
    """Set up the data-driven TTZ estimate.

    Parameters
    ----------
    name : str
        Name of the estimate (also used for cache file naming by the base class).
    cacheDir : str or None
        Forwarded to the base class; directory for cached results.
    useTop16009 : bool
        If True, use the TOP-16-009 measurement ratio instead of the default.
    """
    super(DataDrivenTTZEstimate, self).__init__(name, cacheDir=cacheDir)

    # Selection windows as (min, max); -1 means no upper bound.
    self.nJets         = (3, -1)   # jet selection (min, max)
    self.nLooseBTags   = (2, -1)   # loose bjet selection (min, max)
    self.nMediumBTags  = (0, -1)   # bjet selection (min, max)

    # TOP-16-009 scale factor and its statistical uncertainty.
    self.useTop16009   = useTop16009
    self.ratioTop16009 = 1.27
    # self.sysErrTop16009 = (-0.17, +0.20)
    self.statErrTop16009 = (-0.37, +0.42)

    # Because we are going to reuse a lot of yields which otherwise
    # will be terribly slow, keep a dedicated helper cache on disk.
    helperCachePath      = os.path.join('.', 'helperCache.pkl')
    self.helperCacheName = helperCachePath
    self.helperCache     = Cache(self.helperCacheName, verbosity=2)
from StopsDilepton.analysis.u_float import u_float
from math import sqrt
##https://twiki.cern.ch/twiki/bin/viewauth/CMS/SUSYSignalSystematicsRun2
from StopsDilepton.tools.user import combineReleaseLocation
from StopsDilepton.tools.cardFileWriter import cardFileWriter

# Directory layout for the limit cards: <analysis_results>/<setup prefix>/cardFiles/<regions>.
limitPrefix = options.regions
limitDir = os.path.join(setup.analysis_results, setup.prefix(), 'cardFiles', limitPrefix)
overWrite = False   # if True, regenerate card files even when they already exist
useCache = True
verbose = True

if not os.path.exists(limitDir):
    os.makedirs(limitDir)

# Cache of previously calculated limits, keyed per signal point.
cacheFileName = os.path.join(limitDir, 'calculatedLimits.pkl')
limitCache = Cache(cacheFileName, verbosity=2)

def wrapper(s):
    # Build (or reuse) the combine card file for signal point `s` and run limits.
    # NOTE(review): this function continues beyond the visible chunk.
    c = cardFileWriter.cardFileWriter()
    c.releaseLocation = combineReleaseLocation
    cardFileName = os.path.join(limitDir, s.name + '.txt')
    if not os.path.exists(cardFileName) or overWrite:
        counter = 0
        c.reset()
        # Declare the log-normal nuisance parameters used by the card.
        c.addUncertainty('PU', 'lnN')
        c.addUncertainty('topPt', 'lnN')
        c.addUncertainty('JEC', 'lnN')
        c.addUncertainty('JER', 'lnN')
        c.addUncertainty('SFb', 'lnN')