def GetTotalUncertainyTable(self, hRate, hSystUp, hSystDown, hLine, align):
    '''
    Build a small text table summarising the total systematic uncertainty.

    The up/down percentage uncertainties are derived from the ratio of the
    integrals of the syst-up and syst-down histograms, with the ratio error
    obtained via error propagation from the quadrature sum of bin errors.

    \param hRate     nominal rate histogram (ROOT TH1-like)
    \param hSystUp   up-varied rate histogram
    \param hSystDown down-varied rate histogram
    \param hLine     horizontal-rule string used as table separator
    \param align     format string used to align the table columns
    \return list of formatted table rows (strings)
    '''
    rows = []

    # Integrals of the nominal and the up/down-varied histograms
    sumNominal = hRate.Integral()
    sumUp = hSystUp.Integral()
    sumDown = hSystDown.Integral()

    # Quadrature sums of the per-bin errors of the varied histograms
    nBinsX = hRate.GetNbinsX()
    errSqUp = 0.0
    errSqDown = 0.0
    for iBin in range(1, nBinsX + 1):
        errSqUp += hSystUp.GetBinError(iBin)**2
        errSqDown += hSystDown.GetBinError(iBin)**2

    # Ratio of up/down integrals and its propagated error.
    # Defaults (ratio=1, sigma=0) are kept when either integral is non-positive.
    ratio = 1.0
    ratioSigma = 0.0
    if sumUp > 0.0 and sumDown > 0.0:
        ratio = sumUp / sumDown
        ratioSigma = errorPropagationForDivision(sumUp, sqrt(errSqUp), sumDown, sqrt(errSqDown))

    # Percentage deviations (up/down) of the ratio from unity
    sigmaUp = (ratio + ratioSigma - 1.0) * 100
    sigmaDown = (ratio - ratioSigma - 1.0) * 100

    # Assemble the table: rule, aligned summary row, centred yield line, rule
    rows.append(hLine)
    rangeX = "%s to %s" % (hRate.GetBinCenter(1), hRate.GetBinCenter(nBinsX))
    rangeBins = "1 to %d" % (nBinsX)
    rows.append(align.format(rangeBins,
                             rangeX,
                             "%.1f" % sumNominal,
                             "%.1f" % sumUp,
                             "%.1f" % sumDown,
                             "%.1f" % (sigmaUp),
                             "%.1f" % (sigmaDown)))
    evtYield = "{:^85}".format("Events +/- stat. +/- syst. = %.1f +/- %.1f +/- %.1f" % (sumNominal, abs(sumNominal - sumUp), abs(sumNominal - sumDown)))
    rows.append(ShellStyles.HighlightAltStyle() + evtYield + ShellStyles.NormalStyle())
    rows.append(hLine)
    return rows
import subprocess
from subprocess import Popen, PIPE
import os
import sys
import datetime
from optparse import OptionParser

import HiggsAnalysis.NtupleAnalysis.tools.ShellStyles as ShellStyles

#================================================================================================
# Variable definition
#================================================================================================
# Shorthand aliases for the ANSI shell-styling escape sequences used in printouts
ss = ShellStyles.SuccessStyle()
ns = ShellStyles.NormalStyle()
ts = ShellStyles.NoteStyle()
hs = ShellStyles.HighlightAltStyle()
es = ShellStyles.ErrorStyle()

#================================================================================================
# Function Definitions
#================================================================================================
def Verbose(msg, printHeader=False):
    '''
    Print msg to stdout only when verbose output is enabled.

    Relies on a module-level VERBOSE flag that is not defined in this chunk —
    presumably set from the command-line options at script start-up (TODO confirm).

    \param msg          message to print (skipped when empty)
    \param printHeader  if True, print the script-name header line first
    '''
    if not VERBOSE:
        return
    if printHeader:
        print "=== submitCondor.py:"
    if msg != "":
        print "\t", msg
    return
import datasets as datasetsTest
import HiggsAnalysis.NtupleAnalysis.tools.dataset as dataset
import HiggsAnalysis.NtupleAnalysis.tools.aux as aux
import HiggsAnalysis.NtupleAnalysis.tools.git as git
import HiggsAnalysis.NtupleAnalysis.tools.ShellStyles as ShellStyles

#================================================================================================
# Global Definitions
#================================================================================================
# Debug switches (off by default)
_debugMode = False
_debugPUreweighting = False
_debugMemoryConsumption = False
# Shorthand aliases for the ANSI shell-styling escape sequences used in printouts
sh_Error = ShellStyles.ErrorStyle()
sh_Success = ShellStyles.SuccessStyle()
sh_Note = ShellStyles.HighlightAltStyle()
sh_Normal = ShellStyles.NormalStyle()

#================================================================================================
# Function Definition
#================================================================================================
def Verbose(msg, printHeader=False):
    '''
    Calls Print() only if verbose options is set to true.

    Print() is defined elsewhere in this file; the gate is the module-level
    _debugMode flag defined above.

    \param msg          message forwarded to Print()
    \param printHeader  forwarded to Print() to control the header line
    '''
    if not _debugMode:
        return
    Print(msg, printHeader)
    return
def main(opts):
    '''
    Entry point: build a pseudo-multicrab directory containing the data-driven
    (FakeB) pseudo-dataset for every selected era/searchMode/optimizationMode
    combination, for the nominal module, the transfer-factor variations, and
    each systematic variation source.

    \param opts  parsed command-line options (mcrab, analysisName, shape, ...)
    '''
    # Object for selecting data eras, search modes, and optimization modes
    myModuleSelector = analysisModuleSelector.AnalysisModuleSelector()

    # Obtain dsetMgrCreator and register it to module selector
    dsetMgrCreator = dataset.readFromMulticrabCfg(directory=opts.mcrab)

    # Obtain systematics names
    mySystematicsNamesRaw = dsetMgrCreator.getSystematicVariationSources()
    mySystematicsNames = getSystematicsNames(mySystematicsNamesRaw, opts)

    # Set the primary source
    Verbose("Setting the primary source (label=%s)" % (ShellStyles.NoteStyle() + opts.analysisName + ShellStyles.NormalStyle()), True)
    myModuleSelector.setPrimarySource(label=opts.analysisName, dsetMgrCreator=dsetMgrCreator)

    # Select modules
    myModuleSelector.doSelect(opts=None)  #fixme: (opts=opts)?

    # Loop over era/searchMode/optimizationMode combos.
    # Total = combos x (nominal + one per systematic) x number of shapes
    myTotalModules = myModuleSelector.getSelectedCombinationCount() * (len(mySystematicsNames) + 1) * len(opts.shape)
    Verbose("Found %s modules in total" % (myTotalModules), True)

    count, nEras, nSearchModes, nOptModes, nSysVars = myModuleSelector.getSelectedCombinationCountIndividually()
    if nSysVars > 0:
        msg = "Running over %d modules (%d eras x %d searchModes x %d optimizationModes x %d systematic variations)" % (count, nEras, nSearchModes, nOptModes, nSysVars)
    else:
        msg = "Running over %d modules (%d eras x %d searchModes x %d optimizationModes)" % (count, nEras, nSearchModes, nOptModes)
    Verbose(msg, True)

    # Create pseudo-multicrab creator
    msg = "Will create pseudo-dataset %s inside the pseudo-multicrab directory" % (ShellStyles.NoteStyle() + opts.analysisName + ShellStyles.NormalStyle())
    Verbose(msg, True)
    myOutputCreator = pseudoMultiCrabCreator.PseudoMultiCrabCreator(opts.analysisName, opts.mcrab, verbose=opts.verbose)

    # Make time stamp for start time
    myGlobalStartTime = time.time()

    iModule = 0
    # For-loop: All Shapes
    for iShape, shapeType in enumerate(opts.shape, 1):
        msg = "Shape %d/%d:%s %s" % (iShape, len(opts.shape), ShellStyles.NormalStyle(), shapeType)
        Verbose(ShellStyles.HighlightAltStyle() + msg, True)

        # Initialize
        myOutputCreator.initialize(subTitle=shapeType, prefix="")  #fixme: remove shapeType from sub-directory name?

        # Get lists of settings
        erasList = myModuleSelector.getSelectedEras()
        modesList = myModuleSelector.getSelectedSearchModes()
        optList = myModuleSelector.getSelectedOptimizationModes()
        if 0:
            optList.append("")  #append the default opt mode iff more optimization modes exist

        # For-loop: All eras
        for era in erasList:
            # For-loop: All searchModes
            for searchMode in modesList:
                # For-loop: All optimizationModes
                for optimizationMode in optList:
                    Verbose("era = %s, searchMode = %s, optMode = %s" % (era, searchMode, optimizationMode), True)

                    # If an optimization mode is defined in options skip the rest
                    if opts.optMode != None:
                        if optimizationMode != opts.optMode:
                            continue

                    # Obtain normalization factors for given Era, SearchMode, and OptimizationMode!
                    myNormFactors = importNormFactors(era, searchMode, optimizationMode, opts.mcrab)

                    # Nominal module
                    myModuleInfoString = getModuleInfoString(era, searchMode, optimizationMode)
                    iModule += 1

                    # Inform user of what is being processes
                    if optimizationMode != "":
                        msg = "Module %d/%d: %s_%s_%s_%s" % (iModule, myTotalModules, opts.analysisName, searchMode, era, optimizationMode)
                    else:
                        msg = "Module %d/%d: %s_%s_%s" % (iModule, myTotalModules, opts.analysisName, searchMode, era)
                    Print(ShellStyles.HighlightAltStyle() + msg + ShellStyles.NormalStyle(), iModule == 1)

                    # Keep time
                    myStartTime = time.time()

                    Verbose("Create dataset manager with given settings", True)
                    nominalModule = ModuleBuilder(opts, myOutputCreator, opts.verbose)
                    nominalModule.createDsetMgr(opts.mcrab, era, searchMode, optimizationMode)
                    if (iModule == 1):
                        if opts.verbose:
                            nominalModule.debug()

                    # Build the module
                    doFakeBNormalisationSyst = False
                    nominalModule.buildModule(opts.dataSrc, opts.ewkSrc, myNormFactors[opts.normFactorKey], doFakeBNormalisationSyst, opts.normDataSrc, opts.normEwkSrc)

                    # Do TF variations named "SystVarUp" and "SystVarDown" (i.e. (Get results using TF+Error and TF-Error instead of TF)
                    if len(mySystematicsNames) > 0:
                        Verbose("Adding FakeB normalization systematics (iff also other systematics present) ", True)
                        #nominalModule.buildTransferFactorVarSystModule(opts.dataSrc, opts.ewkSrc, myNormFactors["SystVarUp"], myNormFactors["SystVarDown"])
                        nominalModule.buildTransferFactorVarSystModule(opts.dataSrc, opts.ewkSrc, myNormFactors)

                    Verbose("Deleting nominal module", True)
                    nominalModule.delete()

                    Verbose("Printing time estimate", True)
                    printTimeEstimate(myGlobalStartTime, myStartTime, iModule, myTotalModules)

                    Verbose("Now do the rest of systematics variations", True)
                    # For-loop: One module per systematic variation source
                    for syst in mySystematicsNames:
                        iModule += 1
                        msg = "Module %d/%d: %s/%s" % (iModule, myTotalModules, myModuleInfoString, syst)
                        print
                        Print(ShellStyles.HighlightAltStyle() + msg + ShellStyles.NormalStyle(), False)
                        myStartTime = time.time()
                        systModule = ModuleBuilder(opts, myOutputCreator)

                        # Create dataset manager with given settings
                        systModule.createDsetMgr(opts.mcrab, era, searchMode, optimizationMode, systematicVariation=syst)

                        # Build systematics module
                        Verbose("Building systematics module (opts.normFactorKey = %s)" % (opts.normFactorKey), True)
                        systModule.buildModule(opts.dataSrc, opts.ewkSrc, myNormFactors[opts.normFactorKey], False, opts.normDataSrc, opts.normEwkSrc)
                        printTimeEstimate(myGlobalStartTime, myStartTime, iModule, myTotalModules)
                        systModule.delete()
                    print
        Verbose("Pseudo-multicrab ready for %s" % shapeType, True)

    # Print some timing statistics
    Verbose("Average processing time per module was %.1f seconds" % getAvgProcessTimeForOneModule(myGlobalStartTime, myTotalModules), True)
    Print("Total elapsed time was %.1f seconds" % getTotalElapsedTime(myGlobalStartTime), True)

    # Create rest of pseudo multicrab directory
    myOutputCreator.finalize(silent=False)
    return
def buildTransferFactorVarSystModule(self, dataPath, ewkPath, normFactors):  #Up, normFactorsDown):
    '''
    This function re-calculates all histograms as normal and stores them into folders
    with extensions:
    "Hplus2tbAnalysis_<opts.searchMode>_<opts.era>_SystVarTransferFactorUp"
    "Hplus2tbAnalysis_<opts.searchMode>_<opts.era>_SystVarTransferFactorDown"

    The difference is that instead of using the nominal Transfer Factors (TF) for a
    given FakeB measurement bin (e.g. ldg b-jet eta) it does it using an up/down
    variation of it:
    TF_Up   = TF + Error
    TF_Down = TF - Error
    where the error values and errors of the TFs are calculated in FakeBNormalization.py
    using the CalculateTransferFactor() method of FakeBNormalizationManager class object.
    The error in this case is calculated by using error propagation:
    TF = CR1/CR2
    TF_Error = ErrorPropagationForDivision(TF) = sqrt((sigmaA/b)**2 + (a/(b**2)*sigmaB)**2)
    where:
    A      = Integral of CR1 histogram (from ROOT)
    sigmaA = Integral Error for CR1 histogram (from ROOT)

    Also builds the 3x-error variants ("SystVarTransferFactor3xUp"/"3xDown") from the
    "SystVar3xUp"/"SystVar3xDown" entries of normFactors.

    NOTE(review): this method reads the module-level `opts` (analysisNameSaveAs,
    verbose) alongside self._opts — presumably `opts` is set by the script entry
    point before this is called; confirm.
    '''
    # Up variation of Transfer Factors
    print
    #Print(ShellStyles.HighlightAltStyle() + "TF+Error Variation" + ShellStyles.NormalStyle() , True)
    Print(ShellStyles.HighlightAltStyle() + "Extra Module: SystVarTransferFactorPlus" + ShellStyles.NormalStyle(), False)
    mySystModulePlus = pseudoMultiCrabCreator.PseudoMultiCrabModule(
        self._dsetMgr,
        self._era,
        self._searchMode,
        self._optimizationMode,
        "SystVarTransferFactorUp",  #self._systematicVariation
        opts.analysisNameSaveAs,
        opts.verbose)
    self._transferFactorPlusResult = fakeBResult.FakeBResultManager(
        dataPath,
        ewkPath,
        self._dsetMgr,
        self._luminosity,
        self.getModuleInfoString(),
        normFactors["SystVarUp"],
        optionDoFakeBNormalisationSyst=False,
        optionUseInclusiveNorm=self._opts.useInclusiveNorm,
        keyList=["AllSelections"],
        verbose=opts.verbose)

    # Up variation of Transfer Factors (3x)
    print
    Print(ShellStyles.HighlightAltStyle() + "Extra Module: SystVarTransferFactor3xPlus" + ShellStyles.NormalStyle(), False)
    mySystModule3xPlus = pseudoMultiCrabCreator.PseudoMultiCrabModule(
        self._dsetMgr,
        self._era,
        self._searchMode,
        self._optimizationMode,
        "SystVarTransferFactor3xUp",  #self._systematicVariation
        opts.analysisNameSaveAs,
        opts.verbose)
    # Create new list with TF + 3xError
    self._transferFactorPlusResult3x = fakeBResult.FakeBResultManager(
        dataPath,
        ewkPath,
        self._dsetMgr,
        self._luminosity,
        self.getModuleInfoString(),
        normFactors["SystVar3xUp"],
        optionDoFakeBNormalisationSyst=False,
        optionUseInclusiveNorm=self._opts.useInclusiveNorm,
        keyList=["AllSelections"],
        verbose=opts.verbose)

    # Add the plots
    mySystModulePlus.addPlots(self._transferFactorPlusResult.getShapePlots(), self._transferFactorPlusResult.getShapePlotLabels())
    mySystModule3xPlus.addPlots(self._transferFactorPlusResult3x.getShapePlots(), self._transferFactorPlusResult3x.getShapePlotLabels())

    # Save "_SystVarTransferFactorUp" folder to pseudo-dataset pseudo-multicrab
    self._outputCreator.addModule(mySystModulePlus)
    self._outputCreator.addModule(mySystModule3xPlus)

    # Down variation of Transfer Factors
    print
    Print(ShellStyles.HighlightAltStyle() + "Extra Module: SystVarTransferFactorMinus" + ShellStyles.NormalStyle(), False)
    mySystModuleMinus = pseudoMultiCrabCreator.PseudoMultiCrabModule(
        self._dsetMgr,
        self._era,
        self._searchMode,
        self._optimizationMode,
        "SystVarTransferFactorDown",
        opts.analysisNameSaveAs,
        opts.verbose)
    self._transferFactorMinusResult = fakeBResult.FakeBResultManager(
        dataPath,
        ewkPath,
        self._dsetMgr,
        self._luminosity,
        self.getModuleInfoString(),
        normFactors["SystVarDown"],
        optionDoFakeBNormalisationSyst=False,
        optionUseInclusiveNorm=self._opts.useInclusiveNorm,
        keyList=["AllSelections"],
        verbose=opts.verbose)

    # Down variation of Transfer Factors (3x)
    print
    Print(ShellStyles.HighlightAltStyle() + "Extra Module: SystVarTransferFactor3xMinus" + ShellStyles.NormalStyle(), False)
    mySystModule3xMinus = pseudoMultiCrabCreator.PseudoMultiCrabModule(
        self._dsetMgr,
        self._era,
        self._searchMode,
        self._optimizationMode,
        "SystVarTransferFactor3xDown",
        opts.analysisNameSaveAs,
        opts.verbose)
    self._transferFactorMinusResult3x = fakeBResult.FakeBResultManager(
        dataPath,
        ewkPath,
        self._dsetMgr,
        self._luminosity,
        self.getModuleInfoString(),
        normFactors["SystVar3xDown"],
        optionDoFakeBNormalisationSyst=False,
        optionUseInclusiveNorm=self._opts.useInclusiveNorm,
        keyList=["AllSelections"],
        verbose=opts.verbose)

    # Add the plots
    mySystModuleMinus.addPlots(self._transferFactorMinusResult.getShapePlots(), self._transferFactorMinusResult.getShapePlotLabels())
    mySystModule3xMinus.addPlots(self._transferFactorMinusResult3x.getShapePlots(), self._transferFactorMinusResult3x.getShapePlotLabels())

    # Save "_SystVarTransferFactorDown" folder to pseudo-dataset pseudo-multicrab
    self._outputCreator.addModule(mySystModuleMinus)
    self._outputCreator.addModule(mySystModule3xMinus)
    return
def __init__(self, dataPath, ewkPath, dsetMgr, luminosity, moduleInfoString, normFactors,
             optionCalculateQCDNormalizationSyst=True, normDataSrc=None, normEWKSrc=None,
             optionUseInclusiveNorm=False, verbose=False):
    '''
    Collect the final data-driven shape histograms for every plot found under
    dataPath, applying the given normalization factors, and (optionally) the
    QCD-normalization systematics histograms.

    \param dataPath          directory inside the "Data" dataset to scan for histograms
    \param ewkPath           corresponding directory for the EWK histograms
    \param dsetMgr           dataset manager (project type)
    \param luminosity        integrated luminosity used for normalisation
    \param moduleInfoString  identifier string stored on the instance
    \param normFactors       dict of normalization factors; a single "Inclusive"
                             key forces inclusive normalisation
    \param optionCalculateQCDNormalizationSyst  also derive met-shape systematics (1-D plots only)
    \param normDataSrc       source for normalisation systematics (data) — TODO confirm
    \param normEWKSrc        source for normalisation systematics (EWK) — TODO confirm
    \param optionUseInclusiveNorm  use one inclusive normalisation factor
    \param verbose           enable verbose printouts
    '''
    # Result containers filled by the helper methods below
    self._shapePlots = []
    self._shapePlotLabels = []
    self._QCDNormalizationSystPlots = []
    self._QCDNormalizationSystPlotLabels = []
    self._moduleInfoString = moduleInfoString
    self._useInclusiveNorm = optionUseInclusiveNorm
    # A lone "Inclusive" key in normFactors implies inclusive normalisation
    if len(normFactors.keys()) == 1 and normFactors.keys()[0] == "Inclusive":
        self._useInclusiveNorm = True
    self._verbose = verbose

    msg = "Obtaining final shape from data path \"%s\"" % (dataPath)
    Verbose(ShellStyles.HighlightStyle() + msg + ShellStyles.NormalStyle(), True)

    # Determine list of plots to consider
    myObjects = dsetMgr.getDataset("Data").getDirectoryContent(dataPath)

    # Ignore unwanted histograms and those designed for HToTauNu
    keywordList = ["JetEtaPhi"]
    ignoreList = []
    for k in keywordList:
        ignoreList.extend(filter(lambda name: k in name, myObjects))
    msg = "Ignoring a total of %s histograms:" % (len(ignoreList))
    Print(ShellStyles.WarningLabel() + msg, True)
    for hName in ignoreList:
        print "\t", os.path.join(dataPath, hName)

    # Update myObjects list with filtered results
    myObjects = list(x for x in myObjects if x not in ignoreList)

    # For-Loop: All plots to consider
    for i, plotName in enumerate(myObjects, 1):
        # For testing
        #if "LdgTrijetMass_AfterAllSelections" not in plotName:
        #    continue
        msg = "{:<9} {:>3} {:<1} {:<3} {:<50}".format("Histogram", "%i" % i, "/", "%s:" % (len(myObjects)), os.path.join(dataPath, plotName))
        Print(ShellStyles.HighlightAltStyle() + msg + ShellStyles.NormalStyle(), i==1)

        # Ensure that histograms exist
        dataOk = self._sanityChecks(dsetMgr, dataPath, plotName)
        ewkOk = self._sanityChecks(dsetMgr, ewkPath, plotName)

        Verbose("Obtaining shape plots (the returned object is not owned)", True)
        myShapeHisto = self._obtainShapeHistograms(i, dataPath, ewkPath, dsetMgr, plotName, luminosity, normFactors)

        # Obtain plots for systematics coming from met shape difference for control plots #FIXME-Systematics
        if optionCalculateQCDNormalizationSyst:
            if isinstance(myShapeHisto, ROOT.TH2):
                # Met-shape uncertainty is only defined for 1-D histograms
                msg = "Skipping met shape uncertainty because histogram has more than 1 dimensions!"
                Print(ShellStyles.WarningLabel() + msg, True)
            else:
                self._obtainQCDNormalizationSystHistograms(myShapeHisto, dsetMgr, plotName, luminosity, normDataSrc, normEWKSrc)
    return
def main(count, runRange, dsetList, opts):
    '''
    For one run range: configure the dataset manager, prune unwanted datasets,
    accumulate the integrated luminosity, merge samples, collect the control/
    signal-region histogram paths, and drive the transfer-factor plotting.

    \param count     1-based index of this run range (controls header printing)
    \param runRange  human-readable run-range label used in printouts
    \param dsetList  list of dataset-name substrings to keep (e.g. run eras);
                     a single "Run2016" entry means inclusive data
    \param opts      parsed command-line options (mcrab, folder, histoKey, ...)
    '''
    # Apply TDR style
    style = tdrstyle.TDRStyle()
    style.setOptStat(True)
    style.setGridX(True)
    style.setGridY(True)

    # Obtain dsetMgrCreator and register it to module selector
    dsetMgrCreator = dataset.readFromMulticrabCfg(directory=opts.mcrab)

    # Get list of eras, modes, and optimisation modes
    erasList = dsetMgrCreator.getDataEras()
    modesList = dsetMgrCreator.getSearchModes()
    optList = dsetMgrCreator.getOptimizationModes()
    sysVarList = dsetMgrCreator.getSystematicVariations()
    sysVarSrcList = dsetMgrCreator.getSystematicVariationSources()

    # If user does not define optimisation mode do all of them
    if opts.optMode == None:
        if len(optList) < 1:
            optList.append("")
        optModes = optList
    else:
        optModes = [opts.optMode]

    # For-loop: All opt Mode
    for opt in optModes:
        opts.optMode = opt

        # Setup & configure the dataset manager
        datasetsMgr = GetDatasetsFromDir(opts)
        datasetsMgr.updateNAllEventsToPUWeighted()
        datasetsMgr.loadLuminosities()  # from lumi.json
        if opts.verbose:
            datasetsMgr.PrintCrossSections()
            datasetsMgr.PrintLuminosities()

        # Remove datasets
        removeList = ["QCD-b", "Charged", "QCD", "ZJetsToQQ_HT600toInf"]
        opts.intLumi = 0.0

        # For-loop: All datasets in the manager
        for d in datasetsMgr.getAllDatasets():
            if d.isMC():
                continue

            # Inclusive data
            if len(dsetList) == 1 and dsetList[0] == "Run2016":
                Verbose("Inclusive data. Will not remove anything", True)
                opts.intLumi += GetLumi(datasetsMgr)
                break

            # Special combinations
            for rr in dsetList:
                if rr not in d.getName():
                    Verbose("\"%s\" is not in dataset name \"%s\"" % (rr, d.getName()), False)
                    if d.getName() not in removeList:
                        # Ensure dataset to be removed is not in the dsetList
                        if not any(rr in d.getName() for rr in dsetList):
                            removeList.append(d.getName())
                else:
                    Verbose("\"%s\" is in dataset name \"%s\"" % (rr, d.getName()), False)
                    # Get luminosity if a value is not specified
                    opts.intLumi += d.getLuminosity()

        # For-loop: All dataset names to be removed
        for i, d in enumerate(removeList, 0):
            Verbose(ShellStyles.HighlightAltStyle() + "Removing dataset %s" % d + ShellStyles.NormalStyle(), False)
            datasetsMgr.remove(filter(lambda name: d in name, datasetsMgr.getAllDatasetNames()))

        # Inform user of dataset and corresponding integrated lumi
        Print("%d) %s (%.1f 1/pb)" % (count, runRange, opts.intLumi), count == 1)
        #Print("%d) %s (%.1f 1/pb)" % (count, ", ".join(dsetList), opts.intLumi), count==1)

        # Merge histograms (see NtupleAnalysis/python/tools/plots.py)
        plots.mergeRenameReorderForDataMC(datasetsMgr)

        # Print dataset information
        if 0:
            datasetsMgr.PrintInfo()

        # Merge EWK samples
        datasetsMgr.merge("EWK", aux.GetListOfEwkDatasets())

        # Do the fit on the histo after ALL selections (incl. topology cuts)
        folderList = datasetsMgr.getDataset(datasetsMgr.getAllDatasetNames()[0]).getDirectoryContent(opts.folder)
        folderList1 = [h for h in folderList if opts.histoKey in h]
        # Keep only validation/signal/control-region folders
        folderList2 = [h for h in folderList1 if "VR" in h or "SR" in h or "CRone" in h or "CRtwo" in h or "CRthree" in h or "CRfour" in h]

        # For-loop: All folders
        histoPaths = []
        for f in folderList2:
            folderPath = os.path.join(opts.folder, f)
            histoList = datasetsMgr.getDataset(datasetsMgr.getAllDatasetNames()[0]).getDirectoryContent(folderPath)
            pathList = [os.path.join(folderPath, h) for h in histoList]
            histoPaths.extend(pathList)

        binLabels = GetBinLabels("CRone", histoPaths)
        PlotHistosAndCalculateTF(runRange, datasetsMgr, histoPaths, binLabels, opts)
    return
def main(opts):
    '''
    Entry point: for every mass point found from the datacard pattern, plot the
    shape nuisances (via doPlot) and finally print a table of the maximum
    contracted signal uncertainties.

    \param opts  parsed command-line options (cardPattern, rootfilePattern,
                 gridx/gridy/logx/logy, dirName, ...)
    '''
    # Apply TDR style
    style = tdrstyle.TDRStyle()
    style.setGridX(opts.gridx)
    style.setGridY(opts.gridy)
    style.setLogX(opts.logx)
    style.setLogY(opts.logy)

    # Create legend and set style
    histograms.createLegend.moveDefaults(dx=-0.1, dh=-0.15)
    histograms.uncertaintyMode.set(histograms.Uncertainty.StatOnly)
    styles.ratioLineStyle.append(styles.StyleLine(lineColor=13))

    # Define some variables
    nameList = []
    allShapeNuisances = []
    signalTable = {}
    myDatacardPattern = ""
    myRootfilePattern = ""

    # Find out the mass points
    if opts.cardPattern == None:
        mySettings = limitTools.GeneralSettings(".", [])
        myDatacardPattern = mySettings.getDatacardPattern(limitTools.LimitProcessType.TAUJETS)
        myRootfilePattern = mySettings.getRootfilePattern(limitTools.LimitProcessType.TAUJETS)
    else:
        # "MMM" -> "M%s" and "MM" -> "%s" so the mass can be substituted later
        myDatacardPattern = opts.cardPattern.replace("MMM", "M%s").replace("MM", "%s")
        myRootfilePattern = opts.rootfilePattern.replace("MMM", "M%s").replace("MM", "%s")

    # Get mass points to consider
    massPoints = DatacardReader.getMassPointsForDatacardPattern(".", myDatacardPattern)
    Print("The following masses will be considered: %s" % (ShellStyles.HighlightAltStyle() + ", ".join(massPoints) + ShellStyles.NormalStyle()), True)

    # For-loop: All mass points
    for i, m in enumerate(massPoints, 1):
        # Obtain luminosity from the datacard
        myLuminosity = float(limitTools.readLuminosityFromDatacard(".", myDatacardPattern % m))
        # Do the plots
        doPlot(opts, int(m), nameList, allShapeNuisances, myLuminosity, myDatacardPattern, myRootfilePattern, signalTable)

    # Print signal table
    Print("Max contracted uncertainty for signal:", True)
    table = []
    align = "{:>15} {:>15} {:>15}"
    hLine = "=" * 50
    table.append(hLine)
    table.append(align.format("Systematic", "Minimum", "Maximum"))
    table.append(hLine)

    # For-loop: All signal
    for i, k in enumerate(signalTable.keys(), 1):
        # Print("Key = %s" % (k), False)
        minVal = "%.3f" % (signalTable[k]["min"])
        maxVal = "%.3f" % (signalTable[k]["max"])
        msg = align.format(k, minVal, maxVal)
        table.append(msg)
    table.append(hLine)
    for row in table:
        Print(row, False)

    msg = "All results under directory %s" % (ShellStyles.SuccessStyle() + opts.dirName + ShellStyles.NormalStyle())
    Print(msg, True)
    return
def doPlot(opts, mass, nameList, allShapeNuisances, luminosity, myDatacardPattern, rootFilePattern, signalTable):
    '''
    Plot the shape nuisances of every dataset column in the datacard for one
    mass point.

    Side effects: appends processed column names to nameList (so repeat columns
    are skipped for later masses) and lets DatasetContainer.doPlot() fill
    signalTable with min/max uncertainties.

    \param opts               parsed command-line options
    \param mass               signal mass hypothesis (int)
    \param nameList           mutable list of dataset names already plotted
    \param allShapeNuisances  accumulated nuisance names (not modified here)
    \param luminosity         integrated luminosity read from the datacard
    \param myDatacardPattern  datacard filename pattern with a %s mass slot
    \param rootFilePattern    ROOT filename pattern with a %s mass slot
    \param signalTable        dict filled with per-nuisance min/max values
    '''
    fName = rootFilePattern % mass
    f = ROOT.TFile.Open(fName)
    content = f.GetListOfKeys()

    # Suppress the warning message of missing dictionary for some iterator
    backup = ROOT.gErrorIgnoreLevel
    ROOT.gErrorIgnoreLevel = ROOT.kError
    diriter = content.MakeIterator()
    ROOT.gErrorIgnoreLevel = backup

    # Find the datacard and nuisance names
    myCardReader = DatacardReader.DataCardReader(".", mass, myDatacardPattern, rootFilePattern)
    myDatasetNames = myCardReader.getDatasetNames()

    # Find the name stem and the name of the uncertainties
    datasets = []
    shapes = []
    for d in myDatasetNames:
        myLabel = d
        # Plot a column only once across mass points; the first column always
        myStatus = not d in nameList
        if d == myDatasetNames[0]:
            myStatus = True
        # Append the mass to the label if it is not already part of the name
        if not str(mass) in d:
            myLabel = "%sm%d" % (d, mass)
        myShapeNuisanceNames = myCardReader.getShapeNuisanceNames(d)
        # Drop bin-by-bin statistical nuisances
        myFilteredShapeNuisances = []
        for n in myShapeNuisanceNames:
            if not "statBin" in n and not n.endswith("_statUp") and not n.endswith("_statDown"):
                myFilteredShapeNuisances.append(n)
        if myStatus:
            myDataset = DatasetContainer(column=d, label=myLabel, nuisances=myFilteredShapeNuisances, cardReader=myCardReader, verbose=opts.verbose)
            datasets.append(myDataset)
            nameList.append(d)
        for n in myFilteredShapeNuisances:
            if not n in shapes:
                shapes.append(n)

    rebinList = None
    if opts.h2tb:
        rebinList = systematics._dataDrivenCtrlPlotBinning["LdgTetrajetMass_AfterAllSelections"]

    ## Do the actual plots
    for i, d in enumerate(datasets, 1):
        if opts.verbose:
            d.debug()
        msg = "{:>10}, {:<20}".format("m = %d GeV" % (mass), d.GetName())
        # Last dataset of the list gets the "success" styling
        if i < len(datasets):
            Print(ShellStyles.HighlightAltStyle() + msg + ShellStyles.NormalStyle(), False)
        else:
            Print(ShellStyles.SuccessStyle() + msg + ShellStyles.NormalStyle(), False)
        d.doPlot(opts, shapes, f, mass, luminosity, signalTable, rebinList)

    Verbose("Closing ROOT file %s" % (fName), True)
    f.Close()
def writeModuleToRootFileAlt(self, rootfile):
    '''
    By default when an histogram is created, it is added to the list of histogram
    objects in the current directory in memory. Remove reference to this histogram
    from current directory and add reference to new directory dir. dir can be 0 in
    which case the histogram does not belong to any directory.

    Writes this module's shapes, data-driven control plots, splitted-bin info and
    config-info histograms under a TDirectory named after the module inside the
    given (open, writable) ROOT file.
    '''
    self.Verbose("Dumping all objects to ROOT file %s" % (rootfile.GetName()), True)

    # Go to base directory
    rootfile.cd("/")

    # Create TDirectory
    self.Verbose("Creating directory %s%s%s in ROOT file %s" % (ShellStyles.HighlightAltStyle(), self._moduleName, ShellStyles.NormalStyle(), os.path.basename(rootfile.GetName())), True)
    myModuleDir = rootfile.mkdir(self._moduleName)

    # Save information
    for h in self._shapes:
        h.SetDirectory(myModuleDir)

    folders = []
    # For-loop: All plots
    for h in self._dataDrivenControlPlots:
        # Got to base dir
        rootfile.cd("/")

        # Determine names: the histogram name encodes its sub-folder path
        hName = h.GetName().split("/")[-1]
        path = h.GetName().replace("/" + hName, "")
        nDepth = len(path.split("/"))
        h.SetName(hName)
        # NOTE(review): both branches are identical — the >1-depth case was
        # apparently meant to raise (see commented-out Exception) but is
        # currently handled the same as depth 1.
        if nDepth > 1:
            folder = os.path.join(self._moduleName, path)
            #raise Exception("Histogram %s has more than 1 folder structure. Such a provision does not exists and must be added!" % (h.GetName()) )
        else:
            folder = os.path.join(self._moduleName, path)

        # Keep track of existing folders
        if folder not in folders:
            self.Verbose("Creating TDirectory %s in ROOT file %s" % (folder, rootfile.GetName()), True)
            rootfile.mkdir(folder)
            folders.append(folder)

        # Go to folder
        rootfile.cd(folder)
        myDir = rootfile.CurrentDirectory()
        self.Verbose("Setting directory %s for histogram %s" % (folder, h.GetName()), True)
        h.SetDirectory(myDir)

    # For debugging
    if 0:
        rootfile.ls()

    # Save splittedBinInfo
    self._hSplittedBinInfo.SetDirectory(myModuleDir)

    # Create config info for the module
    myConfigInfoDir = myModuleDir.mkdir("configInfo")
    self._hConfigInfo = ROOT.TH1F("configinfo", "configinfo", 2, 0, 2)
    self._hConfigInfo.GetXaxis().SetBinLabel(1, "control")
    self._hConfigInfo.SetBinContent(1, 1)
    self._hConfigInfo.GetXaxis().SetBinLabel(2, "luminosity")
    self._hConfigInfo.SetBinContent(2, self._luminosity)
    self._hConfigInfo.SetDirectory(myConfigInfoDir)
    myConfigInfoDir.Add(self._dataVersion)
    return
def _createAndSubmit(self): # Go to base directory os.chdir(self._basedir) Verbose("Current working directory is %s" % os.getcwd(), True) # Create jobs myPath = os.path.join(os.getenv("HIGGSANALYSIS_BASE"), "NtupleAnalysis/src/LimitCalc/work") if not os.path.exists(myPath): raise Exception("Error: Could not find directory '%s'!" % myPath) myCommand = os.path.join(myPath, "generateMultiCrabTaujets.py") if self._opts.combination: myCommand = os.path.join(myPath, "generateMultiCrabCombination.py") if self._opts.brlimit: myCommand += " --brlimit" else: myCommand += " --sigmabrlimit" myGridStatus = True if hasattr(self._opts, "lepType") and self._opts.lepType: myCommand += " --lep" raise Exception( "The LEP type CLs is no longer supported. Please use --lhcasy (asymptotic LHC-type CLs." ) if hasattr(self._opts, "lhcType") and self._opts.lhcType: myCommand += " --lhc" raise Exception( "The LHC type CLs is no longer supported. Please use --lhcasy (asymptotic LHC-type CLs." ) if hasattr(self._opts, "lhcTypeAsymptotic") and self._opts.lhcTypeAsymptotic: myCommand += " --lhcasy" myGridStatus = False if myGridStatus: myCommand += " --create" if not self._opts.nomlfit: myCommand += " --mlfit" if self._opts.significance: myCommand += " --significance" if self._opts.unblinded: myCommand += " --final" #msg = "Creating jobs with command:\n\t%s" % myCommand msg = myCommand Verbose( ShellStyles.HighlightAltStyle() + msg + ShellStyles.NormalStyle(), True) os.system(myCommand) # asymptotic jobs are run on the fly if myGridStatus: # Change to job directory self._findJobDir(".") if self._jobDir == None: raise Exception( "Error: Could not find 'LandSMultiCrab' or 'CombineMultiCrab' in a sub directory name under the base directory '%s'!" 
% self._basedir) os.chdir(self._jobDir) # Submit jobs msg = "Submitting multicrab jobs" Verbose(msg, True) proc = subprocess.Popen(["multicrab", "-submit all"], stdout=subprocess.PIPE) (out, err) = proc.communicate() print out # Change directory back s = self._backToTopLevel() if len(s) > 1: os.chdir(s) # print "current dir =",os.getcwd() return
if len(myDirs) == 0: raise Exception( "Error: Could not find any sub directories starting with 'datacards_' below this directory!" ) myDirs.sort() Verbose("Found %s datacard directories" % (len(myDirs)), True) myResults = [] # For-loop: All datacard directories for counter, d in enumerate(myDirs, 1): msg = "{:<9} {:>3} {:<1} {:<3} {:<50}".format("Directory", "%i" % counter, "/", "%i:" % len(myDirs), "%s" % d) Print( ShellStyles.HighlightAltStyle() + msg + ShellStyles.NormalStyle(), counter == 1) myResults.append(Result(opts, d)) # Inform user of success msg = "{:<9} {:>3} {:<1} {:<3} {:<50}".format("Directory", "%i" % counter, "/", "%i:" % len(myDirs), "Success") Print(ShellStyles.SuccessStyle() + msg + ShellStyles.NormalStyle(), True) Verbose("The results are stored in the following directories:", counter == 1) for i, r in enumerate(myResults, 1): msg = str(i) + ") " + r.getBaseDir()