Ejemplo n.º 1
0
    def runCombineForAsymptotic(self, quietStatus=False):
        if not quietStatus:
            print "Running Combine for asymptotic limits, saving results to %s" % self.dirname
        f = open(os.path.join(self.dirname, "configuration.json"), "wb")
        json.dump(self.configuration, f, sort_keys=True, indent=2)
        f.close()

        self._results = commonLimitTools.ResultContainer(
            self.opts.unblinded, self.dirname)
        mass_point_counter = 1
        for mass in self.massPoints:
            print "Processing mass point %s (point %d/%d)..." % (
                mass, mass_point_counter, len(self.massPoints))
            myResult = self.clsType.runCombine(mass)
            if myResult.failed:
                if not quietStatus:
                    print "Fit failed for mass point %s, skipping ..." % mass
            else:
                self._results.append(myResult)
                if not quietStatus:
                    print "Processed successfully mass point %s, the result is %s" % (
                        mass, self._results.getResultString(mass))
            mass_point_counter += 1
        if not quietStatus:
            print ""
            print "\033[92mSummary of the results:\033[00m"
            self._results.print2()
            fname = self._results.saveJson()
            print "Wrote results to %s" % fname
Ejemplo n.º 2
0
 def runCombine(self, mass):
     result = commonLimitTools.Result(mass)
     if self.opts.limit:
         if self.opts.unblinded:
             self._runObservedAndExpected(result, mass)
         else:
             self._runBlinded(result, mass)
     else:
         print "Skipping limit for mass:", mass
     self._runMLFit(mass)
     self._runSignificance(mass)
     return result
Ejemplo n.º 3
0
    def runCombineForAsymptotic(self, quietStatus=False):
        if not quietStatus:
            print "Running Combine for asymptotic limits, saving results to %s" % self.dirname
        f = open(os.path.join(self.dirname, "configuration.json"), "wb")
        json.dump(self.configuration, f, sort_keys=True, indent=2)
        f.close()

        self._results = commonLimitTools.ResultContainer(
            self.opts.unblinded, self.dirname)
        for mass in self.massPoints:
            myResult = self.clsType.runCombine(mass)
            if myResult.failed:
                if not quietStatus:
                    print "Fit failed for mass point %s, skipping ..." % mass
            else:
                self._results.append(myResult)
                if not quietStatus:
                    print "Processed successfully mass point %s" % mass
        if not quietStatus:
            print
            self._results.print2()
            fname = self._results.saveJson()
            print "Wrote results to %s" % fname
Ejemplo n.º 4
0
def analyseTaskDir(taskDir, scenarioData, scenario, massWhiteList, massPoints):
    myScenario = None
    myTanbeta = None
    mySplit = taskDir.split("_")
    myScenario = mySplit[2]
    if myScenario != scenario:
        # Scenario not requested, do not consider it
        return
    myTanbeta = mySplit[4].replace("tanbetascan", "")
    myList = os.listdir(taskDir)
    for item in myList:
        if item.startswith("higgsCombineobs_") and item.endswith(".root"):
            mySplit = item.split(".")
            myMass = mySplit[len(mySplit) - 2].replace("mH", "")
            if len(massWhiteList) > 0:
                if not myMass in massWhiteList:
                    continue
            if not myMass in massPoints:
                massPoints.append(myMass)
            # Read result
            myResult = commonLimitTools.Result(myMass)
            myStatus = CombineTools.parseResultFromCombineOutput(
                taskDir, myResult, myMass)
            if myStatus != 6:  # 1 obs + 5 exp values
                # Combine failed
                myResult.failed = True
            # Store result
            myKey = tbtools.constructResultKey(myMass, myTanbeta)
            if myKey in scenarioData.keys():
                raise Exception(
                    "Duplicate results for scenario=%s mass=%s tanbeta=%s! Remove wrong ones and then rerun!"
                    % (myScenario, myMass, myTanbeta))
            scenarioData[myKey] = {}
            scenarioData[myKey]["combineResult"] = myResult
            print "Result found for scenario=%s mass=%s tanbeta=%s" % (
                myScenario, myMass, myTanbeta)
Ejemplo n.º 5
0
def main(opts):
    """Plot shape nuisances for every mass point found via the datacard
    pattern, then print a min/max table of the contracted signal
    uncertainties collected by doPlot()."""

    # Apply TDR style
    style = tdrstyle.TDRStyle()
    style.setGridX(opts.gridx)
    style.setGridY(opts.gridy)
    style.setLogX(opts.logx)
    style.setLogY(opts.logy)

    # Create legend and set style
    histograms.createLegend.moveDefaults(dx=-0.1, dh=-0.15)
    histograms.uncertaintyMode.set(histograms.Uncertainty.StatOnly)
    styles.ratioLineStyle.append(styles.StyleLine(lineColor=13))

    # Define some variables
    nameList = []
    allShapeNuisances = []
    signalTable = {}
    myDatacardPattern = ""
    myRootfilePattern = ""

    # Find out the datacard/rootfile patterns ('is None': plain option value)
    if opts.cardPattern is None:
        mySettings = limitTools.GeneralSettings(".", [])
        myDatacardPattern = mySettings.getDatacardPattern(
            limitTools.LimitProcessType.TAUJETS)
        myRootfilePattern = mySettings.getRootfilePattern(
            limitTools.LimitProcessType.TAUJETS)
    else:
        # "MMM" -> "M%s" must be replaced before "MM" -> "%s"
        myDatacardPattern = opts.cardPattern.replace("MMM", "M%s").replace(
            "MM", "%s")
        myRootfilePattern = opts.rootfilePattern.replace("MMM", "M%s").replace(
            "MM", "%s")

    # Get mass points to consider
    massPoints = DatacardReader.getMassPointsForDatacardPattern(
        ".", myDatacardPattern)
    Print(
        "The following masses will be considered: %s" %
        (ShellStyles.HighlightAltStyle() + ", ".join(massPoints) +
         ShellStyles.NormalStyle()), True)

    # For-loop: All mass points (index was unused, plain loop suffices)
    for m in massPoints:
        # Obtain luminosity from the datacard
        myLuminosity = float(
            limitTools.readLuminosityFromDatacard(".", myDatacardPattern % m))

        # Do the plots
        doPlot(opts, int(m), nameList, allShapeNuisances, myLuminosity,
               myDatacardPattern, myRootfilePattern, signalTable)

    # Print signal table
    Print("Max contracted uncertainty for signal:", True)
    table = []
    align = "{:>15} {:>15} {:>15}"
    hLine = "=" * 50
    table.append(hLine)
    table.append(align.format("Systematic", "Minimum", "Maximum"))
    table.append(hLine)
    # For-loop: All signal systematics collected by doPlot()
    for k in signalTable.keys():
        minVal = "%.3f" % (signalTable[k]["min"])
        maxVal = "%.3f" % (signalTable[k]["max"])
        msg = align.format(k, minVal, maxVal)
        table.append(msg)
    table.append(hLine)
    for row in table:
        Print(row, False)

    msg = "All results under directory %s" % (
        ShellStyles.SuccessStyle() + opts.dirName + ShellStyles.NormalStyle())
    Print(msg, True)

    return
Ejemplo n.º 6
0
def main(opts):
    # Check that input arguments are sufficient
    if opts.settings == None:
        raise Exception(
            ErrorLabel() +
            "Please provide input parameter file with -x or --params !")

    # Load settings
    print "Loading settings:", opts.settings
    if not os.path.exists(opts.settings):
        raise Exception(ErrorLabel() +
                        "Cannot find settings file '%s'!" % opts.settings)
    os.system("python %s" %
              opts.settings)  # Catch any errors in the input datacard
    config = aux.load_module(opts.settings)
    checkSettings(config)
    os.system("cp %s ." % opts.settings)
    myFitSettingsList = config.fitSettings[:]

    # Copy unfitted cards for provenance information if necessary
    if not os.path.exists(_myOriginalDir):
        os.mkdir(_myOriginalDir)
        os.system("mv *.txt %s/." % _myOriginalDir)
        os.system("mv *.root %s/." % _myOriginalDir)
        print "Moved original datacards as provenance info to %s" % _myOriginalDir
    else:
        items = glob.glob("*.txt")
        if len(items):
            os.system("rm *.txt")
        items = glob.glob("*.root")
        if len(items):
            os.system("rm *.root")

    # Find datacards
    myLimitSettings = limitTools.GeneralSettings(_myOriginalDir, [])
    massPoints = myLimitSettings.getMassPoints(
        limitTools.LimitProcessType.TAUJETS)
    myDatacardPattern = myLimitSettings.getDatacardPattern(
        limitTools.LimitProcessType.TAUJETS)
    myRootfilePattern = myLimitSettings.getRootfilePattern(
        limitTools.LimitProcessType.TAUJETS)

    # Loop over mass points
    myDrawPlotsStatus = True
    myShapeSensitivityTestStatus = False
    myShapeSensitivityTestLabel = ""
    for m in massPoints:
        myHistogramCache = []
        # Obtain luminosity
        myLuminosity = float(
            limitTools.readLuminosityFromDatacard(_myOriginalDir, "%s" %
                                                  myDatacardPattern % m))
        # Obtain root file
        myRootFile = ROOT.TFile.Open("%s/%s" %
                                     (_myOriginalDir, myRootfilePattern % m))
        # Obtain datacard
        myOriginalCardFile = open("%s/%s" %
                                  (_myOriginalDir, myDatacardPattern % m))
        myOriginalCardLines = myOriginalCardFile.readlines()
        myOriginalCardFile.close()
        # Obtain column and nuisance info
        myColumnNames = parseColumnNames(myOriginalCardLines)
        myNuisanceInfo = parseNuisanceNames(myOriginalCardLines, myColumnNames)
        myFitParNuisanceInfo = None
        # Treat observation
        hObs = getAndRebinRateHisto("data_obs", myRootFile,
                                    config.finalBinning["shape"])
        myPath = os.getcwd()
        if "SignalInjection" in myPath:
            for k in range(0, hObs.GetNbinsX() + 1):
                hObs.SetBinContent(k, round(hObs.GetBinContent(k)))
            for n in myNuisanceInfo:
                if n["name"] == "observation":
                    n["value"] = hObs.Integral()
        if "SHAPETEST" in myPath:
            hObs.Reset()
            myShapeSensitivityTestStatus = True
            mySplit = myPath.split("_")
            for i in range(0, len(mySplit)):
                if mySplit[i] == "SHAPETEST":
                    myShapeSensitivityTestLabel = "_".join(
                        map(str, mySplit[i + 1:len(mySplit)]))
                    myShapeSensitivityTestLabel = myShapeSensitivityTestLabel.replace(
                        "UP", "Up").replace("DOWN", "Down")
        myHistogramCache.append(hObs)
        # Loop over column names
        for c in myColumnNames:
            print HighlightStyle() + "Processing column %s for mass %s" % (
                c, m) + NormalStyle()
            if _isSignal(c) or c in config.Blacklist:
                print "... skipping fit (signal or blacklist)"
                # Obtain and rebin rate histo
                hRate = getAndRebinRateHisto(c, myRootFile,
                                             config.finalBinning["shape"])
                if _isSignal(c):
                    hRate.Scale(opts.lumiProjection)
                else:
                    hRate.Scale(opts.lumiProjection * opts.bkgxsecProjection)
                myHistogramCache.append(hRate.Clone())
                # Update rate
                for n in myNuisanceInfo:
                    if n["name"] == "rate":
                        n[c] = "%f" % hRate.Integral()
                # Obtain and rebin nuisance histos
                hNuisances = getAndRebinNuisanceHistos(
                    c, myRootFile, myNuisanceInfo,
                    config.finalBinning["shape"])
                for h in hNuisances:
                    if _isSignal(c):
                        h.Scale(opts.lumiProjection)
                    else:
                        h.Scale(opts.lumiProjection * opts.bkgxsecProjection)
                myHistogramCache.extend(hNuisances)
                # Treat QCD shape uncertainty separately (for testing only; one should use the tail fit for QCD)
                #hNuisances = getAndRebinQCDShapeNuisanceHistos(c, myRootFile, hRate, myNuisanceInfo, config.finalBinning["shape"])
                #myHistogramCache.extend(hNuisances)
                # Create bin-by-bin stat. histograms for fitted distribution and update the nuisance table
                myStatHistograms = addBinByBinStatUncert(config,
                                                         c,
                                                         hRate,
                                                         myColumnNames,
                                                         myNuisanceInfo,
                                                         isSignal=True)
                myHistogramCache.extend(myStatHistograms)
                if myShapeSensitivityTestStatus and not _isSignal(c):
                    hObs.Add(hRate)  # Add the fitted histogram
                    # Add the nuisance with proper binning
                    for h in hNuisances:
                        if myShapeSensitivityTestLabel in h.GetName():
                            hObs.Add(h)
                            hObs.Add(hRate, -1.0)
            else:
                # Not signal or blacklist, do fit
                hFineBinning = myRootFile.Get(c + "_fineBinning")
                for i in range(1, hFineBinning.GetNbinsX() + 1):
                    hFineBinning.SetBinContent(
                        i,
                        hFineBinning.GetBinContent(i) * opts.lumiProjection *
                        opts.bkgxsecProjection)
                    hFineBinning.SetBinError(
                        i,
                        hFineBinning.GetBinError(i) / math.sqrt(
                            opts.lumiProjection * opts.bkgxsecProjection))
                if hFineBinning == None:
                    raise Exception(ErrorLabel() +
                                    "Cannot find histogram '%s'!" %
                                    (c + "_fineBinning"))
                hOriginalShape = myRootFile.Get(c)
                hOriginalShape.SetName(hOriginalShape.GetName() + "Original")
                for i in range(1, hOriginalShape.GetNbinsX() + 1):
                    hOriginalShape.SetBinContent(
                        i,
                        hOriginalShape.GetBinContent(i) * opts.lumiProjection *
                        opts.bkgxsecProjection)
                    hOriginalShape.SetBinError(
                        i,
                        hOriginalShape.GetBinError(i) / math.sqrt(
                            opts.lumiProjection * opts.bkgxsecProjection))
                if hOriginalShape == None:
                    raise Exception(ErrorLabel() +
                                    "Cannot find histogram '%s'!" % (c))
                # Proceed
                myFitSettings = None
                for s in myFitSettingsList:
                    if s["id"] in c:
                        myFitSettings = s
                        print "... using fitfunc: %s and range %d-%d" % (
                            s["fitfunc"], s["fitmin"], s["fitmax"])
                if myFitSettings == None:
                    raise Exception(
                        "Could not determine fit function for column '%s'!" %
                        c)
                myFitter = TailFitter.TailFitter(hFineBinning,
                                                 c,
                                                 myFitSettings["fitfunc"],
                                                 myFitSettings["fitmin"],
                                                 myFitSettings["fitmax"],
                                                 myFitSettings["applyFrom"],
                                                 doPlots=myDrawPlotsStatus,
                                                 luminosity=myLuminosity)
                # Obtain fitted rate with final binning
                myFittedRateHistograms = myFitter.getFittedRateHistogram(
                    hFineBinning, config.finalBinning["shape"],
                    myFitSettings["applyFrom"])
                myHistogramCache.extend(myFittedRateHistograms)
                # Update rate
                for n in myNuisanceInfo:
                    if n["name"] == "rate":
                        n[c] = "%f" % myFittedRateHistograms[0].Integral()
                        print "... Updated rate because of fitting %.1f -> %.1f (diff=%f)" % (
                            hFineBinning.Integral(),
                            myFittedRateHistograms[0].Integral(),
                            myFittedRateHistograms[0].Integral() /
                            hFineBinning.Integral())
                # Update all those shape nuisances (update histograms only, no need to touch nuisance table)
                for n in myNuisanceInfo:
                    if n["name"] != "observation" and n[
                            "distribution"] == "shape" and n[c] == "1":
                        #print "... Updating shape nuisance '%s' tail"%n["name"]
                        myUpdatedNuisanceHistograms = updateNuisanceTail(
                            opts, hOriginalShape, myFittedRateHistograms[0],
                            myRootFile, "%s_%s" % (c, n["name"]))
                        myHistogramCache.extend(myUpdatedNuisanceHistograms)
                if myShapeSensitivityTestStatus:
                    # Check if the nuisance histograms exist for this column
                    myHistoName = "%s_%s" % (
                        c, myShapeSensitivityTestLabel.replace(
                            "Up", "").replace("Down", ""))
                    myList = updateNuisanceTail(opts,
                                                hOriginalShape,
                                                myFittedRateHistograms[0],
                                                myRootFile,
                                                myHistoName,
                                                skipNotFoundTest=True)
                    #print "DEBUG",myHistoName,len(myList)
                    if len(myList) > 0:
                        # Add the nuisance with proper binning
                        if "Up" in myShapeSensitivityTestLabel:
                            hObs.Add(myList[0])
                            #print "DEBUG: Added nuisance",myShapeSensitivityTestLabel,myList[0].GetName(),myList[0].Integral()
                        elif "Down" in myShapeSensitivityTestLabel:
                            hObs.Add(myList[1])
                            #print "DEBUG: Added nuisance",myShapeSensitivityTestLabel,myList[1].GetName(),myList[1].Integral()
                        else:
                            raise Exception()  # Should not happen
                    else:
                        hObs.Add(myFittedRateHistograms[0]
                                 )  # Add the fitted histogram
                        #print "DEBUG: Added",myFittedRateHistograms[0].Integral()
                print "Updated shape nuisance tails (rel.uncert. kept constant, but central value changed to the fitted one)"
                # Obtain fit uncertainty histograms and add them to cache
                (huplist, hdownlist) = myFitter.calculateVariationHistograms(
                    config.finalBinning["shape"], myFitSettings["applyFrom"])
                # Plot if requested
                if myDrawPlotsStatus:
                    myArray = array.array("d", config.finalBinning["shape"])
                    hFinalBinning = hFineBinning.Rebin(
                        len(myArray) - 1, "", myArray)
                    myFitter.makeVariationPlotDetailed(
                        "", hFinalBinning, myFittedRateHistograms[0], huplist,
                        hdownlist)
                    (hupTotal,
                     hdownTotal) = myFitter.calculateTotalVariationHistograms(
                         myFittedRateHistograms[0], huplist, hdownlist)
                    myFitter.makeVariationPlotSimple("", hFinalBinning,
                                                     myFittedRateHistograms[0],
                                                     hupTotal, hdownTotal,
                                                     s["fitmin"])
                    # print total uncertainty
                    print "*** Syst. uncert. from fit: +", 1.0 - hupTotal.Integral(
                    ) / myFittedRateHistograms[0].Integral(
                    ), "-", 1.0 - hdownTotal.Integral(
                    ) / myFittedRateHistograms[0].Integral()

                    hFinalBinning.Delete()

                # Treat blancs (norm == 0)
                for i in range(0, len(huplist)):
                    if huplist[i].Integral() == 0:
                        for k in range(0, huplist[i].GetNbinsX() + 2):
                            huplist[i].SetBinContent(
                                k, myFittedRateHistograms[0].GetBinContent(k))
                    if hdownlist[i].Integral() == 0:
                        for k in range(0, hdownlist[i].GetNbinsX() + 2):
                            hdownlist[i].SetBinContent(
                                k, myFittedRateHistograms[0].GetBinContent(k))
                if config.applyFitUncertaintyAsBinByBinUncertainty:
                    # Add fit uncertainty as bin-by-bin type uncertainty
                    (hupTotal,
                     hdownTotal) = myFitter.calculateTotalVariationHistograms(
                         myFittedRateHistograms[0], huplist, hdownlist)
                    (myBinByBinUpHistograms, myBinByBinDownHistograms
                     ) = createBinnedFitUncertaintyHistograms(
                         myFittedRateHistograms[0], hupTotal, hdownTotal,
                         myFitSettings["applyFrom"], opts)
                    if not opts.noFitUncert:
                        myHistogramCache.extend(myBinByBinUpHistograms)
                        myHistogramCache.extend(myBinByBinDownHistograms)
                        # Add fit parameter nuisances to nuisance table
                        for hup in myBinByBinUpHistograms:
                            addNuisanceForIndividualColumn(
                                myColumnNames, myNuisanceInfo, c,
                                hup.GetTitle().replace("%s_%s" % (c, c), c))
                else:
                    # Add fit uncertainty as nuisances parameters
                    if not opts.noFitUncert:
                        myHistogramCache.extend(huplist)
                        myHistogramCache.extend(hdownlist)
                        # Add fit parameter nuisances to nuisance table
                        for hup in huplist:
                            addNuisanceForIndividualColumn(
                                myColumnNames, myNuisanceInfo, c,
                                hup.GetTitle())
                # Create bin-by-bin stat. histograms for fitted distribution and update the nuisance table
                myStatHistograms = addBinByBinStatUncert(
                    config,
                    c,
                    myFittedRateHistograms[0],
                    myColumnNames,
                    myNuisanceInfo,
                    0.0,
                    myFitSettings["applyFrom"],
                    isSignal=False)
                myHistogramCache.extend(myStatHistograms)
                # Clear memory
                hFineBinning.Delete()
                hOriginalShape.Delete()
        for h in myHistogramCache:
            h.SetDirectory(None)
        myRootFile.Close()
        myDrawPlotsStatus = False
        myObsIntegral = hObs.Integral()
        # Print summary info
        printSummaryInfo(myColumnNames, myNuisanceInfo, myHistogramCache, hObs,
                         m, myLuminosity, opts)
        # Histogram cache contains now all root files and nuisance table is now up to date
        # Create new root file and write
        myRootFilename = myRootfilePattern % m
        myRootFile = ROOT.TFile.Open(myRootFilename, "RECREATE")
        if myRootFile == None:
            raise Exception(ErrorLabel() + " Cannot open file '" +
                            myRootFilename + "' for output!")
        for h in myHistogramCache:
            h.SetDirectory(myRootFile)
        myRootFile.Write()
        myRootFile.Close()
        # Create new datacard file and write
        myOutput = createDatacardOutput(myOriginalCardLines, myColumnNames,
                                        myNuisanceInfo, opts, myObsIntegral)
        myFilename = myDatacardPattern % m
        myFile = open(myFilename, "w")
        myFile.write(myOutput)
        myFile.close()
        print "... Generated datacard files %s and %s" % (myFilename,
                                                          myRootFilename)
Ejemplo n.º 7
0
                if "%s_error" % item in masspoints[k]["expected"]:
                    a = float(masspoints[k]["expected"]["%s_error" % item])
                    b = float(masspoints[k]["expected"][item])
                    r = 0.0
                    if b > 0:
                        r = a / b
                    line += " %9.4f" % (r)
                else:
                    line += "      n.a."
            print line
        myFile.close()


if __name__ == "__main__":
    # Build the shared limit-tools option parser; the lep/lhc/lhcasy
    # defaults are disabled here (presumably only the asymptotic flavour
    # is relevant for this script -- TODO confirm against commonLimitTools)
    parser = commonLimitTools.createOptionParser(lepDefault=None,
                                                 lhcDefault=False,
                                                 lhcasyDefault=False,
                                                 fullOptions=False)
    parser.add_option("--printonly",
                      dest="printonly",
                      action="store_true",
                      default=False,
                      help="Print only the ready results")
    parser.add_option("--combination",
                      dest="combination",
                      action="store_true",
                      default=False,
                      help="Run combination instead of only taunu fully hadr.")
    opts = commonLimitTools.parseOptionParser(parser)

    # Obtain directory list
    # Shallow copy so later mutations do not alter opts.dirs itself
    myDirs = opts.dirs[:]
Ejemplo n.º 8
0
  if not os.path.exists(_dirname):
      os.mkdir(_dirname)
  # Apply TDR style
  style = tdrstyle.TDRStyle()
  histograms.createLegend.moveDefaults(dx=-0.1, dh=-0.15)
  histograms.uncertaintyMode.set(histograms.Uncertainty.StatOnly)
  styles.ratioLineStyle.append(styles.StyleLine(lineColor=13))
  # Find out the mass points
 
  nameList = []
  allShapeNuisances = []
  signalTable = {}
  myDatacardPattern = ""
  myRootfilePattern = ""
  if opts.cardPattern == None:
      mySettings = limitTools.GeneralSettings(".",[])
      myDatacardPattern = mySettings.getDatacardPattern(limitTools.LimitProcessType.TAUJETS)
      myRootfilePattern = mySettings.getRootfilePattern(limitTools.LimitProcessType.TAUJETS)
  else:
      myDatacardPattern = opts.cardPattern.replace("MMM","M%s").replace("MM","%s")
      myRootfilePattern = opts.rootfilePattern.replace("MMM","M%s").replace("MM","%s")
  massPoints = DatacardReader.getMassPointsForDatacardPattern(".", myDatacardPattern)
  print "The following masses are considered:",massPoints
  for m in massPoints:
      # Obtain luminosity from datacard
      myLuminosity = float(limitTools.readLuminosityFromDatacard(".",myDatacardPattern%m))
      # Do plots
      doPlot(opts,int(m),nameList,allShapeNuisances,myLuminosity,myDatacardPattern,myRootfilePattern,signalTable)
  # Print signal table
  print "Max contracted uncertainty for signal:"
  for k in signalTable.keys():
def main(opts):
    """Apply tail fits to background shapes and regenerate datacards.

    Loads the fit-settings module given via opts.settings, moves the
    original datacards/root files aside for provenance, and for each mass
    point: rebins the observation and per-column histograms, tail-fits every
    column that is neither signal nor blacklisted, propagates the fit
    uncertainties (as bin-by-bin or parameter nuisances depending on the
    config), and writes a new datacard + root file pair.

    NOTE(review): relies on module-level helpers (getAndRebinRateHisto,
    updateNuisanceTail, addBinByBinStatUncert, createDatacardOutput, ...)
    and the global _myOriginalDir, none of which are defined in this block.
    """
    # Check that input arguments are sufficient
    if opts.settings == None:
        raise Exception(ErrorLabel()+"Please provide input parameter file with -x or --params !")

    # Load settings
    print "Loading settings:",opts.settings
    if not os.path.exists(opts.settings):
        raise Exception(ErrorLabel()+"Cannot find settings file '%s'!"%opts.settings)
    os.system("python %s"%opts.settings) # Catch any errors in the input datacard
    config = aux.load_module(opts.settings)
    checkSettings(config)
    os.system("cp %s ."%opts.settings)
    myFitSettingsList = config.fitSettings[:]

    # Copy unfitted cards for provenance information if necessary
    if not os.path.exists(_myOriginalDir):
        os.mkdir(_myOriginalDir)
        os.system("mv *.txt %s/."%_myOriginalDir)
        os.system("mv *.root %s/."%_myOriginalDir)
        print "Moved original datacards as provenance info to %s"%_myOriginalDir
    else:
        # Provenance dir already exists: remove stale fitted outputs so the
        # new ones written below start from a clean slate
        items = glob.glob("*.txt")
        if len(items):
            os.system("rm *.txt")
        items = glob.glob("*.root")
        if len(items):
            os.system("rm *.root")

    # Find datacards
    myLimitSettings = limitTools.GeneralSettings(_myOriginalDir,[])
    massPoints = myLimitSettings.getMassPoints(limitTools.LimitProcessType.TAUJETS)
    myDatacardPattern = myLimitSettings.getDatacardPattern(limitTools.LimitProcessType.TAUJETS)
    myRootfilePattern = myLimitSettings.getRootfilePattern(limitTools.LimitProcessType.TAUJETS)

    # Loop over mass points
    myDrawPlotsStatus = True
    myShapeSensitivityTestStatus = False
    myShapeSensitivityTestLabel = ""
    for m in massPoints:
        myHistogramCache = []
        # Obtain luminosity
        myLuminosity = float(limitTools.readLuminosityFromDatacard(_myOriginalDir, "%s"%myDatacardPattern%m))
        # Obtain root file
        myRootFile = ROOT.TFile.Open("%s/%s"%(_myOriginalDir, myRootfilePattern%m))
        # Obtain datacard
        myOriginalCardFile = open("%s/%s"%(_myOriginalDir, myDatacardPattern%m))
        myOriginalCardLines = myOriginalCardFile.readlines()
        myOriginalCardFile.close()
        # Obtain column and nuisance info
        myColumnNames = parseColumnNames(myOriginalCardLines)
        myNuisanceInfo = parseNuisanceNames(myOriginalCardLines, myColumnNames)
        # NOTE(review): myFitParNuisanceInfo is assigned but never read in
        # this function — dead variable, candidate for removal
        myFitParNuisanceInfo = None
        # Treat observation
        hObs = getAndRebinRateHisto("data_obs", myRootFile, config.finalBinning["shape"])
        myPath = os.getcwd()
        # Special mode selected by directory name: round observed counts to
        # integers for signal-injection pseudo-data
        if "SignalInjection" in myPath:
            for k in range (0, hObs.GetNbinsX()+1):
                hObs.SetBinContent(k, round(hObs.GetBinContent(k)))
            for n in myNuisanceInfo:
                if n["name"] == "observation":
                    n["value"] = hObs.Integral()
        # Special mode selected by directory name: replace the observation by
        # a shape-varied background sum (label parsed from the path)
        if "SHAPETEST" in myPath:
            hObs.Reset()
            myShapeSensitivityTestStatus = True
            mySplit = myPath.split("_")
            for i in range(0,len(mySplit)):
                if mySplit[i] == "SHAPETEST":
                    myShapeSensitivityTestLabel = "_".join(map(str,mySplit[i+1:len(mySplit)]))
                    myShapeSensitivityTestLabel = myShapeSensitivityTestLabel.replace("UP","Up").replace("DOWN","Down")
        myHistogramCache.append(hObs)
        # Loop over column names
        for c in myColumnNames:
            print HighlightStyle()+"Processing column %s for mass %s"%(c,m)+NormalStyle()
            if _isSignal(c) or c in config.Blacklist:
                print "... skipping fit (signal or blacklist)"
                # Obtain and rebin rate histo
                hRate = getAndRebinRateHisto(c, myRootFile, config.finalBinning["shape"])
                if _isSignal(c):
                    hRate.Scale(opts.lumiProjection)
                else:
                    hRate.Scale(opts.lumiProjection*opts.bkgxsecProjection)
                myHistogramCache.append(hRate.Clone())
                # Update rate
                for n in myNuisanceInfo:
                    if n["name"] == "rate":
                        n[c] = "%f"%hRate.Integral()
                # Obtain and rebin nuisance histos
                hNuisances = getAndRebinNuisanceHistos(c, myRootFile, myNuisanceInfo, config.finalBinning["shape"])
                for h in hNuisances:
                    if _isSignal(c):
                        h.Scale(opts.lumiProjection)
                    else:
                        h.Scale(opts.lumiProjection*opts.bkgxsecProjection)
                myHistogramCache.extend(hNuisances)
                # Treat QCD shape uncertainty separately (for testing only; one should use the tail fit for QCD)
                #hNuisances = getAndRebinQCDShapeNuisanceHistos(c, myRootFile, hRate, myNuisanceInfo, config.finalBinning["shape"])
                #myHistogramCache.extend(hNuisances)
                # Create bin-by-bin stat. histograms for fitted distribution and update the nuisance table
                myStatHistograms = addBinByBinStatUncert(config, c, hRate, myColumnNames, myNuisanceInfo, isSignal=True)
                myHistogramCache.extend(myStatHistograms)
                if myShapeSensitivityTestStatus and not _isSignal(c):
                    hObs.Add(hRate) # Add the fitted histogram
                    # Add the nuisance with proper binning
                    for h in hNuisances:
                        if myShapeSensitivityTestLabel in h.GetName():
                            hObs.Add(h)
                            hObs.Add(hRate, -1.0)
            else:
                # Not signal or blacklist, do fit
                hFineBinning = myRootFile.Get(c+"_fineBinning")
                # NOTE(review): the scaling loop below dereferences
                # hFineBinning BEFORE the None check two lines later — if the
                # histogram is missing this crashes with AttributeError
                # instead of the intended error message; reorder candidate
                for i in range(1, hFineBinning.GetNbinsX()+1):
                    hFineBinning.SetBinContent(i, hFineBinning.GetBinContent(i)*opts.lumiProjection*opts.bkgxsecProjection)
                    # NOTE(review): errors shrink by sqrt of the projection —
                    # presumably modelling higher statistics; confirm intent
                    hFineBinning.SetBinError(i, hFineBinning.GetBinError(i)/math.sqrt(opts.lumiProjection*opts.bkgxsecProjection))
                if hFineBinning == None:
                    raise Exception(ErrorLabel()+"Cannot find histogram '%s'!"%(c+"_fineBinning"))
                hOriginalShape = myRootFile.Get(c)
                # NOTE(review): same late-None-check pattern as above
                hOriginalShape.SetName(hOriginalShape.GetName()+"Original")
                for i in range(1, hOriginalShape.GetNbinsX()+1):
                    hOriginalShape.SetBinContent(i, hOriginalShape.GetBinContent(i)*opts.lumiProjection*opts.bkgxsecProjection)
                    hOriginalShape.SetBinError(i, hOriginalShape.GetBinError(i)/math.sqrt(opts.lumiProjection*opts.bkgxsecProjection))
                if hOriginalShape == None:
                    raise Exception(ErrorLabel()+"Cannot find histogram '%s'!"%(c))
                # Proceed
                myFitSettings = None
                # Pick the last fit-settings entry whose id is a substring of
                # the column name
                for s in myFitSettingsList:
                    if s["id"] in c:
                        myFitSettings = s
                        print "... using fitfunc: %s and range %d-%d"%(s["fitfunc"],s["fitmin"],s["fitmax"])
                if myFitSettings == None:
                    raise Exception("Could not determine fit function for column '%s'!"%c)
                myFitter = TailFitter.TailFitter(hFineBinning, c, myFitSettings["fitfunc"], myFitSettings["fitmin"], myFitSettings["fitmax"], myFitSettings["applyFrom"], doPlots=myDrawPlotsStatus, luminosity=myLuminosity)
                # Obtain fitted rate with final binning
                myFittedRateHistograms = myFitter.getFittedRateHistogram(hFineBinning, config.finalBinning["shape"], myFitSettings["applyFrom"])
                myHistogramCache.extend(myFittedRateHistograms)
                # Update rate
                for n in myNuisanceInfo:
                    if n["name"] == "rate":
                        n[c] = "%f"%myFittedRateHistograms[0].Integral()
                        print "... Updated rate because of fitting %.1f -> %.1f (diff=%f)"%(hFineBinning.Integral(), myFittedRateHistograms[0].Integral(), myFittedRateHistograms[0].Integral()/hFineBinning.Integral())
                # Update all those shape nuisances (update histograms only, no need to touch nuisance table)
                for n in myNuisanceInfo:
                    if n["name"] != "observation" and n["distribution"] == "shape" and n[c] == "1":
                        #print "... Updating shape nuisance '%s' tail"%n["name"]
                        myUpdatedNuisanceHistograms = updateNuisanceTail(opts, hOriginalShape, myFittedRateHistograms[0], myRootFile, "%s_%s"%(c,n["name"]))
                        myHistogramCache.extend(myUpdatedNuisanceHistograms)
                if myShapeSensitivityTestStatus:
                    # Check if the nuisance histograms exist for this column
                    myHistoName = "%s_%s"%(c,myShapeSensitivityTestLabel.replace("Up","").replace("Down",""))
                    myList = updateNuisanceTail(opts, hOriginalShape, myFittedRateHistograms[0], myRootFile, myHistoName, skipNotFoundTest=True)
                    #print "DEBUG",myHistoName,len(myList)
                    if len(myList) > 0:
                        # Add the nuisance with proper binning
                        if "Up" in myShapeSensitivityTestLabel:
                            hObs.Add(myList[0])
                            #print "DEBUG: Added nuisance",myShapeSensitivityTestLabel,myList[0].GetName(),myList[0].Integral()
                        elif "Down" in myShapeSensitivityTestLabel:
                            hObs.Add(myList[1])
                            #print "DEBUG: Added nuisance",myShapeSensitivityTestLabel,myList[1].GetName(),myList[1].Integral()
                        else:
                            raise Exception() # Should not happen
                    else:
                        hObs.Add(myFittedRateHistograms[0]) # Add the fitted histogram
                        #print "DEBUG: Added",myFittedRateHistograms[0].Integral()
                print "Updated shape nuisance tails (rel.uncert. kept constant, but central value changed to the fitted one)"
                # Obtain fit uncertainty histograms and add them to cache
                (huplist, hdownlist) = myFitter.calculateVariationHistograms(config.finalBinning["shape"], myFitSettings["applyFrom"])
                # Plot if requested
                if myDrawPlotsStatus:
                    myArray = array.array("d",config.finalBinning["shape"])
                    hFinalBinning = hFineBinning.Rebin(len(myArray)-1, "", myArray)
                    myFitter.makeVariationPlotWithSeparateUncertainties("", hFinalBinning, myFittedRateHistograms[0], huplist, hdownlist, myFitSettings["applyFrom"])
                    (hupTotal, hdownTotal) = myFitter.calculateTotalVariationHistograms(myFittedRateHistograms[0], huplist, hdownlist)
                    myFitter.makeVariationPlotWithTotalUncertainties("", hFinalBinning, myFittedRateHistograms[0], hupTotal, hdownTotal, myFitSettings["applyFrom"])
                    # print total uncertainty
                    print "*** Syst. uncert. from fit: +",1.0-hupTotal.Integral()/myFittedRateHistograms[0].Integral(), "-", 1.0-hdownTotal.Integral()/myFittedRateHistograms[0].Integral()

                    hFinalBinning.Delete()

                # Treat blancs (norm == 0)
                # Empty variation histograms are replaced by the fitted rate,
                # i.e. a variation with zero effect
                for i in range(0, len(huplist)):
                    if huplist[i].Integral() == 0:
                        for k in range(0, huplist[i].GetNbinsX()+2):
                            huplist[i].SetBinContent(k, myFittedRateHistograms[0].GetBinContent(k))
                    if hdownlist[i].Integral() == 0:
                        for k in range(0, hdownlist[i].GetNbinsX()+2):
                            hdownlist[i].SetBinContent(k, myFittedRateHistograms[0].GetBinContent(k))
                if config.applyFitUncertaintyAsBinByBinUncertainty:
                    # Add fit uncertainty as bin-by-bin type uncertainty
                    (hupTotal, hdownTotal) = myFitter.calculateTotalVariationHistograms(myFittedRateHistograms[0], huplist, hdownlist)
                    (myBinByBinUpHistograms, myBinByBinDownHistograms) = createBinnedFitUncertaintyHistograms(myFittedRateHistograms[0], hupTotal, hdownTotal, myFitSettings["applyFrom"], opts)
                    if not opts.noFitUncert:
                        myHistogramCache.extend(myBinByBinUpHistograms)
                        myHistogramCache.extend(myBinByBinDownHistograms)
                        # Add fit parameter nuisances to nuisance table
                        for hup in myBinByBinUpHistograms:
                            addNuisanceForIndividualColumn(myColumnNames,myNuisanceInfo,c,hup.GetTitle().replace("%s_%s"%(c,c),c))
                else:
                    # Add fit uncertainty as nuisances parameters
                    if not opts.noFitUncert:
                        myHistogramCache.extend(huplist)
                        myHistogramCache.extend(hdownlist)
                        # Add fit parameter nuisances to nuisance table
                        for hup in huplist:
                            addNuisanceForIndividualColumn(myColumnNames,myNuisanceInfo,c,hup.GetTitle())
                # Create bin-by-bin stat. histograms for fitted distribution and update the nuisance table
                myStatHistograms = addBinByBinStatUncert(config, c, myFittedRateHistograms[0], myColumnNames, myNuisanceInfo, 0.0, myFitSettings["applyFrom"], isSignal=False)
                myHistogramCache.extend(myStatHistograms)
                # Clear memory
                hFineBinning.Delete()
                hOriginalShape.Delete()
        # Detach cached histograms from the input file so closing it does not
        # delete them
        for h in myHistogramCache:
            h.SetDirectory(None)
        myRootFile.Close()
        # Plots are drawn for the first mass point only
        myDrawPlotsStatus = False
        myObsIntegral = hObs.Integral()
        # Print summary info
        printSummaryInfo(myColumnNames, myNuisanceInfo, myHistogramCache, hObs, m, myLuminosity, opts)
        # Histogram cache contains now all root files and nuisance table is now up to date
        # Create new root file and write
        myRootFilename = myRootfilePattern%m
        myRootFile = ROOT.TFile.Open(myRootFilename, "RECREATE")
        if myRootFile == None:
            raise Exception(ErrorLabel()+" Cannot open file '"+myRootFilename+"' for output!")
        for h in myHistogramCache:
            h.SetDirectory(myRootFile)
        myRootFile.Write()
        myRootFile.Close()
        # Create new datacard file and write
        myOutput = createDatacardOutput(myOriginalCardLines, myColumnNames, myNuisanceInfo, opts, myObsIntegral)
        myFilename = myDatacardPattern%m
        myFile = open(myFilename, "w")
        myFile.write(myOutput)
        myFile.close()
        print "... Generated datacard files %s and %s"%(myFilename, myRootFilename)
Ejemplo n.º 10
0
def createOptionParser(lepDefault=None, lhcDefault=None, lhcasyDefault=None):
    """Build the limit-calculation option parser.

    Thin module-level convenience wrapper; all work is delegated to
    commonLimitTools.createOptionParser with the given CLs-type defaults.
    """
    parser = commonLimitTools.createOptionParser(lepDefault,
                                                 lhcDefault,
                                                 lhcasyDefault)
    return parser
Ejemplo n.º 11
0
                    settings.getDatacardPattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                rootfilePatterns=[
                    settings.getRootfilePattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                clsType=combine.LHCTypeAsymptotic(opts),
                postfix=pfix)
        else:
            return False
    return True


if __name__ == "__main__":
    parser = commonLimitTools.createOptionParser(lepType, lhcType,
                                                 lhcTypeAsymptotic)
    parser.add_option("--postfix",
                      dest="postfix",
                      default="",
                      help="Postfix to add to output directory name")
    opts = commonLimitTools.parseOptionParser(parser)
    # General settings

    myDirs = opts.dirs[:]
    if len(myDirs) == 0:
        myDirs.append(".")

    for myDir in myDirs:
        print "Considering directory:", myDir
        settings = commonLimitTools.GeneralSettings(myDir, opts.masspoints)
        print "The following masses are considered:", settings.getMassPoints(
if __name__ == "__main__":
    def addToDatacards(myDir, massPoints, dataCardList, rootFileList, dataCardPattern, rootFilePattern):
        """Register a (datacard, rootfile) pattern pair for the limit run.

        For mass-parametrized patterns (dataCardPattern contains "%s" and a
        root file pattern is given) the massPoints list is narrowed IN PLACE
        to the points actually found on disk; fixed-name cards are added only
        if the file exists.
        """
        isMassParametrized = rootFilePattern != None and "%s" in dataCardPattern
        if not isMassParametrized:
            # Plain file name: register it only when present in the directory
            if os.path.exists(dataCardPattern):
                dataCardList.append(dataCardPattern)
                rootFileList.append(rootFilePattern)
            return
        foundPoints = DatacardReader.getMassPointsForDatacardPattern(myDir, dataCardPattern)
        if len(foundPoints) == 0:
            return
        # Re-query with the requested mass point filter and replace the list
        # contents in place so the caller sees the intersection
        filteredPoints = DatacardReader.getMassPointsForDatacardPattern(myDir, dataCardPattern, massPoints)
        massPoints[:] = filteredPoints
        dataCardList.append(dataCardPattern)
        rootFileList.append(rootFilePattern)

    parser = commonLimitTools.createOptionParser(lepType, lhcType, lhcTypeAsymptotic)
    opts = commonLimitTools.parseOptionParser(parser)
    # General settings

    myDirs = opts.dirs[:]
    if len(myDirs) == 0:
        myDirs.append(".")

    for myDir in myDirs:
        print "Considering directory:",myDir
        datacardPatterns = []
        rootFilePatterns = []
        myMassPoints = []
        if len(opts.masspoints) > 0:
            myMassPoints = opts.masspoints[:]
        # taunu, tau+jets final state
Ejemplo n.º 13
0
    def _createInjection(self, mass, datacardFiles):
        if not self.brlimit:
            raise Exception(
                "Signal injection supported only for brlimit for now")
        if len(datacardFiles) != 1:
            raise Exception(
                "Signal injection supported only for one datacard for now (got %d)"
                % len(datacardFiles))
        if len(self.multicrabConfiguration["rootfiles"]) != 1:
            raise Exception(
                "Signal injection supported only for one root file for now (got %d)"
                % len(self.configuration["rootfiles"]))

        fileName = "runCombine_LHCasy_SignalInjected_m" + mass

        shutil.copy(
            os.path.join(os.environ["CMSSW_BASE"], "bin",
                         os.environ["SCRAM_ARCH"], "combine"), self.dirname)
        shutil.copy(
            os.path.join(os.environ["CMSSW_BASE"], "bin",
                         os.environ["SCRAM_ARCH"], "text2workspace.py"),
            self.dirname)
        shutil.copy(
            os.path.join(os.environ["CMSSW_BASE"], "src", "HiggsAnalysis",
                         "NtupleAnalysis", "scripts",
                         "combineInjectSignalLight.py"), self.dirname)
        tar = tarfile.open(os.path.join(self.dirname, "python.tar.gz"),
                           mode="w:gz",
                           dereference=True)
        tar.add(os.path.join(os.environ["CMSSW_BASE"], "python"),
                arcname="python")
        tar.close()

        datacard = datacardFiles[0]
        rootfile = self.multicrabConfiguration["rootfiles"][0] % mass
        rootfileSignal = self.multicrabConfiguration["rootfiles"][
            0] % self.opts.injectSignalMass

        rfs = ""
        if rootfileSignal != rootfile:
            rfs = rootfileSignal

        command = """
#!/bin/bash

SEED_START=1
NUMBER_OF_ITERATIONS={NTOYS}

if [ $# -ge 1 ]; then
    SEED_START=$(($1 * 10000))
fi

tar zxf python.tar.gz
export PYTHONPATH=$PWD/python:$PYTHONPATH

if [ ! -d original ]; then
    mkdir original
    mv {DATACARD} {ROOTFILE} {ROOTFILESIGNAL_OR_EMPTY} original
fi

function runcombine {{
    ./combineInjectSignalLight.py --inputDatacard original/{DATACARD} --inputRoot original/{ROOTFILE} --inputRootSignal original/{ROOTFILESIGNAL} --outputDatacard {DATACARD} --outputRoot {ROOTFILE} --brtop {BRTOP} --brh {BRHPLUS} -s $1
    ./text2workspace.py ./{DATACARD} -P HiggsAnalysis.CombinedLimit.ChargedHiggs:brChargedHiggs -o workspaceM{MASS}.root
#    combine  -M Asymptotic --picky -v 2 --rAbsAcc 0.00001 --rMin 0 --rMax 1.0 -m {MASS} -n obs_m{MASS} -d workspaceM{MASS}.root
    ./combine {OPTS} --rMin {RMIN} --rMax {RMAX} -m {MASS} -n inj_m{MASS} -d workspaceM{MASS}.root
    mv higgsCombineinj_m{MASS}.Asymptotic.mH{MASS}.root higgsCombineinj_m{MASS}.Asymptotic.mH{MASS}.seed$1.root
}}


for ((I=0; I<$NUMBER_OF_ITERATIONS; I++)); do
    runcombine $(($SEED_START+$I))
done

hadd higgsCombineinj_m{MASS}.Asymptotic.mH{MASS}.root higgsCombineinj_m{MASS}.Asymptotic.mH{MASS}.seed*.root

""".format(
            DATACARD=datacard,
            ROOTFILE=rootfile,
            ROOTFILESIGNAL=rootfileSignal,
            ROOTFILESIGNAL_OR_EMPTY=rfs,
            BRTOP=self.opts.injectSignalBRTop,
            BRHPLUS=self.opts.injectSignalBRHplus,
            NTOYS=self.opts.injectNumberToys,
            MASS=mass,
            OPTS=self.optionsObservedAndExpected.getValue(mass),
            RMIN=self.rMin.getValue(mass),
            RMAX=self.rMax.getValue(mass),
        )

        if "signalInjection" not in self.configuration:
            self.configuration["signalInjection"] = {
                "mass": self.opts.injectSignalMass,
                "brTop": self.opts.injectSignalBRTop,
                "brHplus": self.opts.injectSignalBRHplus
            }
        if not os.path.exists(os.path.join(self.dirname, "limits.json")):
            # Insert luminosity to limits.json already here
            limits = {
                "luminosity":
                commonLimitTools.readLuminosityFromDatacard(
                    self.dirname, datacard)
            }
            f = open(os.path.join(self.dirname, "limits.json"), "w")
            json.dump(limits, f, sort_keys=True, indent=2)
            f.close()

        aux.writeScript(os.path.join(self.dirname, fileName), command)
        self.signalInjectionScripts[mass] = fileName
def main(opts, settings, myDir):
    """Dispatch the limit calculation for one directory.

    Depending on whether the datacards are LandS- or combine-style
    (settings.isLands()/isCombine()) and on the requested CLs flavour
    (opts.lepType / opts.lhcType / opts.lhcTypeAsymptotic), generates the
    corresponding multicrab tasks or runs the asymptotic limit.

    Returns True when a supported combination was handled, False otherwise.

    NOTE(review): the local lhcTypeAsymptotic/crabScheduler/crabOptions
    assignments below shadow-prepare defaults; lhcTypeAsymptotic itself is
    never read in this function.
    """
    postfix = "taujets"

#    lepType = True
#    lhcType = True
    lhcTypeAsymptotic = True

    crabScheduler = "arc"
    crabOptions = {
#        "GRID": [
#            "ce_white_list = jade-cms.hip.fi",
#            "ce_white_list = korundi.grid.helsinki.fi",
#            ]
        }
    if settings.isLands():
        if opts.lepType:
            lands.generateMultiCrab(
                opts,
                myDir,
                massPoints = settings.getMassPoints(commonLimitTools.LimitProcessType.TAUJETS),
                datacardPatterns = [settings.getDatacardPattern(commonLimitTools.LimitProcessType.TAUJETS)],
                rootfilePatterns = [settings.getRootfilePattern(commonLimitTools.LimitProcessType.TAUJETS)],
                clsType = lands.LEPType(opts.brlimit,opts.sigmabrlimit,toysPerJob=100,firstSeed=settings.getFirstSeed()),
                numberOfJobs = 10,
                postfix = postfix+"_lep_toys1k",
                crabScheduler=crabScheduler, crabOptions=crabOptions)
        elif opts.lhcType:
            # myVR: per-mass (rMin, rMax, step) ranges for the hybrid CLs scan
            myVR = None
            if opts.brlimit:
                if commonLimitTools.isHeavyHiggs(settings.getMassPoints(commonLimitTools.LimitProcessType.TAUJETS)):
                    raise Exception("Error: --brlimit is not available for heavy H+! Please use --sigmabrlimit !")
                myVR = {"default": None,
                    # Initially obtained from asymp. limit as min/max of +-2 sigma and observed
                    # After that, with trial and error of hybrid limit (e.g. by looking plot*.gif plots)
                    # Light H+, values calibrated with 2011A, met>50, loose delta phi
                    "80":  ("0.001",  "0.08", "x1.05"), 
                    "90":  ("0.001",  "0.07", "x1.05"), 
                    "100": ("0.001", "0.06", "x1.05"), 
                    "120": ("0.0005", "0.04", "x1.05"), 
                    "140": ("0.0001", "0.03", "x1.05"), 
                    "150": ("0.0001", "0.025", "x1.03"),
                    "155": ("0.0001", "0.02", "x1.03"),
                    "160": ("0.0001", "0.02", "x1.03"), 
                }
            if opts.sigmabrlimit:
                myVR = {"default": None,
                    # Initially obtained from asymp. limit as min/max of +-2 sigma and observed
                    # After that, with trial and error of hybrid limit (e.g. by looking plot*.gif plots)
                    # Light H+, values calibrated with 2011A, met>50, loose delta phi
                    "80":  ("1",  "20", "x1.05"),
                    "90":  ("1",  "20", "x1.05"),
                    "100": ("0.5", "20", "x1.05"),
                    "120": ("0.5", "20", "x1.05"),
                    "140": ("0.5", "10", "x1.05"),
                    "150": ("0.1", "10", "x1.03"),
                    "155": ("0.1", "10", "x1.03"),
                    "160": ("0.1", "10", "x1.03"),
                    "180": ("0.01", "5", "x1.05"),
                    "190": ("0.01", "5", "x1.05"),
                    "200": ("0.01", "5", "x1.05"),
                    "220": ("0.01", "5", "x1.03"),
                    "250": ("0.01", "5", "x1.03"),
                    "300": ("0.01", "5", "x1.03"),
                    "350": ("0.01", "5", "x1.03"),
                    "400": ("0.005", "1", "x1.03"),
                    "500": ("0.005", "1", "x1.03"),
                    "600": ("0.005", "1", "x1.03"),
                }
            lands.generateMultiCrab(
                opts,
                myDir,
                massPoints = settings.getMassPoints(commonLimitTools.LimitProcessType.TAUJETS),
                datacardPatterns = [settings.getDatacardPattern(commonLimitTools.LimitProcessType.TAUJETS)],
                rootfilePatterns = [settings.getRootfilePattern(commonLimitTools.LimitProcessType.TAUJETS)],
                clsType = lands.LHCType(opts.brlimit,opts.sigmabrlimit,toysCLsb=_ntoysCLsb(),toysCLb=_ntoysCLb(),firstSeed=settings.getFirstSeed(),vR=myVR),
                numberOfJobs = _njobs(),
                postfix = postfix+"_lhc_jobs160_sb150_b75",
                crabScheduler=crabScheduler, crabOptions=crabOptions)
        elif opts.lhcTypeAsymptotic:
            lands.produceLHCAsymptotic(
                opts,
                myDir,
                massPoints = settings.getMassPoints(commonLimitTools.LimitProcessType.TAUJETS),
                datacardPatterns = [settings.getDatacardPattern(commonLimitTools.LimitProcessType.TAUJETS)],
                rootfilePatterns = [settings.getRootfilePattern(commonLimitTools.LimitProcessType.TAUJETS)],
                clsType = lands.LHCTypeAsymptotic(opts.brlimit,opts.sigmabrlimit),
                postfix = postfix+"_lhcasy"
                )
        else:
            return False
    elif settings.isCombine():
        # Combine supports only the asymptotic flavour; the hybrid flavours
        # raise explicitly instead of silently returning False
        if opts.lepType:
            raise Exception("LEP type Hybrid CLs not implemented yet for combine")
        elif opts.lhcType:
            raise Exception("LHC type Hybrid CLs not implemented yet for combine")
        elif opts.lhcTypeAsymptotic:
            pfix = postfix #+"_lhcasy"
            if opts.postfix != "":
                pfix += "_"+opts.postfix
            combine.produceLHCAsymptotic(
                opts,
                myDir,
                massPoints = settings.getMassPoints(commonLimitTools.LimitProcessType.TAUJETS),
                datacardPatterns = [settings.getDatacardPattern(commonLimitTools.LimitProcessType.TAUJETS)],
                rootfilePatterns = [settings.getRootfilePattern(commonLimitTools.LimitProcessType.TAUJETS)],
                clsType = combine.LHCTypeAsymptotic(opts),
                postfix = pfix
                )
        else:
            return False
    return True
Ejemplo n.º 15
0
def parseOptionParser(parser):
    """Parse the command-line options via commonLimitTools and return them.

    Fix: the parsed options object is now returned instead of being
    silently discarded — elsewhere in this codebase the result is always
    assigned (opts = commonLimitTools.parseOptionParser(parser)), so the
    wrapper previously made the parsed options unreachable for its callers.
    Backward compatible: callers that ignored the old None return still work.
    """
    return commonLimitTools.parseOptionParser(parser)
Ejemplo n.º 16
0
def createOptionParser(lepDefault=None, lhcDefault=None, lhcasyDefault=None):
    """Construct the shared limit option parser.

    Delegates directly to commonLimitTools.createOptionParser, forwarding
    the default enable flags for the LEP, LHC and LHC-asymptotic CLs types.
    """
    myParser = commonLimitTools.createOptionParser(
        lepDefault, lhcDefault, lhcasyDefault)
    return myParser
Ejemplo n.º 17
0
    def _createInjection(self, mass, datacardFiles):
        if not self.brlimit:
            raise Exception("Signal injection supported only for brlimit for now")
        if len(datacardFiles) != 1:
            raise Exception("Signal injection supported only for one datacard for now (got %d)" % len(datacardFiles))
        if len(self.multicrabConfiguration["rootfiles"]) != 1:
            raise Exception("Signal injection supported only for one root file for now (got %d)" % len(self.configuration["rootfiles"]))

        fileName = "runCombine_LHCasy_SignalInjected_m" + mass

        shutil.copy(os.path.join(os.environ["CMSSW_BASE"], "bin", os.environ["SCRAM_ARCH"], "combine"), self.dirname)
        shutil.copy(os.path.join(os.environ["CMSSW_BASE"], "bin", os.environ["SCRAM_ARCH"], "text2workspace.py"), self.dirname)
        shutil.copy(os.path.join(os.environ["CMSSW_BASE"], "src", "HiggsAnalysis", "NtupleAnalysis", "scripts", "combineInjectSignalLight.py"), self.dirname)
        tar = tarfile.open(os.path.join(self.dirname, "python.tar.gz"), mode="w:gz", dereference=True)
        tar.add(os.path.join(os.environ["CMSSW_BASE"], "python"), arcname="python")
        tar.close()

        datacard = datacardFiles[0]
        rootfile = self.multicrabConfiguration["rootfiles"][0] % mass
        rootfileSignal = self.multicrabConfiguration["rootfiles"][0] % self.opts.injectSignalMass

        rfs = ""
        if rootfileSignal != rootfile:
            rfs = rootfileSignal

        command = """
#!/bin/bash

SEED_START=1
NUMBER_OF_ITERATIONS={NTOYS}

if [ $# -ge 1 ]; then
    SEED_START=$(($1 * 10000))
fi

tar zxf python.tar.gz
export PYTHONPATH=$PWD/python:$PYTHONPATH

if [ ! -d original ]; then
    mkdir original
    mv {DATACARD} {ROOTFILE} {ROOTFILESIGNAL_OR_EMPTY} original
fi

function runcombine {{
    ./combineInjectSignalLight.py --inputDatacard original/{DATACARD} --inputRoot original/{ROOTFILE} --inputRootSignal original/{ROOTFILESIGNAL} --outputDatacard {DATACARD} --outputRoot {ROOTFILE} --brtop {BRTOP} --brh {BRHPLUS} -s $1
    ./text2workspace.py ./{DATACARD} -P HiggsAnalysis.CombinedLimit.ChargedHiggs:brChargedHiggs -o workspaceM{MASS}.root
#    combine  -M Asymptotic --picky -v 2 --rAbsAcc 0.00001 --rMin 0 --rMax 1.0 -m {MASS} -n obs_m{MASS} -d workspaceM{MASS}.root
    ./combine {OPTS} --rMin {RMIN} --rMax {RMAX} -m {MASS} -n inj_m{MASS} -d workspaceM{MASS}.root
    mv higgsCombineinj_m{MASS}.Asymptotic.mH{MASS}.root higgsCombineinj_m{MASS}.Asymptotic.mH{MASS}.seed$1.root
}}


for ((I=0; I<$NUMBER_OF_ITERATIONS; I++)); do
    runcombine $(($SEED_START+$I))
done

hadd higgsCombineinj_m{MASS}.Asymptotic.mH{MASS}.root higgsCombineinj_m{MASS}.Asymptotic.mH{MASS}.seed*.root

""".format(
    DATACARD=datacard, ROOTFILE=rootfile, ROOTFILESIGNAL=rootfileSignal, ROOTFILESIGNAL_OR_EMPTY=rfs,
    BRTOP=self.opts.injectSignalBRTop, BRHPLUS=self.opts.injectSignalBRHplus,
    NTOYS=self.opts.injectNumberToys, MASS=mass,
    OPTS=self.optionsObservedAndExpected.getValue(mass),
    RMIN=self.rMin.getValue(mass),
    RMAX=self.rMax.getValue(mass),
)

        if "signalInjection" not in self.configuration:
            self.configuration["signalInjection"] = {
                "mass": self.opts.injectSignalMass,
                "brTop": self.opts.injectSignalBRTop,
                "brHplus": self.opts.injectSignalBRHplus
            }
        if not os.path.exists(os.path.join(self.dirname, "limits.json")):
            # Insert luminosity to limits.json already here
            limits = {"luminosity": commonLimitTools.readLuminosityFromDatacard(self.dirname, datacard)}
            f = open(os.path.join(self.dirname, "limits.json"), "w")
            json.dump(limits, f, sort_keys=True, indent=2)
            f.close()

        aux.writeScript(os.path.join(self.dirname, fileName), command)
        self.signalInjectionScripts[mass] = fileName
Ejemplo n.º 18
0
                line += " %9.5f"%(float(masspoints[k]["expected"][item]))
            for item in myKeys:
                if "%s_error"%item in masspoints[k]["expected"]:
                    a = float(masspoints[k]["expected"]["%s_error"%item])
                    b = float(masspoints[k]["expected"][item])
                    r = 0.0
                    if b > 0:
                        r = a/b
                    line += " %9.4f"%(r)
                else:
                    line += "      n.a."
            print line
        myFile.close()

if __name__ == "__main__":
    parser = commonLimitTools.createOptionParser(lepDefault=None, lhcDefault=False, lhcasyDefault=False, fullOptions=False)
    parser.add_option("--printonly", dest="printonly", action="store_true", default=False, help="Print only the ready results")
    parser.add_option("--combination", dest="combination", action="store_true", default=False, help="Run combination instead of only taunu fully hadr.")
    opts = commonLimitTools.parseOptionParser(parser)

    # Build the list of result directories: take those given on the command
    # line; when none (or only ".") was supplied, scan the working tree for
    # directories containing "datacards_" in their name.
    myDirs = opts.dirs[:]
    if not myDirs or myDirs == ["."]:
        myDirs = [os.path.join(parent, sub)
                  for parent, subdirs, filenames in os.walk('.')
                  for sub in subdirs
                  if "datacards_" in sub]
        if not myDirs:
            raise Exception("Error: Could not find any sub directories starting with 'datacards_' below this directory!")
Ejemplo n.º 19
0
    style = tdrstyle.TDRStyle()
    histograms.createLegend.moveDefaults(dx=-0.1, dh=-0.15)
    histograms.uncertaintyMode.set(histograms.Uncertainty.StatOnly)
    styles.ratioLineStyle.append(styles.StyleLine(lineColor=13))
    # Find out the mass points
   
    nameList = []
    allShapeNuisances = []
    signalTable = {}
    myDatacardPattern = ""
    myRootfilePattern = ""
    if opts.cardPattern == None:
        mySettings = limitTools.GeneralSettings(".",[])
        myDatacardPattern = mySettings.getDatacardPattern(limitTools.LimitProcessType.TAUJETS)
        myRootfilePattern = mySettings.getRootfilePattern(limitTools.LimitProcessType.TAUJETS)
    else:
        myDatacardPattern = opts.cardPattern.replace("MMM","M%s").replace("MM","%s")
        myRootfilePattern = opts.rootfilePattern.replace("MMM","M%s").replace("MM","%s")
    massPoints = DatacardReader.getMassPointsForDatacardPattern(".", myDatacardPattern)
    print "The following masses are considered:",massPoints
    for m in massPoints:
        # Obtain luminosity from datacard
        myLuminosity = float(limitTools.readLuminosityFromDatacard(".",myDatacardPattern%m))
        # Do plots
        doPlot(opts,int(m),nameList,allShapeNuisances,myLuminosity,myDatacardPattern,myRootfilePattern,signalTable)
    # Print signal table
    print "Max contracted uncertainty for signal:"
    for k in signalTable.keys():
#        print "Key: "+str(k)
        print "%s, %.3f--%.3f"%(k, signalTable[k]["min"],signalTable[k]["max"])
Ejemplo n.º 20
0
        print "br(tb) uncert: minus: %f-%f, plus %f-%f"%(brTBUncertMinusMin,brTBUncertMinusMax,brTBUncertPlusMin,brTBUncertPlusMax)
        print "br(taunu+tb) uncert: minus: %f-%f, plus %f-%f"%(brCombUncertMinusMin,brCombUncertMinusMax,brCombUncertPlusMin,brCombUncertPlusMax)
  

if __name__ == "__main__":

    def addToDatacards(myDir, massPoints, dataCardList, rootFileList, dataCardPattern, rootFilePattern):
        m = DatacardReader.getMassPointsForDatacardPattern(myDir, dataCardPattern)
        if len(m) > 0:
            m = DatacardReader.getMassPointsForDatacardPattern(myDir, dataCardPattern, massPoints)
            del massPoints[:]
            massPoints.extend(m)
            dataCardList.append(dataCardPattern)
            rootFileList.append(rootFilePattern)

    parser = commonLimitTools.createOptionParser(False, False, True)
    parser.add_option("--scen", dest="scenarios", action="append", default=[], help="MSSM scenarios")
    parser.add_option("-t", "--tanbeta", dest="tanbeta", action="append", default=[], help="tanbeta values (will scan only these)")
    parser.add_option("--tanbetarangemin", dest="tanbetarangemin", action="append", default=[], help="tanbeta values minimum range")
    parser.add_option("--tanbetarangemax", dest="tanbetarangemax", action="append", default=[], help="tanbeta values maximum range")
    parser.add_option("--evalUuncert", dest="evaluateUncertainties", action="store_true", default=False, help="Make plots of theoretical uncertainties")
    parser.add_option("--creategridjobs", dest="creategridjobs", action="store_true", default=False, help="Create crab task dirs for running on grid")
    parser.add_option("--gridmassive", dest="gridRunAllMassesInOneJob", action="store_true", default=False, help="Crab jobs run all masses in one job (default=1 job / mass)")
    opts = commonLimitTools.parseOptionParser(parser)
    if opts.rmin == None:
        opts.rmin = "0"
    if opts.rmax == None:
        opts.rmax = "1" # To facilitate the search for different tan beta values
    
    if opts.creategridjobs:
        print "*** Start creating individual crab job directories for grid submission ... ***"
Ejemplo n.º 21
0
def parseOptionParser(parser):
    """Parse the command line options via the shared helper and return them.

    Thin wrapper around commonLimitTools.parseOptionParser().  The previous
    version dropped the return value even though the wrapped function's
    result is used elsewhere in this file
    (``opts = commonLimitTools.parseOptionParser(parser)``); propagate it
    here as well.  Returning a value where None was returned before is
    backward compatible for existing callers.
    """
    return commonLimitTools.parseOptionParser(parser)
Ejemplo n.º 22
0
def getCombineResultPassedStatus(opts, brContainer, mHp, tanbeta, resultKey, scen):
    """Ensure a combine result exists for (mHp, tanbeta) and return its passed status.

    First tries to reuse an already-stored result from brContainer; otherwise
    looks for an existing result directory on disk (matching the per-mass or
    all-masses postfix) and reads the limit tree from its higgsCombine output
    file; as a last resort produces the scaled datacards and runs combine.

    Returns None when the result is skipped (grid-job creation mode, combine
    failed, or this call only queued work for another mass point); otherwise
    the value of brContainer.getPassedStatus(mHp, tanbeta, resultKey).
    """
    reuseStatus = False
    if not brContainer.resultExists(mHp, tanbeta):
        # Produce cards
        # Directory name postfixes used to locate previously produced results.
        myPostFix = "lhcasy_%s_mHp%s_tanbetascan%.1f"%(scen,mHp,tanbeta)
        myPostFixAllMasses = "lhcasy_%s_mHpAll_tanbetascan%.1f"%(scen,tanbeta)
        myList = os.listdir(".")
        myList.sort()
        myResultDir = None
        myResultFound = False
        # Pick the last (alphabetically) directory matching either postfix.
        for item in myList:
            if myPostFix in item or myPostFixAllMasses in item:
                myResultDir = item
        if myResultDir != None:
            myList = os.listdir("./%s"%myResultDir)
            for item in myList:
                if item.startswith("higgsCombineobs_m%s"%mHp):
                    # Read the asymptotic limit values from the "limit" tree.
                    f = ROOT.TFile.Open(os.path.join(myResultDir, item))
                    myTree = f.Get("limit")
                    myValue = array.array('d',[0])
                    myTree.SetBranchAddress("limit", myValue)
                    myResult = commonLimitTools.Result(mHp)
                    # A complete asymptotic result has exactly 6 entries:
                    # -2/-1 sigma, median, +1/+2 sigma expected, then observed.
                    if myTree.GetEntries() != 6:
                        myResult.failed = True
                    else:
                        myResult.failed = False
                        # Entry order fixed by combine: quantiles then observed.
                        i = 0
                        while i < myTree.GetEntries():
                            myTree.GetEvent(i)
                            if i == 0:
                                myResult.expectedMinus2Sigma = myValue[0]
                            elif i == 1:
                                myResult.expectedMinus1Sigma = myValue[0]
                            elif i == 2:
                                myResult.expected = myValue[0]
                            elif i == 3:
                                myResult.expectedPlus1Sigma = myValue[0]
                            elif i == 4:
                                myResult.expectedPlus2Sigma = myValue[0]
                            elif i == 5:
                                myResult.observed = myValue[0]
                            i += 1
                        myResultFound = True
                        brContainer._readFromDatabase(mHp, tanbeta)
                        brContainer.setCombineResult(mHp, tanbeta, myResult)
                    f.Close()
        if not myResultFound:
            massInput = [mHp]
            postFixInput = myPostFix
            if opts.gridRunAllMassesInOneJob:
                # In all-masses-in-one-job mode only the first mass point
                # triggers the (single) job covering every mass.
                if mHp != opts.masspoints[0]:
                    return None
                massInput = opts.masspoints[:]
                postFixInput = myPostFixAllMasses
            # Result does not exist, let's calculate it
            if opts.gridRunAllMassesInOneJob:
                for m in opts.masspoints:
                    brContainer.produceScaledCards(m, tanbeta)
            else:
                brContainer.produceScaledCards(mHp, tanbeta)
            # Run Combine
            # Combine needs a CMSSW environment unless we only create grid jobs.
            if "CMSSW_BASE" in os.environ or opts.creategridjobs:
                resultContainer = combine.produceLHCAsymptotic(opts, ".", massPoints=massInput,
                    datacardPatterns = brContainer.getDatacardPatterns(),
                    rootfilePatterns = brContainer.getRootfilePatterns(),
                    clsType = combine.LHCTypeAsymptotic(opts),
                    postfix = postFixInput,
                    quietStatus = True)
                if resultContainer != None and len(resultContainer.results) > 0:
                    result = resultContainer.results[0]
                    # Store result
                    brContainer.setCombineResult(mHp, tanbeta, result)
            else:
                print "... Skipping combine (assuming debug is intended; to run combine, do first cmsenv) ..."
    else:
        reuseStatus = True
    #if brContainer.resultExists(mHp, tanbeta):
        #myContainer = brContainer
    #else:
        #raise Exception("No datacards present")
    if opts.creategridjobs:
        return None
    
    # Print output
    # Summarize theory cross section and combine limit; only printed for
    # freshly computed results (reused ones were reported earlier).
    s = "- mHp=%s, tanbeta=%.1f, sigmaTheory="%(mHp, tanbeta)
    if brContainer.getResult(mHp, tanbeta)["sigmaTheory"] == None:
        s += "None"
    else:
        s += "%.3f"%brContainer.getResult(mHp, tanbeta)["sigmaTheory"]
    if brContainer.getFailedStatus(mHp, tanbeta):
        s += " sigmaCombine (%s)=failed"%resultKey
    else:
        s += " sigmaCombine (%s)=%.3f, passed=%d"%(resultKey, brContainer.getCombineResultByKey(mHp, tanbeta, resultKey), brContainer.getPassedStatus(mHp, tanbeta, resultKey))
    if not reuseStatus:
        print s
    # return limit from combine
    if brContainer.getFailedStatus(mHp, tanbeta):
        return None
    return brContainer.getPassedStatus(mHp, tanbeta, resultKey)
Ejemplo n.º 23
0
def main(opts, settings, myDir):
    """Generate limit-calculation tasks for the taujets final state.

    Dispatches on the datacard flavour (LandS vs Combine) and on the CLs
    method requested via opts (lepType / lhcType / lhcTypeAsymptotic).
    For LandS all three methods are supported; for Combine only the
    asymptotic method is implemented and the hybrid variants raise.

    Returns True when a task was generated, False when no supported method
    was selected for the given flavour.
    """
    postfix = "taujets"

    #    lepType = True
    # NOTE(review): this local is never read -- the branches below test
    # opts.lhcType instead.  Left in place to preserve behavior exactly.
    lhcType = True
    #    lhcTypeAsymptotic = True

    crabScheduler = "arc"
    crabOptions = {
        #        "GRID": [
        #            "ce_white_list = jade-cms.hip.fi",
        #            "ce_white_list = korundi.grid.helsinki.fi",
        #            ]
    }
    if settings.isLands():
        if opts.lepType:
            lands.generateMultiCrab(
                opts,
                myDir,
                massPoints=settings.getMassPoints(
                    commonLimitTools.LimitProcessType.TAUJETS),
                datacardPatterns=[
                    settings.getDatacardPattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                rootfilePatterns=[
                    settings.getRootfilePattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                clsType=lands.LEPType(opts.brlimit,
                                      opts.sigmabrlimit,
                                      toysPerJob=100,
                                      firstSeed=settings.getFirstSeed()),
                numberOfJobs=10,
                postfix=postfix + "_lep_toys1k",
                crabScheduler=crabScheduler,
                crabOptions=crabOptions)
        elif opts.lhcType:
            # myVR: per-mass (rMin, rMax, multiplier) ranges for the hybrid
            # CLs scan; keys are mass points as strings, "default" applies
            # to masses not listed.
            myVR = None
            if opts.brlimit:
                if commonLimitTools.isHeavyHiggs(
                        settings.getMassPoints(
                            commonLimitTools.LimitProcessType.TAUJETS)):
                    raise Exception(
                        "Error: --brlimit is not available for heavy H+! Please use --sigmabrlimit !"
                    )
                myVR = {
                    "default": None,
                    # Initially obtained from asymp. limit as min/max of +-2 sigma and observed
                    # After that, with trial and error of hybrid limit (e.g. by looking plot*.gif plots)
                    # Light H+, values calibrated with 2011A, met>50, loose delta phi
                    "80": ("0.001", "0.08", "x1.05"),
                    "90": ("0.001", "0.07", "x1.05"),
                    "100": ("0.001", "0.06", "x1.05"),
                    "120": ("0.0005", "0.04", "x1.05"),
                    "140": ("0.0001", "0.03", "x1.05"),
                    "150": ("0.0001", "0.025", "x1.03"),
                    "155": ("0.0001", "0.02", "x1.03"),
                    "160": ("0.0001", "0.02", "x1.03"),
                }
            if opts.sigmabrlimit:
                myVR = {
                    "default": None,
                    # Initially obtained from asymp. limit as min/max of +-2 sigma and observed
                    # After that, with trial and error of hybrid limit (e.g. by looking plot*.gif plots)
                    # Light H+, values calibrated with 2011A, met>50, loose delta phi
                    "80": ("1", "20", "x1.05"),
                    "90": ("1", "20", "x1.05"),
                    "100": ("0.5", "20", "x1.05"),
                    "120": ("0.5", "20", "x1.05"),
                    "140": ("0.5", "10", "x1.05"),
                    "150": ("0.1", "10", "x1.03"),
                    "155": ("0.1", "10", "x1.03"),
                    "160": ("0.1", "10", "x1.03"),
                    "180": ("0.01", "5", "x1.05"),
                    "190": ("0.01", "5", "x1.05"),
                    "200": ("0.01", "5", "x1.05"),
                    "220": ("0.01", "5", "x1.03"),
                    "250": ("0.01", "5", "x1.03"),
                    "300": ("0.01", "5", "x1.03"),
                    "350": ("0.01", "5", "x1.03"),
                    "400": ("0.005", "1", "x1.03"),
                    "500": ("0.005", "1", "x1.03"),
                    "600": ("0.005", "1", "x1.03"),
                }
            lands.generateMultiCrab(
                opts,
                myDir,
                massPoints=settings.getMassPoints(
                    commonLimitTools.LimitProcessType.TAUJETS),
                datacardPatterns=[
                    settings.getDatacardPattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                rootfilePatterns=[
                    settings.getRootfilePattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                clsType=lands.LHCType(opts.brlimit,
                                      opts.sigmabrlimit,
                                      toysCLsb=_ntoysCLsb(),
                                      toysCLb=_ntoysCLb(),
                                      firstSeed=settings.getFirstSeed(),
                                      vR=myVR),
                numberOfJobs=_njobs(),
                postfix=postfix + "_lhc_jobs160_sb150_b75",
                crabScheduler=crabScheduler,
                crabOptions=crabOptions)
        elif opts.lhcTypeAsymptotic:
            lands.produceLHCAsymptotic(
                opts,
                myDir,
                massPoints=settings.getMassPoints(
                    commonLimitTools.LimitProcessType.TAUJETS),
                datacardPatterns=[
                    settings.getDatacardPattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                rootfilePatterns=[
                    settings.getRootfilePattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                clsType=lands.LHCTypeAsymptotic(opts.brlimit,
                                                opts.sigmabrlimit),
                postfix=postfix + "_lhcasy")
        else:
            return False
    elif settings.isCombine():
        if opts.lepType:
            raise Exception(
                "LEP type Hybrid CLs not implemented yet for combine")
        elif opts.lhcType:
            raise Exception(
                "LHC type Hybrid CLs not implemented yet for combine")
        elif opts.lhcTypeAsymptotic:
            # Optional user postfix is appended to the standard one.
            pfix = postfix + "_lhcasy"
            if opts.postfix != "":
                pfix += "_" + opts.postfix
            combine.produceLHCAsymptotic(
                opts,
                myDir,
                massPoints=settings.getMassPoints(
                    commonLimitTools.LimitProcessType.TAUJETS),
                datacardPatterns=[
                    settings.getDatacardPattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                rootfilePatterns=[
                    settings.getRootfilePattern(
                        commonLimitTools.LimitProcessType.TAUJETS)
                ],
                clsType=combine.LHCTypeAsymptotic(opts),
                postfix=pfix)
        else:
            return False
    return True
Ejemplo n.º 24
0
def parseTextResultsFromFile(brContainer, massWhiteList, scen, resultKeys):
    # Read
    name = tbtools._resultsPattern % scen
    print "Opening file '%s' for input" % (name)
    f = open(name)
    if f == None:
        raise Exception("Error: Could not open result file '%s' for input!" %
                        name)
    lines = f.readlines()
    f.close()
    # Obtain mass points
    myMassPoints = []
    if len(massWhiteList) > 0:
        myMassPoints = massWhiteList[:]
    else:
        myLine = 0
        while myLine < len(lines):
            if lines[myLine].startswith("Tan beta limit scan ("):
                s = lines[myLine].replace("Tan beta limit scan (", "").replace(
                    ") for m=", ",").replace(" and key: ",
                                             ",").replace("\n", "")
                mySplit = s.split(",")
                m = mySplit[1]
                if not m in myMassPoints:
                    myMassPoints.append(m)
            myLine += 1
    myMassPoints.sort()
    # Analyse lines
    for m in myMassPoints:
        for myKey in resultKeys:
            myBlockStart = None
            myBlockEnd = None
            myLine = 0
            while myLine < len(lines) and myBlockEnd == None:
                if lines[myLine].startswith("Tan beta limit scan (") or lines[
                        myLine].startswith("Allowed tan beta"):
                    if myBlockStart == None:
                        s = lines[myLine].replace(
                            "Tan beta limit scan (",
                            "").replace(") for m=",
                                        ",").replace(" and key: ",
                                                     ",").replace("\n", "")
                        mySplit = s.split(",")
                        if scen == mySplit[0] and m == mySplit[
                                1] and myKey == mySplit[2]:
                            # Entry found, store beginning
                            myBlockStart = myLine
                    else:
                        myBlockEnd = myLine
                myLine += 1
            if myBlockStart == None or myLine - myBlockStart > 100:
                print "... could not find results"
            else:
                myBlockEnd = myLine
            if myBlockEnd != None:
                for i in range(myBlockStart + 1, myBlockEnd - 1):
                    s = lines[i].replace("  tan beta=", "").replace(
                        " xsecTheor=",
                        "").replace(" pb, limit(%s)=" % myKey,
                                    ",").replace(" pb, passed=", ",")
                    mySplit = s.split(",")
                    if len(mySplit) > 1 and s[0] != "#" and not mySplit[2] in [
                            "failed", "n.a.", ""
                    ] and mySplit[1] != "None":
                        myTanBeta = mySplit[0]
                        tanbetakey = tbtools.constructResultKey(m, myTanBeta)
                        if not brContainer.resultExists(m, myTanBeta):
                            brContainer._results[tanbetakey] = {}
                            if mySplit[1] == "None":
                                brContainer._results[tanbetakey][
                                    "sigmaTheory"] = None
                            else:
                                brContainer._results[tanbetakey][
                                    "sigmaTheory"] = float(mySplit[1])
                            result = commonLimitTools.Result(0)
                            setattr(result, myKey, float(mySplit[2]))
                            brContainer.setCombineResult(m, myTanBeta, result)
                        else:
                            # Add result key
                            setattr(
                                brContainer._results[tanbetakey]
                                ["combineResult"], myKey, float(mySplit[2]))
    return myMassPoints