示例#1
0
    def testCombinedResult(self):
        """Check combined-dataset theory predictions for CMS-SUS-16-050-agg
        on pure T1, pure T5 and mixed scenarios.

        The mixed scenario's prediction must be 25% of the sum of the pure
        ones, and twice its expected r-value must lie between the pure
        r-values (half of the mixed cross section goes into asymmetric
        branches the pure topologies do not cover).
        """
        predXSecs, rvalues = {}, {}
        for case in ["T1", "T5", "mixed"]:
            filename = self.createSLHAFile(case=case)
            deco = decompose(filename)
            expRes = database.getExpResults(
                analysisIDs=["CMS-SUS-16-050-agg"])[0]
            tp = theoryPredictionsFor(expRes,
                                      deco,
                                      useBestDataset=False,
                                      combinedResults=True)
            for t in tp:
                predXSecs[case] = t.xsection.value
                rvalues[case] = t.getRValue(expected=True)
            # clean up the temporary SLHA file (previously wrapped in a
            # pointless `if True:` guard)
            os.unlink(filename)
        ## first test: the theory prediction of the mixed scenario should be 25% of the sum
        ## 25%, because the total cross section is a fixed quantity, and half of the mixed scenario
        ## goes into asymmetric branches which we miss out on.
        self.assertAlmostEqual(
            (predXSecs["T1"] + predXSecs["T5"]).asNumber(fb),
            (4 * predXSecs["mixed"]).asNumber(fb), 2)

        ## second test: the r value of the mixed scenario * 2 must be between the r values
        ## of the pure scenarios. The factor of two comes from the fact, that we loose 50%
        ## to asymmetric branches
        self.assertTrue(rvalues["T5"] < 2 * rvalues["mixed"] < rvalues["T1"])
示例#2
0
    def testGoodFile(self):
        """Decompose lightEWinos and check that the Cgtr condition of the
        TChiWZoff result shows zero violation."""
        slhaFile = "./testFiles/slha/lightEWinos.slha"
        topos = slhaDecomposer.decompose(slhaFile, doCompress=True,
                                         doInvisible=True, minmassgap=5 * GeV)
        expResult = database.getExpResults(txnames=["TChiWZoff"])[0]
        prediction = theoryPredictionsFor(expResult, topos)[0]
        key = 'Cgtr([[[mu+,mu-]],[[l,nu]]],[[[e+,e-]],[[l,nu]]])'
        self.assertEqual(prediction.conditions[key], 0.)
示例#3
0
    def runPrinterMain(self, slhafile, mprinter, addTopList=False):
        """
        Run the decomposition / theory-prediction / coverage chain on
        slhafile and add every result object to the given master printer.

        :param slhafile: path to the SLHA input file
        :param mprinter: master printer object that collects all output
        :param addTopList: if True, also add the decomposition (topology
            list) itself to the printer
        """

        # Use the MSSM particle content; reload the particle definitions
        # so the change takes effect.
        runtime.modelFile = 'mssm'
        reload(particlesLoader)

        #Set main options for decomposition:
        sigmacut = 0.03 * fb
        mingap = 5. * GeV
        """ Decompose model (use slhaDecomposer for SLHA input or lheDecomposer
            for LHE input) """
        smstoplist = slhaDecomposer.decompose(slhafile,
                                              sigmacut,
                                              doCompress=True,
                                              doInvisible=True,
                                              minmassgap=mingap)

        #Add the decomposition result to the printers
        if addTopList:
            mprinter.addObj(smstoplist)

        listOfExpRes = database.getExpResults(analysisIDs=[
            '*:8*TeV', 'CMS-PAS-SUS-15-002', 'CMS-PAS-SUS-16-024'
        ])
        # Compute the theory predictions for each analysis
        allPredictions = []
        for expResult in listOfExpRes:
            predictions = theoryPredictionsFor(expResult, smstoplist)
            if not predictions:
                continue
            # NOTE(review): reaches into the private _theoryPredictions list
            allPredictions += predictions._theoryPredictions

        # maximum condition violation accepted in the result list
        maxcond = 0.2
        results = ioObjects.ResultList(allPredictions, maxcond)
        mprinter.addObj(results)

        #Add coverage information:
        coverageInfo = coverage.Uncovered(smstoplist)
        mprinter.addObj(coverageInfo)

        #Add additional information:
        databaseVersion = database.databaseVersion
        outputStatus = ioObjects.OutputStatus(
            [1, 'Input file ok'], slhafile, {
                'sigmacut': sigmacut.asNumber(fb),
                'minmassgap': mingap.asNumber(GeV),
                'maxcond': maxcond
            }, databaseVersion)
        # mark the run as successful before flushing the printers
        outputStatus.status = 1
        mprinter.addObj(outputStatus)
        mprinter.flush()
示例#4
0
 def testIntegration(self):
     """Decompose the simplyGluino point and run checkAnalysis on the T1
     results of two experimental analyses."""
     slhafile = './testFiles/slha/simplyGluino.slha'
     self.configureLogger()
     smstoplist = slhaDecomposer.decompose(slhafile, .1*fb, doCompress=True,
             doInvisible=True, minmassgap=5.*GeV)
     listofanalyses = database.getExpResults( 
             analysisIDs= [ "ATLAS-SUSY-2013-02", "CMS-SUS-13-012" ], 
             txnames = [ "T1" ] )
     # getExpResults may return a single object; normalize to a list.
     # isinstance is the correct check, not a direct type comparison.
     if not isinstance(listofanalyses, list):
         listofanalyses = [listofanalyses]
     for analysis in listofanalyses:
         self.checkAnalysis(analysis,smstoplist)
示例#5
0
    def testGoodFile(self):
        """Decompose the lightEWinos point and verify that the Cgtr
        condition of the TChiWZoff result has zero violation."""
        topolist = slhaDecomposer.decompose("./testFiles/slha/lightEWinos.slha",
                                            doCompress=True, doInvisible=True,
                                            minmassgap=5 * GeV)
        expRes = database.getExpResults(txnames=["TChiWZoff"])[0]
        pred = theoryPredictionsFor(expRes, topolist)[0]
        violation = pred.conditions[
            'Cgtr([[[mu+,mu-]],[[l,nu]]],[[[e+,e-]],[[l,nu]]])']
        self.assertEqual(violation, 0.)
示例#6
0
    def testGoodFile(self):
        """For the higgsinoStop point, each analysis with predictions must
        yield exactly one theory prediction whose element IDs match the
        expected list for that analysis."""
        expectedIDs = {
            'ATLAS-CONF-2013-037': [31, 32, 33, 34, 27, 28, 29, 30],
            'ATLAS-SUSY-2013-05': [26],
        }
        topoList = slhaDecomposer.decompose("./testFiles/slha/higgsinoStop.slha",
                                            doCompress=True, doInvisible=True,
                                            minmassgap=5 * GeV)
        for expRes in database.getExpResults(
                analysisIDs=['*:8*TeV', 'CMS-PAS-SUS-15-002',
                             'CMS-PAS-SUS-16-024']):
            preds = theoryPredictionsFor(expRes, topoList)
            if not preds:
                continue
            self.assertEqual(len(preds), 1)
            self.assertEqual(sorted(preds[0].IDs),
                             sorted(expectedIDs[expRes.globalInfo.id]))
示例#7
0
 def checkPrediction(self,slhafile,expID,expectedValues):
     """Decompose slhafile and compare each predicted cross section of the
     analyses selected by expID against expectedValues.

     :param slhafile: path to the SLHA file to decompose
     :param expID: analysis ID (or list of IDs) passed to getExpResults
     :param expectedValues: list of expected cross sections in fb; entries
         are consumed from the END of the list (pop), so their order must
         be the reverse of the prediction iteration order. The list must
         be empty once all predictions are checked.
     """
     self.configureLogger()
     smstoplist = slhaDecomposer.decompose(slhafile, 0.*fb, doCompress=True,
             doInvisible=True, minmassgap=5.*GeV)
     expresults = database.getExpResults(analysisIDs= expID)
     for expresult in expresults:
         theorypredictions = theoryPredictionsFor(expresult, smstoplist)
         for pred in theorypredictions:
             predval=pred.xsection.value 
             expval = expectedValues.pop()
             # allow a 1% relative deviation from the expected value
             delta = expval*0.01
             self.assertAlmostEqual(predval.asNumber(fb), expval,delta=delta)
     
     # every expected value must have been matched by some prediction
     self.assertTrue(len(expectedValues) == 0)
示例#8
0
    def runPrinterMain(self, slhafile, mprinter, addTopList=False):
        """Run the full decomposition / prediction / coverage chain for
        slhafile and feed every result object to the given master printer.

        :param slhafile: path to the SLHA input file
        :param mprinter: printer object collecting the output
        :param addTopList: if True, also add the topology list to the printer
        """
        runtime.modelFile = 'mssm'
        reload(particlesLoader)

        # decomposition options
        sigmacut = 0.03 * fb
        mingap = 5. * GeV

        # decompose the model (slhaDecomposer handles SLHA input)
        topList = slhaDecomposer.decompose(slhafile, sigmacut,
                                           doCompress=True,
                                           doInvisible=True,
                                           minmassgap=mingap)

        if addTopList:
            # attach the decomposition result itself to the printer
            mprinter.addObj(topList)

        expResults = database.getExpResults(analysisIDs=[
            '*:8*TeV', 'CMS-PAS-SUS-15-002', 'CMS-PAS-SUS-16-024'
        ])

        # collect the theory predictions of all analyses
        predictionList = []
        for expRes in expResults:
            preds = theoryPredictionsFor(expRes, topList)
            if not preds:
                continue
            predictionList += preds._theoryPredictions

        # maximum condition violation accepted in the result list
        maxcond = 0.2
        mprinter.addObj(ioObjects.ResultList(predictionList, maxcond))

        # coverage information
        mprinter.addObj(coverage.Uncovered(topList))

        # run metadata / status record
        databaseVersion = database.databaseVersion
        outputStatus = ioObjects.OutputStatus(
            [1, 'Input file ok'], slhafile,
            {'sigmacut': sigmacut.asNumber(fb),
             'minmassgap': mingap.asNumber(GeV),
             'maxcond': maxcond},
            databaseVersion)
        outputStatus.status = 1
        mprinter.addObj(outputStatus)
        mprinter.flush()
示例#9
0
 def testWithDisplaced(self):
     """Decomposition with displaced-vertex tracking: expect a single
     [[],[]] element with two displaced final states and a known
     13 TeV weight."""
     setLogLevel('error')
     self.logger.info("test decomposition with displaced vertex tracking")
     topologies = slhaDecomposer.decompose("./testFiles/slha/hscpTest_mid.slha",
                                           .1 * fb, False, False, 5. * GeV)
     self.assertEqual(len(topologies), 1)
     elements = topologies[0].elementList
     self.assertEqual(len(elements), 1)
     el = elements[0]
     self.assertEqual(str(el), "[[],[]]")
     self.assertEqual(el.getFinalStates(), ['Displaced', 'Displaced'])
     xsec = el.weight.getXsecsFor(13 * TeV)[0].value.asNumber(fb)
     self.assertAlmostEqual(xsec, 0.6963, places=3)
示例#10
0
 def test(self):
     """ test the decomposition with no compression """
     # The docstring now comes first, so it is actually attached to the
     # method; previously it followed the logger call and was a no-op.
     self.logger.info ( "test decomposition, no compression" )
     slhafile="./testFiles/slha/simplyGluino.slha"
     topos = slhaDecomposer.decompose ( slhafile, .1*fb, False, False, 5.*GeV )
     # simplyGluino yields exactly one topology with one element
     self.assertEqual ( len(topos), 1 )
     topo=topos[0]
     ellist=topo.elementList
     self.assertEqual ( len(ellist), 1 )
     element=ellist[0]
     self.assertEqual ( str (element), "[[[q,q]],[[q,q]]]" )
示例#11
0
def main():
    """
    Main program. Displays basic use case.

    """
    # Input file location (either a SLHA or LHE file)
    slhafile = 'inputFiles/slha/gluino_squarks.slha'
    # lhefile = 'inputFiles/lhe/gluino_squarks.lhe'

    # Main options for decomposition
    sigmacut = 0.03 * fb
    mingap = 5. * GeV

    # Decompose the model (slhaDecomposer for SLHA, lheDecomposer for LHE)
    smstoplist = slhaDecomposer.decompose(slhafile, sigmacut,
                                          doCompress=True, doInvisible=True,
                                          minmassgap=mingap)

    # Decomposition summary; outputLevel: 0 = none, 1 = simple, 2 = extended
    smstoplist.printout(outputLevel=1)

    # Load all analyses from the database
    listofanalyses = smsAnalysisFactory.load()

    # Compute the theory predictions for each analysis
    analysesPredictions = [theoryPredictionFor(analysis, smstoplist)
                           for analysis in listofanalyses]

    # Access information for each theory prediction/analysis
    for analysisPred in analysesPredictions:
        if not analysisPred:
            # skip non-applicable analyses
            continue
        # an analysis prediction may contain several clusters; loop over them
        for theoryPrediction in analysisPred:
            print("------------------------")
            print("Analysis name = ", theoryPrediction.analysis.label)
            # average mass of the elements in the cluster
            print("Prediction Mass = ", theoryPrediction.mass)
            # cluster signal cross-section
            print("Signal Cross-Section = ", theoryPrediction.value)
            # condition violation values
            print("Condition Violation = ", theoryPrediction.conditions)
            # upper limit for the respective prediction
            print("Analysis upper limit = ",
                  theoryPrediction.analysis.getUpperLimitFor(
                      theoryPrediction.mass))
示例#12
0
 def test(self):
     """ test the decomposition with no compression """
     # docstring moved to the first statement so it is actually the
     # method docstring instead of a discarded string literal
     self.logger.info("test decomposition, no compression")
     slhafile = "./testFiles/slha/simplyGluino.slha"
     topos = slhaDecomposer.decompose(slhafile, .1 * fb, False, False,
                                      5. * GeV)
     # simplyGluino yields exactly one topology with one element
     self.assertEqual(len(topos), 1)
     topo = topos[0]
     ellist = topo.elementList
     self.assertEqual(len(ellist), 1)
     element = ellist[0]
     self.assertEqual(str(element), "[[[q,q]],[[q,q]]]")
示例#13
0
 def testWithDisplaced(self):
     """Displaced-vertex tracking: the hscpTest_mid point must yield one
     topology with a single [[],[]] element, two 'Displaced' final
     states and the known 13 TeV cross section."""
     setLogLevel('error')
     self.logger.info("test decomposition with displaced vertex tracking")
     slhafile = "./testFiles/slha/hscpTest_mid.slha"
     allTopos = slhaDecomposer.decompose(slhafile, .1 * fb, False, False,
                                         5. * GeV)
     self.assertEqual(len(allTopos), 1)
     elems = allTopos[0].elementList
     self.assertEqual(len(elems), 1)
     displaced = elems[0]
     self.assertEqual(str(displaced), "[[],[]]")
     self.assertEqual(displaced.getFinalStates(), ['Displaced', 'Displaced'])
     w = displaced.weight.getXsecsFor(13 * TeV)[0].value.asNumber(fb)
     self.assertAlmostEqual(w, 0.6963, places=3)
示例#14
0
文件: testTx.py 项目: SModelS/smodels
 def testT1(self):
     """ test with the T1 slha input file """
     # docstring placed first so it is attached to the method; it used to
     # follow the logger call as a no-op string literal
     from smodels.tools.smodelsLogging import logger
     logger.info("T1")
     slhafile="./testFiles/slha/simplyGluino.slha"
     topos = slhaDecomposer.decompose ( slhafile, .1*fb, False, False, 5.*GeV )
     for topo in topos:
         for element in topo.elementList:
             masses=element.getMasses()
             mgluino=masses[0][0]
             mLSP=masses[0][1]
             self.assertEqual( str(element), "[[[q,q]],[[q,q]]]" )
             # expected spectrum of the simplyGluino point
             self.assertEqual( int ( mgluino / GeV ), 675 )
             self.assertEqual( int ( mLSP / GeV ), 200 )
示例#15
0
 def testT1(self):
     """ test with the T1 slha input file """
     # docstring moved to the top of the method so it is a real docstring
     from smodels.tools.smodelsLogging import logger
     logger.info("T1")
     slhafile = "./testFiles/slha/simplyGluino.slha"
     topos = slhaDecomposer.decompose(slhafile, .1 * fb, False, False,
                                      5. * GeV)
     for topo in topos:
         for element in topo.elementList:
             masses = element.getMasses()
             mgluino = masses[0][0]
             mLSP = masses[0][1]
             self.assertEqual(str(element), "[[[q,q]],[[q,q]]]")
             # expected spectrum of the simplyGluino point
             self.assertEqual(int(mgluino / GeV), 675)
             self.assertEqual(int(mLSP / GeV), 200)
示例#16
0
def main():
    """
    Main program. Displays basic use case.

    """
    # Input file location (either a SLHA or LHE file)
    slhafile = 'inputFiles/slha/gluino_squarks.slha'
    # lhefile = 'inputFiles/lhe/gluino_squarks.lhe'

    # Main decomposition options
    sigmacut = 0.03 * fb
    mingap = 5. * GeV

    # Decompose the model (slhaDecomposer for SLHA, lheDecomposer for LHE)
    topList = slhaDecomposer.decompose(slhafile, sigmacut, doCompress=True,
                                       doInvisible=True, minmassgap=mingap)

    # Decomposition summary; outputLevel: 0 = none, 1 = simple, 2 = extended
    topList.printout(outputLevel=1)

    # Load all analyses from the database
    analyses = smsAnalysisFactory.load()

    # Theory prediction for every analysis
    predictionsPerAnalysis = [theoryPredictionFor(ana, topList)
                              for ana in analyses]

    for analysisPred in predictionsPerAnalysis:
        # skip non-applicable analyses
        if not analysisPred:
            continue
        # an analysis prediction may hold several clusters; loop over them
        for theoryPrediction in analysisPred:
            print("------------------------")
            print("Analysis name = ", theoryPrediction.analysis.label)
            # average mass of the elements in the cluster
            print("Prediction Mass = ", theoryPrediction.mass)
            # cluster signal cross-section
            print("Signal Cross-Section = ", theoryPrediction.value)
            # condition violation values
            print("Condition Violation = ", theoryPrediction.conditions)
            # upper limit for this prediction
            print("Analysis upper limit = ",
                  theoryPrediction.analysis.getUpperLimitFor(
                      theoryPrediction.mass))
示例#17
0
 def testInvisiblePositive(self):
     """ test the invisible compression, a positive example """
     topos = slhaDecomposer.decompose("./testFiles/slha/higgsinoStop.slha",
                                      .1 * fb, False, True, 5. * GeV)
     checked = False
     for topo in topos:
         if str(topo) != "[][]":
             continue
         for el in topo.elementList:
             if str(el) != "[[],[]]":
                 continue
             checked = True
             mothers = el.motherElements
             # exactly one mother, produced by invisible compression
             self.assertEqual(str(mothers[0][1]), "[[],[[nu,nu]]]")
             self.assertEqual(len(mothers), 1)
             self.assertEqual(str(mothers[0][0]), "invisible")
     self.assertTrue(checked)
示例#18
0
 def testPredictionInterface(self):
     """ A simple test to see that the interface in datasetObj
     and TheoryPrediction to the statistics tools is working correctly
     """
     expRes = database.getExpResults(analysisIDs=['CMS-SUS-13-012'])[0]
     topList = slhaDecomposer.decompose("./testFiles/slha/simplyGluino.slha")
     prediction = theoryPredictionsFor(expRes, topList, deltas_rel=0.)[0]
     pred_signal_strength = prediction.xsection.value
     prediction.computeStatistics()
     # statistics obtained via the TheoryPrediction interface
     ill = math.log(prediction.likelihood)
     ichi2 = prediction.chi2
     # the same numbers computed directly with the likelihood machinery
     nsig = (pred_signal_strength * expRes.globalInfo.lumi).asNumber()
     computer = LikelihoodComputer(Data(4, 2.2, 1.1**2, None,
                                        nsignal=nsig, deltas_rel=0.2))
     dll = math.log(computer.likelihood(nsig, marginalize=False))
     self.assertAlmostEqual(ill, dll, places=2)
     dchi2 = computer.chi2(nsig, marginalize=False)
     self.assertAlmostEqual(ichi2, dchi2, places=2)
示例#19
0
 def testPredictionInterface(self):
     """ A simple test to see that the interface in datasetObj
     and TheoryPrediction to the statistics tools is working correctly
     """
     expRes = database.getExpResults(analysisIDs=['CMS-SUS-13-012'])[0]
     smstoplist = slhaDecomposer.decompose("./testFiles/slha/simplyGluino.slha")
     prediction = theoryPredictionsFor(expRes, smstoplist, deltas_rel=0.)[0]
     signalStrength = prediction.xsection.value
     prediction.computeStatistics()
     # values obtained through the TheoryPrediction interface
     interfaceLL = math.log(prediction.likelihood)
     interfaceChi2 = prediction.chi2
     # recompute the same values directly via the statistics tools
     nsig = (signalStrength * expRes.globalInfo.lumi).asNumber()
     data = Data(4, 2.2, 1.1**2, None, nsignal=nsig, deltas_rel=0.2)
     computer = LikelihoodComputer(data)
     directLL = math.log(computer.likelihood(nsig, marginalize=False))
     self.assertAlmostEqual(interfaceLL, directLL, places=2)
     directChi2 = computer.chi2(nsig, marginalize=False)
     self.assertAlmostEqual(interfaceChi2, directChi2, places=2)
示例#20
0
    def testGoodFile(self):
        """Each analysis with predictions for the higgsinoStop point must
        yield exactly one theory prediction whose element IDs match the
        expected list."""
        idsPerAnalysis = {
            'ATLAS-CONF-2013-037': [31, 32, 33, 34, 27, 28, 29, 30],
            'ATLAS-SUSY-2013-05': [26],
        }
        slhaFile = "./testFiles/slha/higgsinoStop.slha"
        topoList = slhaDecomposer.decompose(slhaFile, doCompress=True,
                                            doInvisible=True,
                                            minmassgap=5 * GeV)
        expResults = database.getExpResults(analysisIDs=[
            '*:8*TeV', 'CMS-PAS-SUS-15-002', 'CMS-PAS-SUS-16-024'
        ])
        for expRes in expResults:
            preds = theoryPredictionsFor(expRes, topoList)
            if not preds:
                continue
            self.assertEqual(len(preds), 1)
            expected = sorted(idsPerAnalysis[expRes.globalInfo.id])
            self.assertEqual(sorted(preds[0].IDs), expected)
示例#21
0
 def testInvisibleNegative(self):
     """ test the invisible compression, a negative example """
     topos = slhaDecomposer.decompose("./testFiles/slha/higgsinoStop.slha",
                                      .1 * fb, False, True, 5. * GeV)
     found = False
     for topo in topos:
         if str(topo) != "[1,1][1,1]":
             continue
         for el in topo.elementList:
             if str(el) != "[[[q],[W+]],[[t-],[t+]]]":
                 continue
             found = True
             # this element must not stem from any compression: no mothers
             self.assertEqual(len(el.motherElements), 0)
     self.assertTrue(found)
示例#22
0
 def testMass(self):
     """ test the mass compression, a positive example """
     found = False
     topos = slhaDecomposer.decompose("./testFiles/slha/higgsinoStop.slha",
                                      .1 * fb, True, False, 5. * GeV)
     for topo in topos:
         if str(topo) != "[1][1]":
             continue
         for el in topo.elementList:
             if str(el) != "[[[b]],[[b]]]":
                 continue
             found = True
             masses = el.motherElements[0][1].getMasses()
             dm = abs(masses[0][1] - masses[0][2]) / GeV
             self.assertEqual(len(el.motherElements), 24)
             self.assertEqual(str(el.motherElements[0][0]), "mass")
             # the compressed-away mass gap must be below the 5 GeV cut
             self.assertTrue(dm < 5.0)
     self.assertTrue(found)
示例#23
0
# In[4]:

# load the list of analyses (experimental results) from the database
listOfAnalyses = smsAnalysisFactory.load()

# In[5]:

# Define the SLHA file name
filename = "%s/inputFiles/slha/gluino_squarks.slha" % installDirectory()

# In[6]:

# Perform the decomposition (sigcut is the minimum cross section kept):
listOfTopologies = slhaDecomposer.decompose(filename,
                                            sigcut=0.5 * fb,
                                            doCompress=True,
                                            doInvisible=True,
                                            minmassgap=5 * GeV)

# In[7]:

# Initiate missing Topologies for 8 TeV
missingtopos = missingTopologies.MissingTopoList(8 * TeV)

# In[8]:

# Check listOfTopologies against listOfAnalyses to find missing topologies
missingtopos.findMissingTopos(listOfTopologies,
                              listOfAnalyses,
                              minmassgap=5 * GeV,
                              doCompress=True,
def testPoint(inputFile, outputDir, parser, databaseVersion, listOfExpRes):
    """
    Test model point defined in input file (running decomposition, check
    results, test coverage)

    :parameter inputFile: path to input file
    :parameter outputDir: path to directory where output is be stored
    :parameter parser: ConfigParser storing information from parameters file
    :parameter databaseVersion: Database version (printed to output file)
    :parameter listOfExpRes: list of ExpResult objects to be considered
    :returns: output of printers
    """
    """Get run parameters and options from the parser"""
    sigmacut = parser.getfloat("parameters", "sigmacut") * fb
    minmassgap = parser.getfloat("parameters", "minmassgap") * GeV
    inputType = parser.get("options", "inputType").lower()
    """Setup output printers"""
    masterPrinter = MPrinter()
    masterPrinter.setPrinterOptions(parser)
    # output files are named after the input file, placed inside outputDir
    masterPrinter.setOutPutFiles(
        os.path.join(outputDir, os.path.basename(inputFile)))
    """ Add list of analyses loaded to printer"""
    masterPrinter.addObj(ExpResultList(listOfExpRes))
    """Check input file for errors"""
    inputStatus = ioObjects.FileStatus()
    if parser.getboolean("options", "checkInput"):
        inputStatus.checkFile(inputType, inputFile, sigmacut)
    """Initialize output status and exit if there were errors in the input"""
    outputStatus = ioObjects.OutputStatus(inputStatus.status, inputFile,
                                          dict(parser.items("parameters")),
                                          databaseVersion)
    masterPrinter.addObj(outputStatus)
    if outputStatus.status < 0:
        # broken input: flush what we have and stop here
        return masterPrinter.flush()
    """
    Decompose input file
    ====================
    """
    try:
        """ Decompose input SLHA file, store the output elements in smstoplist """
        if inputType == 'slha':
            smstoplist = slhaDecomposer.decompose(
                inputFile,
                sigmacut,
                doCompress=parser.getboolean("options", "doCompress"),
                doInvisible=parser.getboolean("options", "doInvisible"),
                minmassgap=minmassgap)
        else:
            # any non-SLHA input type is handed to the LHE decomposer
            smstoplist = lheDecomposer.decompose(
                inputFile,
                doCompress=parser.getboolean("options", "doCompress"),
                doInvisible=parser.getboolean("options", "doInvisible"),
                minmassgap=minmassgap)
    except SModelSError as e:
        print("Exception %s %s" % (e, type(e)))
        """ Update status to fail, print error message and exit """
        outputStatus.updateStatus(-1)
        return masterPrinter.flush()
    """ Print Decomposition output.
        If no topologies with sigma > sigmacut are found, update status, write
        output file, stop running """
    if not smstoplist:
        outputStatus.updateStatus(-3)
        return masterPrinter.flush()

    masterPrinter.addObj(smstoplist)
    """
    Compute theory predictions
    ====================================================
    """
    """ Get theory prediction for each analysis and print basic output """
    allPredictions = []
    for expResult in listOfExpRes:
        theorypredictions = theoryPredictionsFor(expResult, smstoplist)
        if not theorypredictions: continue
        # NOTE(review): accesses the private _theoryPredictions attribute
        allPredictions += theorypredictions._theoryPredictions
    """Compute chi-square and likelihood"""
    if parser.getboolean("options", "computeStatistics"):
        for theoPred in allPredictions:
            theoPred.computeStatistics()
    """ Define result list that collects all theoryPrediction objects."""
    maxcond = parser.getfloat("parameters", "maxcond")
    results = ioObjects.ResultList(allPredictions, maxcond)

    if not results.isEmpty():
        outputStatus.updateStatus(1)
        masterPrinter.addObj(results)
    else:
        outputStatus.updateStatus(0)  # no results after enforcing maxcond

    if parser.getboolean("options", "testCoverage"):
        """ Testing coverage of model point, add results to the output file """
        uncovered = coverage.Uncovered(smstoplist)
        masterPrinter.addObj(uncovered)

    return masterPrinter.flush()
示例#25
0
# In[4]:

# load the list of analyses (experimental results) from the database
listOfAnalyses = smsAnalysisFactory.load()


# In[5]:

# Define the SLHA file name
filename = "%s/inputFiles/slha/gluino_squarks.slha" % installDirectory()


# In[6]:

# Perform the decomposition (sigcut is the minimum cross section kept):
listOfTopologies = slhaDecomposer.decompose (filename, sigcut=0.5*fb, doCompress=True, doInvisible=True, minmassgap=5*GeV)


# In[7]:

# Initiate missing Topologies for 8 TeV
missingtopos = missingTopologies.MissingTopoList(8*TeV)


# In[8]:

# Check listOfTopologies against listOfAnalyses to find missing topologies
missingtopos.findMissingTopos(listOfTopologies, listOfAnalyses, minmassgap=5*GeV,doCompress=True, doInvisible=True)


# In[9]:
示例#26
0
def main(inputFile, parameterFile, outputFile, slhaOutputFile, particlePath):
    """
    Provides a command line interface to basic SModelS functionalities.

    Reads the parameter file, decomposes the input model, confronts it with
    the analysis database and writes both a text summary and an SLHA-type
    summary. On any error a stub SLHA summary is still written so the output
    file always exists.

    :param inputFile: input file name (either a SLHA or LHE file)
    :param parameterFile: File containing the input parameters (default = /etc/parameters_default.ini)
    :param outputFile: Output file to write a summary of results
    :param slhaOutputFile: Output file to write SLHA type summary of results
    :param particlePath: Path to directory where particles.py is stored

    """

    """
    Read and check input file
    =========================
    """
    parser = SafeConfigParser()
    parser.read(parameterFile)

    """ Minimum value of cross-section for an element to be considered eligible for decomposition.
        Too small sigmacut leads to too large decomposition time. """
    sigmacut = parser.getfloat("parameters", "sigmacut") * fb

    """ Minimum value for considering two states non-degenerate (only used for mass compression) """
    minmassgap = parser.getfloat("parameters", "minmassgap") * GeV

    if os.path.exists(outputFile):
        log.warning("Removing old output file in " + outputFile)
    # Truncate (create empty) the summary file so the later appends start clean.
    outfile = open(outputFile, 'w')
    outfile.close()

    databaseVersion = "unknown" # set default database version that is printed in case of errors

    """ Set doCompress flag, only used for slha type output """
    if parser.getboolean("options", "doCompress") or parser.getboolean("options", "doInvisible"): docompress = 1
    else: docompress = 0

    """
    check if particles.py exists in specified path, and add to sys.path
    """
    if not os.path.isfile(os.path.join(particlePath,"particles.py")):
        # fixed message: the file checked for is particles.py, not particle.py
        log.error("particles.py not found in %s" %particlePath )
        return slhaPrinter.writeSLHA(None, parser.getfloat("parameters", "maxcond"), minmassgap, sigmacut, None, databaseVersion, docompress, slhaOutputFile)
    else:
        sys.path.insert(1, particlePath)
        # deferred imports: these modules pick up particles.py from sys.path
        from smodels.tools import ioObjects, missingTopologies
        from smodels.experiment import smsHelpers, smsAnalysisFactory
        from smodels.theory import slhaDecomposer, lheDecomposer
        from smodels.theory.theoryPrediction import theoryPredictionFor


    inputType = parser.get("options", "inputType").lower()
    if inputType != 'slha' and inputType != 'lhe':
        log.error("Unknown input type (must be SLHA or LHE): %s" % inputType)
        return slhaPrinter.writeSLHA(None, parser.getfloat("parameters", "maxcond"), minmassgap, sigmacut, None, databaseVersion, docompress, slhaOutputFile)

    """ Check input file for errors """
    inputStatus = ioObjects.FileStatus()
    if parser.getboolean("options", "checkInput"):
        inputStatus.checkFile(inputType, inputFile, sigmacut)

    """ Check database location """
    try:
        smsHelpers.base = parser.get("path", "databasePath")
        if smsHelpers.base == "./smodels-database" or smsHelpers.base == "./smodels-database/": smsHelpers.base = installDirectory()+"/smodels-database/"
        databaseVersion = smsHelpers.databaseVersion()
    except Exception:
        # narrowed from a bare except so Ctrl-C / SystemExit still propagate
        log.error("Database not found in %s" % os.path.realpath(smsHelpers.base))
        return slhaPrinter.writeSLHA(None, parser.getfloat("parameters", "maxcond"), minmassgap, sigmacut, None, databaseVersion, docompress, slhaOutputFile)

    """ Initialize output status and exit if there were errors in the input """
    outputStatus = ioObjects.OutputStatus(inputStatus.status, inputFile, dict(parser.items("parameters")), databaseVersion, outputFile)
    if outputStatus.status < 0:
        return slhaPrinter.writeSLHA(None, parser.getfloat("parameters", "maxcond"), minmassgap, sigmacut, None, databaseVersion, docompress, slhaOutputFile)

    """
    Decompose input file
    ====================
    """
    try:
        """ Decompose input SLHA file, store the output elements in smstoplist """
        if inputType == 'slha':
            smstoplist = slhaDecomposer.decompose(inputFile, sigmacut, doCompress=parser.getboolean("options", "doCompress"),
                         doInvisible=parser.getboolean("options", "doInvisible"), minmassgap=minmassgap)
        else:
            smstoplist = lheDecomposer.decompose(inputFile, doCompress=parser.getboolean("options", "doCompress"),
                         doInvisible=parser.getboolean("options", "doInvisible"), minmassgap=minmassgap)
    except Exception:
        """ Update status to fail, print error message and exit """
        # narrowed from a bare except so Ctrl-C / SystemExit still propagate
        outputStatus.updateStatus(-1)
        return slhaPrinter.writeSLHA(None, parser.getfloat("parameters", "maxcond"), minmassgap, sigmacut, None, databaseVersion, docompress, slhaOutputFile)

    """ Print Decomposition output.
        If no topologies with sigma > sigmacut are found, update status, write output file, stop running """
    if not smstoplist:
        outputStatus.updateStatus(-3)
        return slhaPrinter.writeSLHA(None, parser.getfloat("parameters", "maxcond"), minmassgap, sigmacut, None, databaseVersion, docompress, slhaOutputFile)

    outLevel = 0
    if parser.getboolean("stdout", "printDecomp"):
        outLevel = 1
        outLevel += parser.getboolean("stdout", "addElmentInfo")
    smstoplist.printout(outputLevel=outLevel)


    """
    Load analysis database
    ======================
    """
    
    """ In case that a list of analyses or txnames are given, retrieve list """
    analyses = parser.get("database", "analyses")
    if "," in analyses:
        analyses = analyses.split(",")
    txnames = parser.get("database", "txnames")
    if "," in txnames:
        txnames = txnames.split(",")
    
    """ Load analyses """
    listofanalyses = smsAnalysisFactory.load(analyses, txnames)

    """ Print list of analyses loaded """
    if parser.getboolean("stdout", "printAnalyses"):
        outLevel = 1
        outLevel += parser.getboolean("stdout", "addAnaInfo")
        print("=======================\n == List of Analyses   ====\n ================")
        for analysis in listofanalyses:
            analysis.printout(outputLevel=outLevel)


    """
    Compute theory predictions and analyses constraints
    ====================================================
    """

    """ Define result list that collects all theoryPrediction objects.
        Variables set to define printing options. """
    results = ioObjects.ResultList(bestresultonly=not parser.getboolean("file", "expandedSummary"),
                                   describeTopo=parser.getboolean("file", "addConstraintInfo"))

    """ Get theory prediction for each analysis and print basic output """
    for analysis in listofanalyses:
        theorypredictions = theoryPredictionFor(analysis, smstoplist)
        if not theorypredictions:
            continue
        if parser.getboolean("stdout", "printResults"):
            # parenthesized prints: single-argument form works in Python 2 and 3
            print("================================================================================")
            theorypredictions.printout()
        print("................................................................................")

        """ Create a list of results, to determine the best result """
        for theoryprediction in theorypredictions:
            results.addResult(theoryprediction, maxcond=parser.getfloat("parameters", "maxcond"))

    """ If there is no best result, this means that there are no matching experimental results for the point """
    if results.isEmpty():
        """ no experimental constraints found """
        outputStatus.updateStatus(0)
    else:
        outputStatus.updateStatus(1)

    """ Write output file """
    outputStatus.printout("file", outputFile)
    """ Add experimental constraints if found """
    if outputStatus.status == 1:
        results.printout("file", outputFile)

    sqrts = max([xsec.info.sqrts for xsec in smstoplist.getTotalWeight()])
    # Fix: missingtopos must exist even when findMissingTopos is disabled,
    # otherwise the writeSLHA call below raised a NameError.
    missingtopos = None
    if parser.getboolean("options", "findMissingTopos"):
        """ Look for missing topologies, add them to the output file """
        missingtopos = missingTopologies.MissingTopoList(sqrts)
        missingtopos.findMissingTopos(smstoplist, listofanalyses, minmassgap, parser.getboolean("options", "doCompress"),
                         doInvisible=parser.getboolean("options", "doInvisible"))
        missingtopos.printout("file", outputFile)
    slhaPrinter.writeSLHA(results, parser.getfloat("parameters", "maxcond"), minmassgap, sigmacut, missingtopos, databaseVersion, docompress, slhaOutputFile)
示例#27
0
def main():
    """
    Main program. Displays basic use case.

    Walks the full SModelS chain on a fixed SLHA input: model setup,
    decomposition, theory predictions with r-values, and coverage of
    uncovered topologies. All results are printed to stdout.
    """
    
    #Define your model (list of rEven and rOdd particles)
    particlesLoader.load( 'smodels.share.models.mssm' ) #Make sure all the model particles are up-to-date
    
    # Path to input file (either a SLHA or LHE file)
    slhafile = 'inputFiles/slha/lightEWinos.slha'
    lhefile = 'inputFiles/lhe/gluino_squarks.lhe'

    # Set main options for decomposition
    sigmacut = 0.01 * fb
    mingap = 5. * GeV

    
    # Decompose model (use slhaDecomposer for SLHA input or lheDecomposer for LHE input)
    # slhaInput is a hard-coded switch; only the SLHA branch runs as written.
    slhaInput = True
    if slhaInput:
        toplist = slhaDecomposer.decompose(slhafile, sigmacut, doCompress=True, doInvisible=True, minmassgap=mingap)
    else:
        toplist = lheDecomposer.decompose(lhefile, doCompress=True,doInvisible=True, minmassgap=mingap)
    # Access basic information from decomposition, using the topology list and topology objects:
    print( "\n Decomposition Results: " )
    print( "\t  Total number of topologies: %i " %len(toplist) )
    nel = sum([len(top.elementList) for top in toplist])
    print( "\t  Total number of elements = %i " %nel )
    #Print information about the m-th topology (if it exists):
    m = 2
    if len(toplist) > m:
        top = toplist[m]
        print( "\t\t %i-th topology  = " %m,top,"with total cross section =",top.getTotalWeight() )
        #Print information about the n-th element in the m-th topology:
        n = 0
        el = top.elementList[n]
        print( "\t\t %i-th element from %i-th topology  = " %(n,m),el, end="" )
        print( "\n\t\t\twith final states =",el.getFinalStates(),"\n\t\t\twith cross section =",el.weight,"\n\t\t\tand masses = ",el.getMasses() )
            
    # Load the experimental results to be used.
    # In this case, all results are employed.
    listOfExpRes = database.getExpResults()

    # Print basic information about the results loaded.
    # Count the number of loaded UL and EM experimental results:
    nUL, nEM = 0, 0
    for exp in listOfExpRes:
        expType = exp.getValuesFor('dataType')[0]
        if expType == 'upperLimit':
            nUL += 1
        elif  expType == 'efficiencyMap':
            nEM += 1
    print( "\n Loaded Database with %i UL results and %i EM results " %(nUL,nEM) )

    # Compute the theory predictions for each experimental result and print them:
    print("\n Theory Predictions and Constraints:")
    rmax = 0.
    bestResult = None
    for expResult in listOfExpRes:
        predictions = theoryPredictionsFor(expResult, toplist, combinedResults=False, marginalize=False)
        if not predictions: continue # Skip if there are no constraints from this result
        print('\n %s ' %expResult.globalInfo.id)
        for theoryPrediction in predictions:
            dataset = theoryPrediction.dataset
            datasetID = dataset.dataInfo.dataId            
            mass = theoryPrediction.mass
            txnames = [str(txname) for txname in theoryPrediction.txnames]
            PIDs =  theoryPrediction.PIDs         
            print( "------------------------" )
            print( "Dataset = ",datasetID )   #Analysis name
            print( "TxNames = ",txnames )  
            print( "Prediction Mass = ",mass )   #Value for average cluster mass (average mass of the elements in cluster)
            print( "Prediction PIDs = ",PIDs )   #Value for average cluster mass (average mass of the elements in cluster)
            print( "Theory Prediction = ",theoryPrediction.xsection )  #Signal cross section
            print( "Condition Violation = ",theoryPrediction.conditions ) #Condition violation values
              
            # Get the corresponding upper limit:
            print( "UL for theory prediction = ",theoryPrediction.upperLimit )

            # Compute the r-value
            r = theoryPrediction.getRValue()
            print( "r = ",r )
            #Compute likelihood and chi^2 for EM-type results:
            if dataset.dataInfo.dataType == 'efficiencyMap':
                theoryPrediction.computeStatistics()
                print( 'Chi2, likelihood=', theoryPrediction.chi2, theoryPrediction.likelihood )
            if r > rmax:
                rmax = r
                bestResult = expResult.globalInfo.id
            
    # Print the most constraining experimental result
    print( "\nThe largest r-value (theory/upper limit ratio) is ",rmax )
    if rmax > 1.:
        print( "(The input model is likely excluded by %s)" %bestResult )
    else:
        print( "(The input model is not excluded by the simplified model results)" )
      
    #Find out missing topologies for sqrts=8*TeV:
    uncovered = coverage.Uncovered(toplist,sqrts=8.*TeV)
    #Print uncovered cross-sections:
    print( "\nTotal missing topology cross section (fb): %10.3E\n" %(uncovered.getMissingXsec()) )
    print( "Total cross section where we are outside the mass grid (fb): %10.3E\n" %(uncovered.getOutOfGridXsec()) )
    print( "Total cross section in long cascade decays (fb): %10.3E\n" %(uncovered.getLongCascadeXsec()) )
    print( "Total cross section in decays with asymmetric branches (fb): %10.3E\n" %(uncovered.getAsymmetricXsec()) )
    
    #Print some of the missing topologies:
    print( 'Missing topologies (up to 3):' )
    for topo in uncovered.missingTopos.topos[:3]:
        print( 'Topology:',topo.topo )
        print( 'Contributing elements (up to 2):' )
        for el in topo.contributingElements[:2]:
            print( el,'cross-section (fb):', el.missingX )
    
    #Print elements with long cascade decay:
    # NOTE(review): the header says "long cascade" but the loop iterates
    # uncovered.outsideGrid — matches the printed text, not this comment.
    print( '\nElements outside the grid (up to 2):' )
    for topo in uncovered.outsideGrid.topos[:2]:
        print( 'Topology:',topo.topo )
        print( 'Contributing elements (up to 4):' )
        for el in topo.contributingElements[:4]:
            print( el,'cross-section (fb):', el.missingX )
            print( '\tmass:',el.getMasses() )
示例#28
0
    def RunSModelS(self,SLHAFilePath,SummaryFilePath):
        """Run the SModelS chain (decomposition, theory predictions,
        constraints) on one SLHA file and append a human-readable summary
        to SummaryFilePath.

        Side effects: sets self.SLHAFilePath and self.SummaryFilePath,
        and appends ("a+") to the summary file.
        """
        # Set the path to the database
        database = Database("/home/oo1m20/softwares/smodels-1.2.2/smodels-database")

        self.SummaryFilePath = os.path.abspath(SummaryFilePath)

        #Define your model (list of rEven and rOdd particles)
        particlesLoader.load( 'smodels.share.models.secumssm' ) #Make sure all the model particles are up-to-date
    
        # Path to input file (either a SLHA or LHE file)
        self.SLHAFilePath = SLHAFilePath
        slhafile = self.SLHAFilePath
        #lhefile = 'inputFiles/lhe/gluino_squarks.lhe'

        # Set main options for decomposition
        sigmacut = 0.01 * fb
        mingap = 5. * GeV

    
        # Decompose model (use slhaDecomposer for SLHA input or lheDecomposer for LHE input)
        # NOTE(review): lhefile is never defined in this method (only the
        # commented-out line above), so the else-branch below would raise
        # NameError if slhaInput were ever set to False — confirm before
        # enabling LHE input.
        slhaInput = True
        if slhaInput:
            toplist = slhaDecomposer.decompose(slhafile, sigmacut, doCompress=True, doInvisible=True, minmassgap=mingap)
        else:
            toplist = lheDecomposer.decompose(lhefile, doCompress=True,doInvisible=True, minmassgap=mingap)
        # Access basic information from decomposition, using the topology list and topology objects:
        # NOTE(review): f is only closed on the success path at the end of the
        # method; an exception mid-way leaks the handle — a with-block would be safer.
        f= open(self.SummaryFilePath,"a+")
        print( "\n Decomposition Results: ", file=f )
        print( "\t  Total number of topologies: %i " %len(toplist), file=f )
        nel = sum([len(top.elementList) for top in toplist])
        print( "\t  Total number of elements = %i " %nel , file=f)
        #Print information about the m-th topology (if it exists):
        m = 2
        if len(toplist) > m:
            top = toplist[m]
            print( "\t\t %i-th topology  = " %m,top,"with total cross section =",top.getTotalWeight(), file=f )
            #Print information about the n-th element in the m-th topology:
            n = 0
            el = top.elementList[n]
            print( "\t\t %i-th element from %i-th topology  = " %(n,m),el, end="", file=f )
            print( "\n\t\t\twith final states =",el.getFinalStates(),"\n\t\t\twith cross section =",el.weight,"\n\t\t\tand masses = ",el.getMasses(), file=f )
            
        # Load the experimental results to be used.
        # In this case, all results are employed.
        listOfExpRes = database.getExpResults()

        # Print basic information about the results loaded.
        # Count the number of loaded UL and EM experimental results:
        nUL, nEM = 0, 0
        for exp in listOfExpRes:
            expType = exp.getValuesFor('dataType')[0]
            if expType == 'upperLimit':
                nUL += 1
            elif  expType == 'efficiencyMap':
                nEM += 1
        print( "\n Loaded Database with %i UL results and %i EM results " %(nUL,nEM), file=f )

        # Compute the theory predictions for each experimental result and print them:
        print("\n Theory Predictions and Constraints:", file=f)
        rmax = 0.
        bestResult = None
        for expResult in listOfExpRes:
            predictions = theoryPredictionsFor(expResult, toplist, combinedResults=False, marginalize=False)
            if not predictions: continue # Skip if there are no constraints from this result
            print('\n %s ' %expResult.globalInfo.id, file=f)
            for theoryPrediction in predictions:
                dataset = theoryPrediction.dataset
                datasetID = dataset.dataInfo.dataId            
                mass = theoryPrediction.mass
                txnames = [str(txname) for txname in theoryPrediction.txnames]
                PIDs =  theoryPrediction.PIDs         
                print( "------------------------", file=f )
                print( "Dataset = ", datasetID, file=f )   #Analysis name
                print( "TxNames = ", txnames, file=f )  
                print( "Prediction Mass = ",mass, file=f )   #Value for average cluster mass (average mass of the elements in cluster)
                print( "Prediction PIDs = ",PIDs, file=f )   #Value for average cluster mass (average mass of the elements in cluster)
                print( "Theory Prediction = ",theoryPrediction.xsection, file=f )  #Signal cross section
                print( "Condition Violation = ",theoryPrediction.conditions, file=f ) #Condition violation values
              
                # Get the corresponding upper limit:
                print( "UL for theory prediction = ",theoryPrediction.upperLimit, file=f )

                # Compute the r-value
                r = theoryPrediction.getRValue()
                print( "r = ",r , file=f)
                #Compute likelihood and chi^2 for EM-type results:
                if dataset.dataInfo.dataType == 'efficiencyMap':
                    theoryPrediction.computeStatistics()
                    print( 'Chi2, likelihood=', theoryPrediction.chi2, theoryPrediction.likelihood, file=f )
                if r > rmax:
                    rmax = r
                    bestResult = expResult.globalInfo.id

        # Print the most constraining experimental result
        print( "\nThe largest r-value (theory/upper limit ratio) is ",rmax, file=f )
        if rmax > 1.:
            print( "(The input model is likely excluded by %s)" %bestResult, file=f )
        else:
            print( "(The input model is not excluded by the simplified model results)", file=f )

        f.close()
示例#29
0
# In[3]:

#Define the SLHA input file name
filename = "%s/inputFiles/slha/gluino_squarks.slha" % installDirectory()

# In[4]:

#Load the database, do the decomposition and compute theory predictions:
#(Look at the theory predictions HowTo to learn how to compute theory predictions)
# The database is read from $HOME/smodels-database/ — adjust if installed elsewhere.
databasepath = os.path.join(os.getenv("HOME"), "smodels-database/")
database = Database(databasepath)
expResults = database.getExpResults()
topList = slhaDecomposer.decompose(filename,
                                   sigcut=0.03 * fb,
                                   doCompress=True,
                                   doInvisible=True,
                                   minmassgap=5 * GeV)
# One prediction set per experimental result; entries may be None/empty.
allThPredictions = [theoryPredictionsFor(exp, topList) for exp in expResults]

# In[5]:

#Print the value of each theory prediction for each experimental
#result and the corresponding upper limit (see the obtain experimental upper limits HowTo to learn how
#to compute the upper limits).
#Also print the expected upper limit, if available
for thPreds in allThPredictions:
    if not thPreds: continue  #skip results with no predictions
    for theoryPred in thPreds:
        expID = theoryPred.expResult.globalInfo.id
        dataType = theoryPred.dataset.dataInfo.dataType
        # (the printing of expID/dataType continues in the next cell of the
        # original notebook, which is not part of this snippet)
示例#30
0
def main():
    """
    Main program. Displays basic use case.

    """

    # Path to input file (either a SLHA or LHE file)
    slhafile = 'inputFiles/slha/lightEWinos.slha'
    # lhefile = 'inputFiles/lhe/gluino_squarks.lhe'

    # Set main options for decomposition
    sigmacut = 0.3 * fb
    mingap = 5. * GeV

    # Decompose model (use slhaDecomposer for SLHA input or lheDecomposer for LHE input)
    toplist = slhaDecomposer.decompose(slhafile,
                                       sigmacut,
                                       doCompress=True,
                                       doInvisible=True,
                                       minmassgap=mingap)
    # toplist = lheDecomposer.decompose(lhefile, doCompress=True,doInvisible=True, minmassgap=mingap)

    # Access basic information from decomposition, using the topology list and topology objects:
    print "\n Decomposition Results: "
    print "\t  Total number of topologies: %i " % len(toplist)
    nel = sum([len(top.elementList) for top in toplist])
    print "\t  Total number of elements = %i " % nel
    #Print information about the m-th topology:
    m = 3
    top = toplist[m]
    print "\t\t %i-th topology  = " % m, top, "with total cross section =", top.getTotalWeight(
    )
    #Print information about the n-th element in the m-th topology:
    n = 0
    el = top.elementList[n]
    print "\t\t %i-th element from %i-th topology  = " % (n, m), el,
    print "\n\t\t\twith cross section =", el.weight, "\n\t\t\tand masses = ", el.getMasses(
    )

    # Load the experimental results to be used.
    # In this case, all results are employed.
    listOfExpRes = database.getExpResults()

    # Print basic information about the results loaded.
    # Count the number of loaded UL and EM experimental results:
    nUL, nEM = 0, 0
    for exp in listOfExpRes:
        expType = exp.getValuesFor('dataType')[0]
        if expType == 'upperLimit':
            nUL += 1
        elif expType == 'efficiencyMap':
            nEM += 1
    print "\n Loaded Database with %i UL results and %i EM results " % (nUL,
                                                                        nEM)

    # Compute the theory predictions for each experimental result and print them:
    print("\n Theory Predictions and Constraints:")
    rmax = 0.
    bestResult = None
    for expResult in listOfExpRes:
        predictions = theoryPredictionsFor(expResult, toplist)
        if not predictions:
            continue  # Skip if there are no constraints from this result
        print('\n %s ' % expResult.globalInfo.id)
        for theoryPrediction in predictions:
            dataset = theoryPrediction.dataset
            datasetID = dataset.dataInfo.dataId
            mass = theoryPrediction.mass
            txnames = [str(txname) for txname in theoryPrediction.txnames]
            PIDs = theoryPrediction.PIDs
            print "------------------------"
            print "Dataset = ", datasetID  #Analysis name
            print "TxNames = ", txnames
            print "Prediction Mass = ", mass  #Value for average cluster mass (average mass of the elements in cluster)
            print "Prediction PIDs = ", PIDs  #Value for average cluster mass (average mass of the elements in cluster)
            print "Theory Prediction = ", theoryPrediction.xsection  #Signal cross section
            print "Condition Violation = ", theoryPrediction.conditions  #Condition violation values

            # Get the corresponding upper limit:
            ul = expResult.getUpperLimitFor(txname=txnames[0],
                                            mass=mass,
                                            dataID=datasetID)
            print "UL for theory prediction = ", ul

            # Compute the r-value
            r = theoryPrediction.xsection.value / ul
            print "r = ", r
            if r > rmax:
                rmax = r
                bestResult = expResult.globalInfo.id

    # Print the most constraining experimental result
    print "\nThe largest r-value (theory/upper limit ratio) is ", rmax
    if rmax > 1.:
        print "(The input model is likely excluded by %s)" % bestResult
    else:
        print "(The input model is not excluded by the simplified model results)"
示例#31
0
def main():
    """
    Main program. Displays basic use case.

    Python 3 variant of the basic workflow: model setup, decomposition,
    theory predictions with r-values and statistics, then coverage of
    uncovered topologies at 8 TeV. Output goes to stdout.
    """

    #Define your model (list of rEven and rOdd particles)
    particlesLoader.load('smodels.share.models.mssm'
                         )  #Make sure all the model particles are up-to-date

    # Path to input file (either a SLHA or LHE file)
    slhafile = 'inputFiles/slha/lightEWinos.slha'
    lhefile = 'inputFiles/lhe/gluino_squarks.lhe'

    # Set main options for decomposition
    sigmacut = 0.01 * fb
    mingap = 5. * GeV

    # Decompose model (use slhaDecomposer for SLHA input or lheDecomposer for LHE input)
    # slhaInput is a hard-coded switch; only the SLHA branch runs as written.
    slhaInput = True
    if slhaInput:
        toplist = slhaDecomposer.decompose(slhafile,
                                           sigmacut,
                                           doCompress=True,
                                           doInvisible=True,
                                           minmassgap=mingap)
    else:
        toplist = lheDecomposer.decompose(lhefile,
                                          doCompress=True,
                                          doInvisible=True,
                                          minmassgap=mingap)
    # Access basic information from decomposition, using the topology list and topology objects:
    print("\n Decomposition Results: ")
    print("\t  Total number of topologies: %i " % len(toplist))
    nel = sum([len(top.elementList) for top in toplist])
    print("\t  Total number of elements = %i " % nel)
    #Print information about the m-th topology (if it exists):
    m = 2
    if len(toplist) > m:
        top = toplist[m]
        print("\t\t %i-th topology  = " % m, top, "with total cross section =",
              top.getTotalWeight())
        #Print information about the n-th element in the m-th topology:
        n = 0
        el = top.elementList[n]
        print("\t\t %i-th element from %i-th topology  = " % (n, m),
              el,
              end="")
        print("\n\t\t\twith final states =", el.getFinalStates(),
              "\n\t\t\twith cross section =", el.weight,
              "\n\t\t\tand masses = ", el.getMasses())

    # Load the experimental results to be used.
    # In this case, all results are employed.
    listOfExpRes = database.getExpResults()

    # Print basic information about the results loaded.
    # Count the number of loaded UL and EM experimental results:
    nUL, nEM = 0, 0
    for exp in listOfExpRes:
        expType = exp.getValuesFor('dataType')[0]
        if expType == 'upperLimit':
            nUL += 1
        elif expType == 'efficiencyMap':
            nEM += 1
    print("\n Loaded Database with %i UL results and %i EM results " %
          (nUL, nEM))

    # Compute the theory predictions for each experimental result and print them:
    print("\n Theory Predictions and Constraints:")
    rmax = 0.
    bestResult = None
    for expResult in listOfExpRes:
        predictions = theoryPredictionsFor(expResult,
                                           toplist,
                                           combinedResults=False,
                                           marginalize=False)
        if not predictions:
            continue  # Skip if there are no constraints from this result
        print('\n %s ' % expResult.globalInfo.id)
        for theoryPrediction in predictions:
            dataset = theoryPrediction.dataset
            datasetID = dataset.dataInfo.dataId
            mass = theoryPrediction.mass
            txnames = [str(txname) for txname in theoryPrediction.txnames]
            PIDs = theoryPrediction.PIDs
            print("------------------------")
            print("Dataset = ", datasetID)  #Analysis name
            print("TxNames = ", txnames)
            print(
                "Prediction Mass = ", mass
            )  #Value for average cluster mass (average mass of the elements in cluster)
            print(
                "Prediction PIDs = ", PIDs
            )  #Value for average cluster mass (average mass of the elements in cluster)
            print("Theory Prediction = ",
                  theoryPrediction.xsection)  #Signal cross section
            print("Condition Violation = ",
                  theoryPrediction.conditions)  #Condition violation values

            # Get the corresponding upper limit:
            print("UL for theory prediction = ", theoryPrediction.upperLimit)

            # Compute the r-value
            r = theoryPrediction.getRValue()
            print("r = ", r)
            #Compute likelihood and chi^2 for EM-type results:
            if dataset.dataInfo.dataType == 'efficiencyMap':
                theoryPrediction.computeStatistics()
                print('Chi2, likelihood=', theoryPrediction.chi2,
                      theoryPrediction.likelihood)
            if r > rmax:
                rmax = r
                bestResult = expResult.globalInfo.id

    # Print the most constraining experimental result
    print("\nThe largest r-value (theory/upper limit ratio) is ", rmax)
    if rmax > 1.:
        print("(The input model is likely excluded by %s)" % bestResult)
    else:
        print(
            "(The input model is not excluded by the simplified model results)"
        )

    #Find out missing topologies for sqrts=8*TeV:
    uncovered = coverage.Uncovered(toplist, sqrts=8. * TeV)
    #Print uncovered cross-sections:
    print("\nTotal missing topology cross section (fb): %10.3E\n" %
          (uncovered.getMissingXsec()))
    print(
        "Total cross section where we are outside the mass grid (fb): %10.3E\n"
        % (uncovered.getOutOfGridXsec()))
    print("Total cross section in long cascade decays (fb): %10.3E\n" %
          (uncovered.getLongCascadeXsec()))
    print(
        "Total cross section in decays with asymmetric branches (fb): %10.3E\n"
        % (uncovered.getAsymmetricXsec()))

    #Print some of the missing topologies:
    print('Missing topologies (up to 3):')
    for topo in uncovered.missingTopos.topos[:3]:
        print('Topology:', topo.topo)
        print('Contributing elements (up to 2):')
        for el in topo.contributingElements[:2]:
            print(el, 'cross-section (fb):', el.missingX)

    #Print elements with long cascade decay:
    # NOTE(review): this header comment says "long cascade" but the loop
    # iterates uncovered.outsideGrid — the printed text is the accurate one.
    print('\nElements outside the grid (up to 2):')
    for topo in uncovered.outsideGrid.topos[:2]:
        print('Topology:', topo.topo)
        print('Contributing elements (up to 4):')
        for el in topo.contributingElements[:4]:
            print(el, 'cross-section (fb):', el.missingX)
            print('\tmass:', el.getMasses())
示例#32
0
def main(inputFile, parameterFile, outputFile, slhaOutputFile, particlePath):
    """
    Provides a command line interface to basic SModelS functionalities.

    :param inputFile: input file name (either a SLHA or LHE file)
    :param parameterFile: File containing the input parameters (default = /etc/parameters_default.ini)
    :param outputFile: Output file to write a summary of results
    :param slhaOutputFile: Output file to write SLHA type summary of results
    :param particlePath: Path to directory where particles.py is stored
    :returns: None on success; on any error, the status returned by writing
              an empty SLHA-type output file.
    """

    # ---- Read and check input parameters ----
    parser = SafeConfigParser()
    parser.read(parameterFile)

    # Minimum cross-section for an element to be eligible for decomposition.
    # Too small a sigmacut leads to very long decomposition times.
    sigmacut = parser.getfloat("parameters", "sigmacut") * fb
    # Minimum mass gap for considering two states non-degenerate
    # (only used for mass compression).
    minmassgap = parser.getfloat("parameters", "minmassgap") * GeV
    # Hoisted: the maximum condition violation is needed on every exit path.
    maxcond = parser.getfloat("parameters", "maxcond")

    # Default database version, printed in case of errors.
    databaseVersion = "unknown"

    # doCompress flag, only used for the SLHA-type output.
    if parser.getboolean("options", "doCompress") or parser.getboolean(
            "options", "doInvisible"):
        docompress = 1
    else:
        docompress = 0

    def _emptySLHAOutput():
        """Common error-exit path: write an SLHA-type output with no results.

        Reads databaseVersion at call time, so it reflects any version
        determined before the failure occurred.
        """
        return slhaPrinter.writeSLHA(None, maxcond, minmassgap, sigmacut,
                                     None, databaseVersion, docompress,
                                     slhaOutputFile)

    if os.path.exists(outputFile):
        log.warning("Removing old output file in " + outputFile)
    # Truncate (or create) the summary output file.
    open(outputFile, 'w').close()

    # Check that particles.py exists in the specified path, and make it
    # importable before pulling in the modules that depend on it.
    if not os.path.isfile(os.path.join(particlePath, "particles.py")):
        log.error("particle.py not found in %s" % particlePath)
        return _emptySLHAOutput()
    else:
        sys.path.insert(1, particlePath)
        from smodels.tools import ioObjects, missingTopologies
        from smodels.experiment import smsHelpers, smsAnalysisFactory
        from smodels.theory import slhaDecomposer, lheDecomposer
        from smodels.theory.theoryPrediction import theoryPredictionFor

    inputType = parser.get("options", "inputType").lower()
    if inputType not in ('slha', 'lhe'):
        log.error("Unknown input type (must be SLHA or LHE): %s" % inputType)
        return _emptySLHAOutput()

    # Check the input file for errors, if requested.
    inputStatus = ioObjects.FileStatus()
    if parser.getboolean("options", "checkInput"):
        inputStatus.checkFile(inputType, inputFile, sigmacut)

    # Locate the database and determine its version.
    try:
        smsHelpers.base = parser.get("path", "databasePath")
        if smsHelpers.base in ("./smodels-database", "./smodels-database/"):
            smsHelpers.base = installDirectory() + "/smodels-database/"
        databaseVersion = smsHelpers.databaseVersion()
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate.
        log.error("Database not found in %s" %
                  os.path.realpath(smsHelpers.base))
        return _emptySLHAOutput()

    # Initialize output status and exit if there were errors in the input.
    outputStatus = ioObjects.OutputStatus(inputStatus.status, inputFile,
                                          dict(parser.items("parameters")),
                                          databaseVersion, outputFile)
    if outputStatus.status < 0:
        return _emptySLHAOutput()

    # ---- Decompose input file ----
    try:
        # Store the decomposed elements in smstoplist.
        if inputType == 'slha':
            smstoplist = slhaDecomposer.decompose(
                inputFile,
                sigmacut,
                doCompress=parser.getboolean("options", "doCompress"),
                doInvisible=parser.getboolean("options", "doInvisible"),
                minmassgap=minmassgap)
        else:
            smstoplist = lheDecomposer.decompose(
                inputFile,
                doCompress=parser.getboolean("options", "doCompress"),
                doInvisible=parser.getboolean("options", "doInvisible"),
                minmassgap=minmassgap)
    except Exception:
        # Decomposition failed: flag the failure and bail out.
        outputStatus.updateStatus(-1)
        return _emptySLHAOutput()

    # If no topologies with sigma > sigmacut are found, update status,
    # write the output file and stop running.
    if not smstoplist:
        outputStatus.updateStatus(-3)
        return _emptySLHAOutput()

    # Print decomposition output at the configured verbosity.
    outLevel = 0
    if parser.getboolean("stdout", "printDecomp"):
        outLevel = 1
        outLevel += parser.getboolean("stdout", "addElmentInfo")
    smstoplist.printout(outputLevel=outLevel)

    # ---- Load analysis database ----
    # A comma in the value means an explicit list of analyses/txnames.
    analyses = parser.get("database", "analyses")
    if "," in analyses:
        analyses = analyses.split(",")
    txnames = parser.get("database", "txnames")
    if "," in txnames:
        txnames = txnames.split(",")
    listofanalyses = smsAnalysisFactory.load(analyses, txnames)

    # Print the list of loaded analyses, if requested.
    if parser.getboolean("stdout", "printAnalyses"):
        outLevel = 1
        outLevel += parser.getboolean("stdout", "addAnaInfo")
        print(
            "=======================\n == List of Analyses   ====\n ================"
        )
        for analysis in listofanalyses:
            analysis.printout(outputLevel=outLevel)

    # ---- Compute theory predictions and analysis constraints ----
    # Result list collecting all theoryPrediction objects; the flags
    # control the printing options.
    results = ioObjects.ResultList(
        bestresultonly=not parser.getboolean("file", "expandedSummary"),
        describeTopo=parser.getboolean("file", "addConstraintInfo"))
    for analysis in listofanalyses:
        theorypredictions = theoryPredictionFor(analysis, smstoplist)
        if not theorypredictions:
            continue
        if parser.getboolean("stdout", "printResults"):
            print("================================================================================")
            theorypredictions.printout()
        print("................................................................................")
        # Collect every prediction so the best result can be determined.
        for theoryprediction in theorypredictions:
            results.addResult(theoryprediction, maxcond=maxcond)

    # An empty result list means no matching experimental results exist
    # for this point.
    if results.isEmpty():
        outputStatus.updateStatus(0)
    else:
        outputStatus.updateStatus(1)
    # Write the output file, appending constraints if any were found.
    outputStatus.printout("file", outputFile)
    if outputStatus.status == 1:
        results.printout("file", outputFile)

    sqrts = max([xsec.info.sqrts for xsec in smstoplist.getTotalWeight()])
    # Initialize up front so the final writeSLHA call cannot raise a
    # NameError when findMissingTopos is disabled (bug in the original).
    missingtopos = None
    if parser.getboolean("options", "findMissingTopos"):
        # Look for missing topologies and add them to the output file.
        missingtopos = missingTopologies.MissingTopoList(sqrts)
        missingtopos.findMissingTopos(
            smstoplist,
            listofanalyses,
            minmassgap,
            parser.getboolean("options", "doCompress"),
            doInvisible=parser.getboolean("options", "doInvisible"))
        missingtopos.printout("file", outputFile)
    slhaPrinter.writeSLHA(results, maxcond, minmassgap, sigmacut,
                          missingtopos, databaseVersion, docompress,
                          slhaOutputFile)