Example #1
 def testWritePickle(self):
     """ tests writing pickle file """
     binfile = "./.database.pcl"
     if os.path.exists ( binfile ):
         os.unlink ( binfile )
     self.logger.info ( "test writing pickle file" )
     writer = Database ( "./tinydb/", force_load = "txt" )
     writer.createBinaryFile ( binfile )
     reader = Database ( binfile, force_load="pcl" )
     os.unlink ( binfile )
     self.assertEqual( writer, reader )
Example #2
    def initialize( self ):
        self.setStatus ( "initialized" )
        Cache.n_stored = 20000 ## crank up the caching
        self.db = Database ( self.dbpath )
        self.expResults = self.db.expResultList
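        # open a TCP/IP socket and bind it below to the configured server address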
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.server_address = ( self.servername, self.port )
        self.pprint ( 'starting up on %s port %s' % self.server_address )
        self.pprint ( 'I will be serving database %s at %s' % \
                      (self.db.databaseVersion, self.dbpath ) )
        try:
            self.sock.bind( self.server_address )
        except OSError as e:
            self.pprint ( "exception %s. is host ''%s'' reachable?" % \
                          ( e, self.server_address ) )
            sys.exit(-1)
        # Listen for incoming connections
        self.sock.listen(1)

        atexit.register ( shutdownAll )

        while True:
            # Wait for a connection
            self.setStatus ( "waiting" )
            self.log ( 'waiting for a connection' )
            self.connection, self.client_address = self.sock.accept()
            self.listen()
Example #3
def checkOneAnalysis():
    import argparse
    #import IPython
    argparser = argparse.ArgumentParser(
        description='print the correlations of one specific analysis')
    argparser.add_argument(
        '-d',
        '--dbpath',
        help='specify path to database [<rundir>/database.pcl]',
        type=str,
        default="<rundir>/database.pcl")
    argparser.add_argument('-a',
                           '--analysis',
                           help='print for <analysis>',
                           type=str,
                           default="CMS-SUS-19-006")
    args = argparser.parse_args()
    from smodels.experiment.databaseObj import Database
    print("[analysisCombiner] checking %s" % args.dbpath)
    db = Database(args.dbpath)
    results = db.getExpResults()
    info = getInfoFromAnaId(args.analysis, results)
    sqrts = info.sqrts
    collaboration = getExperimentName(info)
    prettyName = info.prettyName
    if args.analysis in moreComments:
        prettyName += " (%s)" % moreComments[args.analysis]
    # IPython.embed()
    print("correlations for %s: %s" % (args.analysis, prettyName))
    combs, nocombs = set(), set()
    pnames = {}
    for er in results:
        if er.globalInfo.sqrts != sqrts:
            continue
        if getExperimentName(er.globalInfo) != collaboration:
            continue
        Id = er.globalInfo.id
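        # normalize the id: drop the -eff/-agg suffixes so variants of the same analysis match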
        Id = Id.replace("-eff", "").replace("-agg", "")
        if Id == "CMS-SUS-19-006-2":
            Id = "CMS-SUS-19-006"
        if Id == args.analysis:
            continue
        pname = er.globalInfo.prettyName
        if Id in moreComments:
            pname += " (%s)" % moreComments[Id]
        pnames[Id] = pname
        cc = canCombine(info, er.globalInfo, "aggressive")
        # cc = canCombine ( pred, er.globalInfo )
        if cc:
            combs.add(Id)
        else:
            nocombs.add(Id)
    print("can combine with: ")
    for Id in combs:
        pname = pnames[Id]
        print(" `- %s: %s" % (Id, pname))
    print("cannot combine with: ")
    for Id in nocombs:
        pname = pnames[Id]
        print(" `- %s: %s" % (Id, pname))
Example #4
import sys
import pickle
import numpy as np
from smodels.experiment.databaseObj import Database

def collect():
    db = Database ( "./database.pcl" ) # , force_load = "txt" )
    ers = db.getExpResults ( dataTypes = [ "upperLimit" ], onlyWithExpected=True )
    allSs = []
    for er in ers:
        txnlist = er.datasets[0].txnameList
        for txn in txnlist:
            ct=0
            origdata = eval(txn.txnameData.origdata)
            for point in origdata:
                m = point[0]
                rul = point[1]
                ul,eul=None,None
                try:
                    ul = txn.getULFor(m, False )
                    eul = txn.getULFor(m, True )
                except Exception:
                    pass
                if ul is None or eul is None:
                    continue
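                # treat the expected 95% CL limit as 1.96 sigma, so the observed-expected
                # difference can be expressed in Gaussian standard deviations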
                sigma = eul / 1.96
                S = float ( ( ul - eul ) / sigma )
                if (S < -1.8 or S > 3.5) and ct<3:
                # if S > 10. and ct<3:
                    print ( )
                    print ( "S=%.2f for ul=%s, eul=%s sigma=%s" % ( S, ul, eul, sigma ) )
                    print ( "  at ", er.globalInfo.id, txn.txName, m, "rul", rul )
                    ct += 1
                allSs.append ( S )
                # print ( "->", er.globalInfo.id, txn, S )
    print ("all", min(allSs), np.mean(allSs), max(allSs) )
    f=open("ulSs.pcl","wb")
    pickle.dump(allSs,f)
    f.close()
    sys.exit()
Example #5
 def getPrettyName ( self, anaid ):
     """ get pretty name of ana id """
     if False: ## set to True to return the raw (old) analysis ids
         return anaid
     if not hasattr ( self, "database" ):
         from smodels.experiment.databaseObj import Database
         dbname = "./original.pcl" 
         dbname = "/home/walten/git/smodels-database"
         dbname = "/scratch-cbe/users/wolfgan.waltenberger/rundir/db31.pcl"
         self.database = Database ( dbname )
     from smodels_utils.helper.prettyDescriptions import prettyTexAnalysisName
     if ":" in anaid:
         anaid = anaid[:anaid.find(":")]
     ers = self.database.getExpResults ( analysisIDs = [ anaid ] )
     for er in ers:
        if hasattr ( er.globalInfo, "prettyName" ):
           pn = er.globalInfo.prettyName
           sqrts = er.globalInfo.sqrts.asNumber(TeV)
           ret = prettyTexAnalysisName ( pn, sqrts, dropEtmiss = True,
                                     collaboration = True, anaid = er.globalInfo.id )
           # for the 2020 paper to be consistent
           ret = ret.replace( "+ top tag", "stop" )
           ret = ret.replace( "+ 4 (1 b-)jets", "multijet" )
           # ret += " -> " + anaid
           return ret
     # print ( "found no pretty name", ers[0].globalInfo )
     return anaid
Example #6
 def __init__(self,
              walkerid,
              dbpath="./default.pcl",
              expected=False,
              select="all",
              do_combine=False):
     """
     :param do_combine: if True, then also use combined results,
                        both via simplified likelihoods and pyhf.
     """
     self.walkerid = walkerid
     self.do_combine = do_combine
     self.modifier = None
     self.select = select
     self.expected = expected
     self.rthreshold = 1.3  ## threshold for rmax
     if expected:
         from expResModifier import ExpResModifier
         self.modifier = ExpResModifier()
     force_load = None
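      # a ".pcl" path points to a pre-built binary (pickled) database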
     if dbpath.endswith(".pcl"):
         force_load = "pcl"
     ntries = 0
     while not os.path.exists(dbpath):
         ## give it a few tries
         ntries += 1
         time.sleep(ntries * 5)
         if ntries > 5:
             break
     self.database = Database(dbpath, force_load=force_load)
     self.fetchResults()
     self.combiner = Combiner(self.walkerid)
Example #7
 def testFallBack(self):
     #Test that smodels/experiment/defaultFinalStates.py is used when databaseParticles.py is missing
     dbpath = "./database_simple"
     dbOld = Database(dbpath, discard_zeroes=False, force_load='txt')
     model = dbOld.databaseParticles
     self.assertEqual(model.label,
                      'DB Final States (default)')  #Simple fallback test
Example #8
def loadDatabase(parser, db):
    """
    Load database
    
    :parameter parser: ConfigParser with path to database
    :parameter db: binary database object. If None, the database is loaded
                   according to databasePath. If True, the database is loaded
                   and text mode is forced.
    :returns: database object, database version
        
    """
    try:
        databasePath = parser.get("path", "databasePath")
        if databasePath == "micromegas":
            databasePath = installDirectory() + "/smodels-database/"
        database = db
        if database in [None, True]:
            force_load = None
            if database == True: force_load = "txt"
            database = Database(databasePath, force_load=force_load)
        databaseVersion = database.databaseVersion
    except DatabaseNotFoundException:
        logger.error("Database not found in %s" %
                     os.path.realpath(databasePath))
        sys.exit()
    return database, databaseVersion
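A minimal usage sketch for this helper (hypothetical driver code; it assumes an ini file named parameters.ini whose [path] section defines databasePath):

from configparser import ConfigParser

# hypothetical driver, not part of the original example
parser = ConfigParser()
parser.read("parameters.ini")  # assumed to provide: [path] databasePath = ./smodels-database/
database, databaseVersion = loadDatabase(parser, db=None)
print("loaded database version", databaseVersion)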
Example #9
    def testCompare(self):
        warnings.simplefilter("ignore", ResourceWarning)
        from simplyGluino_default import smodelsOutputDefault
        filename = "./testFiles/slha/simplyGluino.slha"
        port = random.choice(range(31700, 42000))
        # port = 31744
        dbfile = "database/db30.pcl"
        dbfile = "unittest"

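        # launch smodelsTools.py proxydb in a subprocess: it serves the "unittest"
        # database on the chosen port and writes the ./proxy.pcl stub used below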
        startserver = f"../smodels/tools/smodelsTools.py proxydb -p {port} -i {dbfile} -o ./proxy.pcl -r -v error"
        cmd = startserver.split(" ")
        # print ( "starting server %s" % startserver )
        myenv = os.environ.copy()
        pp = ""
        if "PYTHONPATH" in myenv:
            pp = ":" + myenv["PYTHONPATH"]
        myenv["PYTHONPATH"] = "../" + pp
        subprocess.Popen(cmd, env=myenv)

        time.sleep(3)

        db = Database("./proxy.pcl")
        outputfile = runMain(filename,
                             suppressStdout=True,
                             overridedatabase=db)
        smodelsOutput = importModule(outputfile)

        client = DatabaseClient(port=port, verbose="warn")
        client.send_shutdown()

        ignoreFields = [
            'input file', 'smodels version', 'ncpus', 'Element',
            'database version', 'Total missed xsec', 'Missed xsec long-lived',
            'Missed xsec displaced', 'Missed xsec MET',
            'Total outside grid xsec',
            'Total xsec for missing topologies (fb)',
            'Total xsec for missing topologies with displaced decays (fb)',
            'Total xsec for missing topologies with prompt decays (fb)',
            'Total xsec for topologies outside the grid (fb)'
        ]
        smodelsOutputDefault['ExptRes'] = sorted(
            smodelsOutputDefault['ExptRes'],
            key=lambda res: res['r'],
            reverse=True)
        equals = equalObjs(smodelsOutput,
                           smodelsOutputDefault,
                           allowedDiff=0.08,
                           ignore=ignoreFields,
                           fname=outputfile)
        if not equals:
            e = "simplyGluino.py != simplyGluino_default.py"
            logger.error(e)
            # raise AssertionError( e )

        self.assertTrue(equals)
        self.removeOutputs(outputfile)
Example #10
def getDatabaseVersion(protomodel, dbpath="default.pcl"):
    dbver = "???"
    if hasattr(protomodel, "dbversion"):
        dbver = protomodel.dbversion
        if "???" not in dbver:
            return dbver
    if os.path.exists(dbpath):
        ## try to get db version from db file
        from smodels.experiment.databaseObj import Database
        db = Database(dbpath)
        dbver = db.databaseVersion
    return dbver
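A hypothetical call of the helper above; with None as protomodel the hasattr check fails, so the version is read from the pickle file if it exists:

# hypothetical usage; None exercises the file-based fallback,
# "default.pcl" is simply the function's default path
ver = getDatabaseVersion(None, dbpath="default.pcl")
print("database version:", ver)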
Example #11
def getSRs():
    from smodels.experiment.databaseObj import Database
    db = Database ( "official" )
    ers = db.getExpResults( dataTypes=[ "efficiencyMap" ] )
    stats = []
    for er in ers:
        for ds in er.datasets:
            D = { "obsN": ds.dataInfo.observedN, "expectedBG": ds.dataInfo.expectedBG,
                  "bgError": ds.dataInfo.bgError, "upperLimit": ds.dataInfo.upperLimit,
                  "expectedUpperLimit": ds.dataInfo.expectedUpperLimit }
            stats.append ( D )
    return stats
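A hypothetical way of consuming the returned list; the naive Gaussian significance below is only an illustration, not part of the original code:

import math

for sr in getSRs():
    if not sr["expectedBG"] or sr["bgError"] is None:
        continue
    # naive significance: (observed - expected background) over the stat+syst uncertainty
    z = (sr["obsN"] - sr["expectedBG"]) / math.sqrt(sr["expectedBG"] + sr["bgError"]**2)
    print("obsN=%s expectedBG=%s -> z=%.2f" % (sr["obsN"], sr["expectedBG"], z))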
Example #12
 def __init__(self, inputfile, rundir, verbose="info"):
     self.inputfile = inputfile
     self.rundir = rundir
     self.nr = 0
     self.verbstring = verbose
     if type(verbose) == int:
         self.verbose = verbose
     else:
         verbose = verbose.lower()
         verbs = {"err": 10, "warn": 20, "info": 30, "debug": 40}
         self.verbose = 50
         for k, v in verbs.items():
             if k in verbose:
                 self.verbose = v
     self.database = Database(self.inputfile)
Example #13
    def __init__(self, database, force_txt = False ):
        """
        :param database: Path to the database or Database object
        """

        self._selectedExpResults = []
        load = None
        if force_txt == True:
            load = "txt"
        if isinstance(database,str):
            self.database = Database(database, load )
        elif isinstance(database,Database):
            self.database = database
        else:
            logger.error("The input must be the database location or a Database object.")
            raise SModelSError()
        self.loadAllResults()
Example #14
def loadDatabase(parser, db):
    """
    Load database

    :parameter parser: ConfigParser with path to database
    :parameter db: binary database object. If None, the database is loaded
                   according to databasePath. If True, the database is loaded
                   and text mode is forced.
    :returns: database object, database version

    """
    try:
        dp = parser.get("path", "databasePath")
        logger.error("``[path] databasePath'' in ini file is deprecated; " \
           "use ``[database] path'' instead. (See e.g. smodels/etc/parameters_default.ini)")
        parser.set("database", "path", dp)
    except (NoSectionError, NoOptionError) as e:
        ## path.databasePath not set. This is good.
        pass
    try:
        database = db
        # logger.error("database=db: %s" % database)
        if database in [None, True]:
            databasePath = parser.get("database", "path")
            checkForSemicolon(databasePath, "database", "path")
            discard_zeroes = True
            try:
                discard_zeroes = parser.getboolean("database", "discardZeroes")
            except (NoSectionError, NoOptionError) as e:
                logger.debug(
                    "database:discardZeroes is not given in config file. Defaulting to 'True'."
                )
            force_load = None
            if database == True: force_load = "txt"
            if os.path.isfile(databasePath):
                force_load = "pcl"
            database = Database(databasePath, force_load=force_load, \
                                 discard_zeroes = discard_zeroes)
        databaseVersion = database.databaseVersion
    except DatabaseNotFoundException:
        logger.error("Database not found in ``%s''" %
                     os.path.realpath(databasePath))
        sys.exit()
    return database, databaseVersion
Example #15
def getSummary():
    from smodels.experiment.databaseObj import Database
    # dbpath = "official"
    dbpath = "<rundir>/database.pcl"
    print("[analysisCombiner] checking %s" % dbpath)
    db = Database(dbpath)
    results = db.getExpResults()
    strategy = "aggressive"
    ana1 = "CMS-SUS-16-042"
    ana2 = "CMS-SUS-16-033"
    canC = canCombine(ana1, ana2, strategy, results)
    print("[analysisCombiner] can combine %s with %s: %s" %
          (ana1, ana2, str(canC)))
    ctr, combinable = 0, 0
    for x, e in enumerate(results):
        for y, f in enumerate(results):
            if y <= x:
                continue
            ctr += 1
            isUn = canCombine(e.globalInfo, f.globalInfo, strategy)
            combinable += isUn
    print("[analysisCombiner] can combine %d/%d pairs of results" %
          (combinable, ctr))
Example #16
    def __init__(self, database, force_txt = False ):
        """
        :ivar _selectedExpResults: list of experimental results loaded in the browser.
                           Can be used to hold a subset of results in the database.
                           By default all results are loaded.
        
        
        :param database: Path to the database or Database object
        """

        self._selectedExpResults = []
        load = None
        if force_txt == True:
            load = "txt"
        if isinstance(database,str):
            if database.endswith(".pcl"):
                load = "pcl"
            self.database = Database(database, load )
        elif isinstance(database,Database):
            self.database = database
        else:
            logger.error("The input must be the database location or a Database object.")
            raise SModelSError()
        self.loadAllResults()
Example #17
"""
.. module:: Example
   :synopsis: Basic main file example for using SModelS.
   
   This file must be run under the installation folder.

"""
""" Import basic functions (this file must be executed in the installation folder) """

from smodels.theory import slhaDecomposer
from smodels.theory import lheDecomposer
from smodels.tools.physicsUnits import fb, GeV
from smodels.theory.theoryPrediction import theoryPredictionsFor
from smodels.experiment.databaseObj import Database

# Set the path to the database folder
database = Database("./smodels-database/")


def main():
    """
    Main program. Displays basic use case.

    """

    # Path to input file (either a SLHA or LHE file)
    slhafile = 'inputFiles/slha/lightEWinos.slha'
    # lhefile = 'inputFiles/lhe/gluino_squarks.lhe'

    # Set main options for decomposition
    sigmacut = 0.3 * fb
    mingap = 5. * GeV
Example #18
def draw( strategy, databasepath, trianglePlot, miscol,
          diagcol, experiment, S, drawtimestamp, outputfile, nofastlim ):
    """
    :param trianglePlot: if True, then only plot the upper triangle of this
                         symmetrical matrix
    :param miscol: color to use when likelihood is missing
    :param diagcol: color to use for diagonal
    :param experiment: draw only for specific experiment ("CMS", "ATLAS", "all" )
    :param S: draw only for specific sqrts ( "8", "13", "all" )
    :param drawtimestamp: if true, put a timestamp on plot
    :param outputfile: file name of output file (matrix.png)
    :param nofastlim: if True, discard fastlim results
    """
    ROOT.gStyle.SetOptStat(0000)

    ROOT.gROOT.SetBatch()
    cols = [ ROOT.kRed+1, ROOT.kWhite, ROOT.kGreen+1, miscol, diagcol ]
    ROOT.gStyle.SetPalette(len(cols), (ctypes.c_int * len(cols))(*cols) )
    ROOT.gStyle.SetNumberContours(len(cols))

    ROOT.gStyle.SetPadLeftMargin(.25)

    sqrtses = [ 8, 13 ]
    if S not in [ "all" ]:
        sqrtses = [ int(S) ]

    colors.on = True
    setLogLevel ( "debug" )

    # dir = "/home/walten/git/smodels-database/"
    dir = databasepath
    d=Database( dir, discard_zeroes = True )
    print(d)
    analysisIds = [ "all" ]
    exps = [ "CMS", "ATLAS" ]
    if experiment in [ "CMS", "ATLAS" ]:
        analysisIds = [ experiment+"*" ]
        exps = [ experiment ]
    results = d.getExpResults( analysisIDs = analysisIds )
    if nofastlim:
        results = noFastlim ( results )
    results = sortOutDupes ( results )
    if S in [ "8", "13" ]:
        results = sortBySqrts ( results, int(S) )

    #results.sort()
    nres = len ( results )

    ROOT.c1=ROOT.TCanvas("c1","c1",1770,1540)
    ROOT.c1.SetLeftMargin(0.18)
    ROOT.c1.SetBottomMargin(0.21)
    ROOT.c1.SetTopMargin(0.06)
    ROOT.c1.SetRightMargin(0.01)
    if nres > 60:
        ROOT.c1.SetLeftMargin(0.12) ## seemed to work for 96 results
        ROOT.c1.SetBottomMargin(0.15)
        ROOT.c1.SetTopMargin(0.09)
        ROOT.c1.SetRightMargin(0.015)

    h=ROOT.TH2F ( "Correlations", "",
                  nres, 0., nres, nres, 0., nres )
    xaxis = h.GetXaxis()
    yaxis = h.GetYaxis()

    sze = 0.13 / math.sqrt ( nres )
    xaxis.SetLabelSize( 1.3*sze )
    yaxis.SetLabelSize( 1.3*sze )

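    # bins keeps, for each experiment and sqrts, the [lowest, highest] result index;
    # it is used below to place the group labels and separator lines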
    bins= { "CMS": { 8: [999,0], 13:[999,0] },
            "ATLAS": { 8: [999,0], 13: [999,0] } }

    n = len(results )
    for x,e in enumerate(results):
        label = e.globalInfo.id
        hasLikelihood = hasLLHD ( e )
        ana = analysisCombiner.getExperimentName ( e.globalInfo )
        #if not hasLikelihood:
        #    print ( "no likelihood: %s" % label )
        sqrts = int(e.globalInfo.sqrts.asNumber(TeV))
        color = ROOT.kCyan+2
        ymax=0
        if ana == "ATLAS":
            color = ROOT.kBlue+1
        if sqrts > 10.:
            color += 2
        if x < bins[ana][sqrts][0]:
            bins[ana][sqrts][0]=x
        if x > bins[ana][sqrts][1]:
            bins[ana][sqrts][1]=x
            ymax=x
        color = ROOT.kGray+2
        if len(exps)==1 and len(sqrtses)==1:
            label = label.replace("CMS-","").replace("ATLAS-","").replace("-agg","")
        label = "#color[%d]{%s}" % (color, label )
        xaxis.SetBinLabel(n-x, label )
        yaxis.SetBinLabel(x+1, label )
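        # bin content encodes the pair status: 1 = can be combined, -1 = correlated,
        # 2 = at least one likelihood missing, 3 = diagonal entry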
        for y,f in enumerate(results):
            if trianglePlot and y>x:
                continue
            isUn = analysisCombiner.canCombine ( e.globalInfo, f.globalInfo, strategy )
            # isUn = e.isUncorrelatedWith ( f )
            if isUn:
                h.SetBinContent ( n-x, y+1, 1. )
            else:
                h.SetBinContent ( n-x, y+1, -1. )
            if not hasLikelihood or not hasLLHD ( f ): ## has no llhd? cannot be combined
                h.SetBinContent ( n-x, y+1, 2. )
            if y==x:
                h.SetBinContent ( n-x, y+1, 3. )

    h.Draw("col")
    ROOT.bins, ROOT.xbins, ROOT.lines = {}, {}, []
    if len(exps)==1 and len(sqrtses)==1:
        ROOT.t1 = ROOT.TLatex()
        ROOT.t1.SetNDC()
        ROOT.t1.DrawLatex ( .45, .95, "%s, %d TeV" % ( exps[0], sqrtses[0] ) )
        
    for ana in exps:
        for sqrts in sqrtses:
            name= "%s%d" % ( ana, sqrts )
            ROOT.bins[name] = ROOT.TLatex()
            ROOT.bins[name].SetTextColorAlpha(ROOT.kBlack,.7)
            ROOT.bins[name].SetTextSize(.025)
            ROOT.bins[name].SetTextAngle(90.)
            ROOT.xbins[name] = ROOT.TLatex()
            ROOT.xbins[name].SetTextColorAlpha(ROOT.kBlack,.7)
            ROOT.xbins[name].SetTextSize(.025)
            xcoord = .5 * ( bins[ana][sqrts][0] + bins[ana][sqrts][1] )
            ycoord = n- .5 * ( bins[ana][sqrts][0] + bins[ana][sqrts][1] ) -3
            if len(sqrtses)>1 or len(exps)>1:
                ROOT.bins[name].DrawLatex(-4,xcoord-3,"#splitline{%s}{%d TeV}" % ( ana, sqrts ) )
                ROOT.xbins[name].DrawLatex(ycoord,-5,"#splitline{%s}{%d TeV}" % ( ana, sqrts ) )
            yt = bins[ana][sqrts][1] +1
            extrudes = 3 # how far does the line extrude into tick labels?
            xmax = n
            if trianglePlot:
                xmax = n-yt
            line = ROOT.TLine ( -extrudes, yt, xmax, yt )
            line.SetLineWidth(2)
            line.Draw()
            ymax = n
            if trianglePlot:
                ymax = yt
            xline = ROOT.TLine ( n-yt, ymax, n-yt, -extrudes )
            xline.SetLineWidth(2)
            xline.Draw()
            ROOT.lines.append ( line )
            ROOT.lines.append ( xline )
    line = ROOT.TLine ( -extrudes, 0, xmax, 0 )
    line.SetLineWidth(2)
    line.Draw()
    xline = ROOT.TLine ( n, ymax, n, -extrudes )
    xline.SetLineWidth(2)
    xline.Draw()
    ROOT.lines.append ( line )
    ROOT.lines.append ( xline )
    h.LabelsOption("v","X")
    if trianglePlot:
        for i in range(n+1):
            wline = ROOT.TLine ( n, i, n-i, i )
            wline.SetLineColor ( ROOT.kWhite )
            wline.Draw ()
            ROOT.lines.append ( wline )
            vline = ROOT.TLine ( i, n-i, i, n )
            vline.SetLineColor ( ROOT.kWhite )
            vline.Draw ()
        ROOT.lines.append ( vline )
        ROOT.title = ROOT.TLatex()
        ROOT.title.SetNDC()
        ROOT.title.SetTextSize(.025 )
        ROOT.title.DrawLatex(.28,.89, "#font[132]{Correlations between analyses, combination strategy: ,,%s''}" % strategy )
    ROOT.boxes = []
    if trianglePlot:
        for i,b in enumerate ( [ "pair is uncorrelated", "pair is correlated", "likelihood is missing" ] ):
            bx = 51
            by = 68 - 3*i
            box = ROOT.TBox(bx,by,bx+1,by+1)
            c = cols[i]
            if i > 0:
                c = cols[i+1]
            box.SetFillColor ( c )
            box.Draw()
            ROOT.boxes.append ( box )
            l = ROOT.TLatex()
            l.SetTextSize(.022)
            #if i == 2:
            #    c = 16
            l.SetTextColor ( c )
            b="#font[132]{%s}" % b ## add font
            l.DrawLatex ( bx+2, by, b )
            ROOT.boxes.append ( l )
    l = ROOT.TLatex()
    l.SetNDC()
    l.SetTextColor(ROOT.kGray+1)
    l.SetTextSize(.015)
    if drawtimestamp:
        l.DrawLatex ( .01, .01, "plot produced %s from database v%s" % \
                      ( time.strftime("%h %d %Y" ), d.databaseVersion ) )
    ROOT.gPad.SetGrid()
    if "@M" in outputfile:
        modifiers = ""
        if len(exps)==1:
            modifiers += exps[0]
        if len(sqrtses)==1:
            modifiers += str(sqrtses[0])
        outputfile = outputfile.replace("@M",modifiers)
    print ( "Plotting to %s" % outputfile )
    ROOT.c1.Print( outputfile )
Example #19
#Set up the path to SModelS installation folder if running on a different folder
import sys, os
sys.path.append(os.path.join(os.getenv("HOME"), "smodels/"))

# In[2]:

#Import those parts of smodels that are needed for this exercise
from smodels.tools.physicsUnits import GeV
from smodels.experiment.databaseObj import Database

# In[3]:

## Load the database:
databasePath = os.path.join(os.getenv("HOME"), "smodels-database/")
db = Database(databasePath)

# ## Look up upper limit for an Upper Limit-type result:

# In[4]:

#Select desired result:
resultID = ["CMS-PAS-SUS-13-016"]
txname = ["T1tttt"]
expResult = db.getExpResults(analysisIDs=resultID,
                             txnames=txname,
                             dataTypes='upperLimit')[0]
print('selected result:', expResult)

# In[5]:
Example #20
#!/usr/bin/env python3
""" just test that the fake signal gets injected in the right part of the UL maps """

import math
from smodels.experiment.databaseObj import Database
from smodels.tools.physicsUnits import GeV

# ./ptools/fetchFromClip.py -R rundir.frozen1 --database
db = Database("default.pcl")
print("db", db.databaseVersion)
er = db.getExpResults(["CMS-SUS-19-006"])[0]
ds = er.datasets[0]
print(ds.txnameList)
txn = ds.txnameList[6]
print(txn)


def distance(mass):
    # return math.sqrt ( (mass[0] - 735.)**2 + (mass[1]-162.6)**2  )
    return math.sqrt((mass[0] - 1166.)**2 + (mass[1] - 162.6)**2)


masses = []
for mLSP in range(100, 240, 50):
    for msquark in range(850, 1300, 50):
        masses.append([msquark, mLSP])
        # masses.append (  [[msquark*GeV,mLSP*GeV],[msquark*GeV,mLSP*GeV]] )
for mass in masses:
    mvec = [[mass[0] * GeV, mass[1] * GeV], [mass[0] * GeV, mass[1] * GeV]]
    oUL = txn.getULFor(mvec, expected=False)
    eUL = txn.getULFor(mvec, expected=True)
Example #21
from smodels.tools import runtime
runtime.modelFile = 'smodels.share.models.mssm'
#runtime.modelFile = 'mssmQNumbers.slha'

from smodels.theory import decomposer
from smodels.tools.physicsUnits import fb, GeV, TeV
from smodels.theory.theoryPrediction import theoryPredictionsFor
from smodels.experiment.databaseObj import Database
from smodels.tools import coverage
from smodels.tools.smodelsLogging import setLogLevel
from smodels.particlesLoader import BSMList
from smodels.share.models.SMparticles import SMList
from smodels.theory.model import Model
setLogLevel("info")

# Set the path to the database
database = Database("official")


def main():
    """
    Main program. Displays basic use case.
    """
    model = Model(BSMparticles=BSMList, SMparticles=SMList)
    # Path to input file (either a SLHA or LHE file)
    #     lhefile = 'inputFiles/lhe/gluino_squarks.lhe'
    slhafile = 'inputFiles/slha/lightEWinos.slha'
    #     model.updateParticles(inputFile=lhefile)
    model.updateParticles(inputFile=slhafile)

    # Set main options for decomposition
    sigmacut = 0.01 * fb
Example #22
    def RunSModelS(self,SLHAFilePath,SummaryFilePath):
        # Set the path to the database
        database = Database("/home/oo1m20/softwares/smodels-1.2.2/smodels-database")

        self.SummaryFilePath = os.path.abspath(SummaryFilePath)

        #Define your model (list of rEven and rOdd particles)
        particlesLoader.load( 'smodels.share.models.secumssm' ) #Make sure all the model particles are up-to-date
    
        # Path to input file (either a SLHA or LHE file)
        self.SLHAFilePath = SLHAFilePath
        slhafile = self.SLHAFilePath
        #lhefile = 'inputFiles/lhe/gluino_squarks.lhe'

        # Set main options for decomposition
        sigmacut = 0.01 * fb
        mingap = 5. * GeV

    
        # Decompose model (use slhaDecomposer for SLHA input or lheDecomposer for LHE input)
        slhaInput = True
        if slhaInput:
            toplist = slhaDecomposer.decompose(slhafile, sigmacut, doCompress=True, doInvisible=True, minmassgap=mingap)
        else:
            toplist = lheDecomposer.decompose(lhefile, doCompress=True,doInvisible=True, minmassgap=mingap)
        # Access basic information from decomposition, using the topology list and topology objects:
        f= open(self.SummaryFilePath,"a+")
        print( "\n Decomposition Results: ", file=f )
        print( "\t  Total number of topologies: %i " %len(toplist), file=f )
        nel = sum([len(top.elementList) for top in toplist])
        print( "\t  Total number of elements = %i " %nel , file=f)
        #Print information about the m-th topology (if it exists):
        m = 2
        if len(toplist) > m:
            top = toplist[m]
            print( "\t\t %i-th topology  = " %m,top,"with total cross section =",top.getTotalWeight(), file=f )
            #Print information about the n-th element in the m-th topology:
            n = 0
            el = top.elementList[n]
            print( "\t\t %i-th element from %i-th topology  = " %(n,m),el, end="", file=f )
            print( "\n\t\t\twith final states =",el.getFinalStates(),"\n\t\t\twith cross section =",el.weight,"\n\t\t\tand masses = ",el.getMasses(), file=f )
            
        # Load the experimental results to be used.
        # In this case, all results are employed.
        listOfExpRes = database.getExpResults()

        # Print basic information about the results loaded.
        # Count the number of loaded UL and EM experimental results:
        nUL, nEM = 0, 0
        for exp in listOfExpRes:
            expType = exp.getValuesFor('dataType')[0]
            if expType == 'upperLimit':
                nUL += 1
            elif  expType == 'efficiencyMap':
                nEM += 1
        print( "\n Loaded Database with %i UL results and %i EM results " %(nUL,nEM), file=f )

        # Compute the theory predictions for each experimental result and print them:
        print("\n Theory Predictions and Constraints:", file=f)
        rmax = 0.
        bestResult = None
        for expResult in listOfExpRes:
            predictions = theoryPredictionsFor(expResult, toplist, combinedResults=False, marginalize=False)
            if not predictions: continue # Skip if there are no constraints from this result
            print('\n %s ' %expResult.globalInfo.id, file=f)
            for theoryPrediction in predictions:
                dataset = theoryPrediction.dataset
                datasetID = dataset.dataInfo.dataId            
                mass = theoryPrediction.mass
                txnames = [str(txname) for txname in theoryPrediction.txnames]
                PIDs =  theoryPrediction.PIDs         
                print( "------------------------", file=f )
                print( "Dataset = ", datasetID, file=f )   #Analysis name
                print( "TxNames = ", txnames, file=f )  
                print( "Prediction Mass = ",mass, file=f )   #Value for average cluster mass (average mass of the elements in cluster)
                print( "Prediction PIDs = ",PIDs, file=f )   #Value for average cluster mass (average mass of the elements in cluster)
                print( "Theory Prediction = ",theoryPrediction.xsection, file=f )  #Signal cross section
                print( "Condition Violation = ",theoryPrediction.conditions, file=f ) #Condition violation values
              
                # Get the corresponding upper limit:
                print( "UL for theory prediction = ",theoryPrediction.upperLimit, file=f )

                # Compute the r-value
                r = theoryPrediction.getRValue()
                print( "r = ",r , file=f)
                #Compute likelihood and chi^2 for EM-type results:
                if dataset.dataInfo.dataType == 'efficiencyMap':
                    theoryPrediction.computeStatistics()
                    print( 'Chi2, likelihood=', theoryPrediction.chi2, theoryPrediction.likelihood, file=f )
                if r > rmax:
                    rmax = r
                    bestResult = expResult.globalInfo.id

        # Print the most constraining experimental result
        print( "\nThe largest r-value (theory/upper limit ratio) is ",rmax, file=f )
        if rmax > 1.:
            print( "(The input model is likely excluded by %s)" %bestResult, file=f )
        else:
            print( "(The input model is not excluded by the simplified model results)", file=f )

        f.close()
Example #23
#!/usr/bin/env python3
"""
.. module:: databaseLoader
   :synopsis: When running the complete test suite, we need to
              load the database only once

.. moduleauthor:: Wolfgang Waltenberger <*****@*****.**>

"""

import sys
sys.path.insert(0, "../")
from smodels.experiment.databaseObj import Database
from smodels.installation import version
ver = "".join(map(str, version(True)[:3]))
#dbname="./database/db%d0.pcl" % int ( sys.version[0] )
dbname = "http://smodels.hephy.at/database/unittest%s" % ver
database = Database(dbname, discard_zeroes=False)

if __name__ == "__main__":
    print(database)
Example #24
# In[1]:

#Set up the path to SModelS installation folder if running on a different folder
import sys, os
sys.path.append(os.path.join(os.getenv("HOME"), "smodels/"))

# In[2]:

from smodels.experiment.databaseObj import Database
from smodels.tools.physicsUnits import GeV

# In[3]:

## Load the database:
dbPath = os.path.join(os.getenv("HOME"), "smodels-database/")
database = Database(dbPath)

# ## How to select results from one publication (or conference note)

# In[6]:

#Select only the CMS SUS-12-028 conference note
expID = ["CMS-SUS-12-028"]

# In[7]:

#Loads the selected analyses
#(The INFO tells you that superseded analyses are not loaded, see below)
results = database.getExpResults(analysisIDs=expID)

# In[9]:
Example #25
 if args.prior:
     normalizePrior()
     sys.exit()
 if args.upper_limits and args.efficiencyMaps:
     print("[combiner] -u and -e are mutually exclusive")
     sys.exit()
 from smodels.experiment.databaseObj import Database
 from smodels.theory import decomposer
 from smodels.particlesLoader import BSMList
 from smodels.share.models.SMparticles import SMList
 from smodels.theory.model import Model
 from smodels.tools.physicsUnits import fb
 model = Model(BSMparticles=BSMList, SMparticles=SMList)
 model.updateParticles(inputFile=args.slhafile)
 print("[combiner] loading database", args.database)
 db = Database(args.database)
 print("[combiner] done loading database")
 anaIds = ["CMS-SUS-16-033"]
 anaIds = ["all"]
 dts = ["all"]
 if args.upper_limits:
     dts = ["upperLimit"]
 if args.efficiencyMaps:
     dts = ["efficiencyMap"]
 listOfExpRes = db.getExpResults(analysisIDs=anaIds,
                                 dataTypes=dts,
                                 onlyWithExpected=True)
 smses = decomposer.decompose(model, .01 * fb)
 #print ( "[combiner] decomposed into %d topos" % len(smses) )
 from smodels.theory.theoryPrediction import theoryPredictionsFor
 combiner = Combiner()
Example #26
#!/usr/bin/env python3
"""
.. module:: databaseLoader
   :synopsis: When running the complete test suite, we need to
              load the database only once

.. moduleauthor:: Wolfgang Waltenberger <*****@*****.**>

"""

import sys
sys.path.insert(0, "../")
from smodels.experiment.databaseObj import Database

# dbpath = "./database"
# dbpath = "../../smodels-database"
dbpath = "unittest"

database = Database(dbpath, discard_zeroes=False)

if __name__ == "__main__":
    print(database)
Example #27
 def testLoadLatest(self):
     dblatest=Database ("latest")
     latestver = dblatest.databaseVersion.replace(".","")
     from databaseLoader import database
     thisver = database.databaseVersion.replace("unittest","").replace(".","")
     self.assertTrue ( latestver[:2]==thisver[:2] )
Example #28
import os
from smodels.installation import installDirectory
from smodels.tools.physicsUnits import fb, GeV
from smodels.theory import slhaDecomposer
from smodels.theory.theoryPrediction import theoryPredictionsFor
from smodels.experiment.databaseObj import Database

# In[3]:

#Define the SLHA input file name
filename = "%s/inputFiles/slha/gluino_squarks.slha" % installDirectory()

# In[4]:

#Load the database, do the decomposition and compute theory predictions:
#(Look at the theory predictions HowTo to learn how to compute theory predictions)
databasepath = os.path.join(os.getenv("HOME"), "smodels-database/")
database = Database(databasepath)
expResults = database.getExpResults()
topList = slhaDecomposer.decompose(filename,
                                   sigcut=0.03 * fb,
                                   doCompress=True,
                                   doInvisible=True,
                                   minmassgap=5 * GeV)
allThPredictions = [theoryPredictionsFor(exp, topList) for exp in expResults]

# In[5]:

#Print the value of each theory prediction for each experimental
#result and the corresponding upper limit (see the obtain experimental upper limits HowTo to learn how
#to compute the upper limits).
#Also print the expected upper limit, if available
for thPreds in allThPredictions: