def testSelectors(self):
    """Check that useNonValidated toggles inclusion of non-validated results."""
    from databaseLoader import database
    ids = ['*:8*TeV', 'CMS-PAS-SUS-15-002', 'CMS-PAS-SUS-16-024']
    validated = database.getExpResults(analysisIDs=ids, useNonValidated=False)
    nonval = database.getExpResults(analysisIDs=ids, useNonValidated=True)
    # Exactly one additional (non-validated) result appears when the filter is off.
    self.assertTrue(len(validated) == 8)
    self.assertTrue(len(nonval) == 9)
def testCoordinateTransfInclusive(self):
    """ test the transformation of data into coordinates, back into data,
    using inclusive results """

    def roundTrip(tx, points):
        """Map points -> PCA coordinates -> points using the txname data."""
        coords = tx.txnameData.dataToCoordinates(points, tx.txnameData._V,
                                                 tx.txnameData.delta_x)
        return tx.txnameData.coordinatesToData(coords, tx.txnameData._V,
                                               tx.txnameData.delta_x)

    def flatNumbers(arr):
        """Flatten to plain numbers; inclusive '*' entries map to -1."""
        return np.array([x.asNumber(GeV) if str(x) != '*' else -1
                         for x in flattenArray(arr)])

    # Test with an inclusive result:
    expRes = database.getExpResults(analysisIDs=["CMS-EXO-13-006"],
                                    datasetIDs=['c000'], txnames=["THSCPM4"])
    txname = expRes[0].datasets[0].txnameList[0]
    initial = [[(300. * GeV, 1e-16 * GeV), 100. * GeV],
               [(300. * GeV, 1e-16 * GeV), 100. * GeV]]
    data = np.array(roundTrip(txname, initial), dtype=object)
    newInitial = np.array(['*', [300. * GeV, 100. * GeV]], dtype=object)
    self.assertEqual(data.shape, newInitial.shape)
    diff = np.linalg.norm(flatNumbers(data) - flatNumbers(newInitial))
    self.assertAlmostEqual(diff, 0.)

    # Test with another type of inclusive result:
    expRes = database.getExpResults(analysisIDs=["CMS-EXO-13-006"],
                                    datasetIDs=['c000'], txnames=["THSCPM7"])
    txname = expRes[0].datasets[0].txnameList[0]
    initial = [[(300. * GeV, 1e-16 * GeV), 50. * GeV],
               [(200. * GeV, 1e-18 * GeV), 100. * GeV, 1. * GeV]]
    data = np.array(roundTrip(txname, initial), dtype=object)
    newInitial = np.array([[300. * GeV, 50. * GeV],
                           [200. * GeV, 100. * GeV, 1. * GeV]], dtype=object)
    self.assertEqual(data.shape, newInitial.shape)
    diff = np.linalg.norm(flatNumbers(data) - flatNumbers(newInitial))
    self.assertAlmostEqual(diff, 0.)
def testExpectedFails(self):
    """The expected UL for this T2bb mass point is unavailable, so
    getValueFor must return None."""
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    datasetIDs=[None], txnames=["T2bb"])
    txname = expRes[0].datasets[0].txnameList[0]
    masses = [[650.0 * GeV, 50.0 * GeV], [650.0 * GeV, 50.0 * GeV]]
    self.assertTrue(txname.getValueFor(masses, expected=True) is None)
def checkPrediction(self, slhafile, expID, expectedValues, datasetID):
    """Decompose slhafile with a reduced BSM model and compare every theory
    prediction against expectedValues (popped in order, 1% tolerance)."""
    # Restrict the BSM content to the particles relevant for this check.
    reducedModel = [ptc for ptc in BSMList
                    if abs(ptc.pdg) in [1000011, 1000012]]
    model = Model(reducedModel, SMList)
    model.updateParticles(slhafile)
    self.configureLogger()
    smstoplist = decomposer.decompose(model, 0. * fb, doCompress=True,
                                      doInvisible=True, minmassgap=5. * GeV)
    expresults = database.getExpResults(analysisIDs=expID,
                                        datasetIDs=datasetID)
    for expresult in expresults:
        for pred in theoryPredictionsFor(expresult, smstoplist):
            predval = pred.xsection.value
            expval = expectedValues.pop()
            delta = expval * 0.01  # 1% relative tolerance
            self.assertAlmostEqual(predval.asNumber(fb), expval, delta=delta)
    # Every expected value must have been consumed.
    self.assertTrue(len(expectedValues) == 0)
def testCombinedResult(self):
    """Check combined (aggregated) results for pure T1, pure T5 and a mixed
    scenario, comparing predicted cross sections and expected r-values.

    Fix: removed the dead `if True:` guard around the temp-file cleanup.
    """
    predXSecs, rvalues = {}, {}
    for case in ["T1", "T5", "mixed"]:
        filename = self.createSLHAFile(case=case)
        deco = decompose(filename)
        expRes = database.getExpResults(
            analysisIDs=["CMS-SUS-16-050-agg"])[0]
        tp = theoryPredictionsFor(expRes, deco, useBestDataset=False,
                                  combinedResults=True)
        for t in tp:
            predXSecs[case] = t.xsection.value
            rvalues[case] = t.getRValue(expected=True)
        os.unlink(filename)  # clean up the temporary SLHA file
    ## first test: the theory prediction of the mixed scenario should be 25% of the sum
    ## 25%, because the total cross section is a fixed quantity, and half of the mixed scenario
    ## goes into asymmetric branches which we miss out on.
    self.assertAlmostEqual((predXSecs["T1"] + predXSecs["T5"]).asNumber(fb),
                           (4 * predXSecs["mixed"]).asNumber(fb), 2)
    ## second test: the r value of the mixed scenario * 2 must be between the r values
    ## of the pure scenarios. The factor of two comes from the fact, that we loose 50%
    ## to asymmetric branches
    self.assertTrue(rvalues["T5"] < 2 * rvalues["mixed"] < rvalues["T1"])
def testDatabaseInt(self):
    """Test the database with intermediate states."""
    # Build two identical topologies differing only in the first odd
    # particle of each branch (gluino vs chargino).
    c1 = mssm.c1
    n1 = mssm.n1
    g = mssm.gluino
    n1.mass = 100 * GeV
    g.mass = 1000 * GeV
    c1.mass = 500 * GeV
    el1 = Element(info="[[[q,q]],[[q,q]]]", model=finalStates)
    el2 = Element(info="[[[q,q]],[[q,q]]]", model=finalStates)
    el1.branches[0].oddParticles = [g, n1]
    el1.branches[1].oddParticles = [g, n1]
    el2.branches[0].oddParticles = [c1, n1]
    el2.branches[1].oddParticles = [c1, n1]
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2016-081"],
                                    datasetIDs=[None], txnames=["T5Disp"])
    txname = expRes[0].datasets[0].txnameList[0]
    # el1 should match, since the txname requires gluinos
    newEl = txname.hasElementAs(el1)
    self.assertEqual(newEl, el1)
    self.assertNotEqual(newEl, el2)
    # el2 should not match, since the txname requires gluinos
    newEl = txname.hasElementAs(el2)
    self.assertEqual(False, newEl)
def runPrinterMain(self, slhafile, mprinter, addTopList=False):
    """
    Main program. Displays basic use case.
    """
    runtime.modelFile = 'mssm'
    reload(particlesLoader)
    # Main options for decomposition:
    sigmacut = 0.03 * fb
    mingap = 5. * GeV
    # Decompose the model:
    model = Model(BSMList, SMList)
    model.updateParticles(slhafile)
    smstoplist = decomposer.decompose(model, sigmacut, doCompress=True,
                                      doInvisible=True, minmassgap=mingap)
    # Add the decomposition result to the printers
    if addTopList:
        mprinter.addObj(smstoplist)
    listOfExpRes = database.getExpResults(
        analysisIDs=['*:8*TeV', 'CMS-PAS-SUS-15-002', 'CMS-PAS-SUS-16-024'])
    # Compute the theory predictions for each analysis
    allPredictions = []
    for expResult in listOfExpRes:
        predictions = theoryPredictionsFor(expResult, smstoplist)
        if not predictions:
            continue
        allPredictions += predictions._theoryPredictions
    # Compute statistics for EM predictions that carry an expected UL.
    for theoPred in allPredictions:
        if theoPred.dataType() == 'efficiencyMap' and \
                hasattr(theoPred, 'expectedUL') and \
                theoPred.expectedUL is not None:
            theoPred.computeStatistics()
    maxcond = 0.2
    theoryPredictions = TheoryPredictionList(allPredictions, maxcond)
    mprinter.addObj(theoryPredictions)
    # Add coverage information:
    coverageInfo = coverage.Uncovered(smstoplist)
    mprinter.addObj(coverageInfo)
    # Add additional information:
    databaseVersion = database.databaseVersion
    outputStatus = ioObjects.OutputStatus(
        [1, 'Input file ok'], slhafile,
        {'sigmacut': sigmacut.asNumber(fb),
         'minmassgap': mingap.asNumber(GeV),
         'maxcond': maxcond}, databaseVersion)
    outputStatus.status = 1
    mprinter.addObj(outputStatus)
    mprinter.flush()
def testGoodFile(self):
    """The TChiWZoff condition Cgtr(...) must evaluate to zero for this file."""
    filename = "./testFiles/slha/lightEWinos.slha"
    topolist = slhaDecomposer.decompose(filename, doCompress=True,
                                        doInvisible=True, minmassgap=5 * GeV)
    analyses = database.getExpResults(txnames=["TChiWZoff"])
    theoryPrediction = theoryPredictionsFor(analyses[0], topolist)[0]
    conditionViolation = theoryPrediction.conditions
    self.assertEqual(
        conditionViolation[
            'Cgtr([[[mu+,mu-]],[[l,nu]]],[[[e+,e-]],[[l,nu]]])'], 0.)
def testExpectedFails(self):
    """Expected UL must be unavailable (None) for this T2bb mass point."""
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    datasetIDs=[None], txnames=["T2bb"])
    tx = expRes[0].datasets[0].txnameList[0]
    point = [[650.0 * GeV, 50.0 * GeV], [650.0 * GeV, 50.0 * GeV]]
    expected = tx.getValueFor(point, expected=True)
    self.assertTrue(expected is None)
def testCoordinateTransf(self):
    """ test the transformation of data into coordinates, back into data """

    def roundTrip(tx, points):
        """Map points -> PCA coordinates -> points using the txname data."""
        coords = tx.txnameData.dataToCoordinates(points, tx.txnameData._V,
                                                 tx.txnameData.delta_x)
        return tx.txnameData.coordinatesToData(coords, tx.txnameData._V,
                                               tx.txnameData.delta_x)

    def flatNumbers(arr):
        """Flatten a (possibly nested) array of unum masses to floats."""
        return np.array([x.asNumber(GeV) for x in flattenArray(arr)])

    # Test with a regular mass array:
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    datasetIDs=[None], txnames=["T2bb"])
    txname = expRes[0].datasets[0].txnameList[0]  # T2bb
    initial = [[300. * GeV, 100. * GeV], [300. * GeV, 100. * GeV]]
    data = np.array(roundTrip(txname, initial))
    initial = np.array(initial)
    self.assertEqual(data.shape, initial.shape)
    diff = np.linalg.norm(flatNumbers(data) - flatNumbers(initial))
    self.assertAlmostEqual(diff, 0.)

    # Test with a mass array containing (mass, width) tuples:
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2016-08"],
                                    datasetIDs=[None], txnames=["T5Disp"])
    txname = expRes[0].datasets[0].txnameList[0]
    initial = [[(300. * GeV, 1e-16 * GeV), 100. * GeV],
               [(300. * GeV, 1e-16 * GeV), 100. * GeV]]
    data = np.array(roundTrip(txname, initial), dtype=object)
    initial = np.array(initial, dtype=object)
    self.assertEqual(data.shape, initial.shape)
    diff = np.linalg.norm(flatNumbers(data) - flatNumbers(initial))
    self.assertAlmostEqual(diff, 0.)
def testOutofBounds(self):
    """A mass point outside the validated grid must yield no upper limit.

    Fix: replaced the non-idiomatic `ul == None` check with assertIsNone.
    """
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    datasetIDs=[None], txnames=["T6bbWW"])
    ul = expRes[0].getUpperLimitFor(
        txname="T6bbWW",
        mass=[[400 * GeV, 250 * GeV, 100 * GeV],
              [400 * GeV, 250 * GeV, 100 * GeV]])
    self.assertIsNone(ul)
def testCascadeDecay(self):
    """Check the interpolated UL for a compressed cascade-decay mass point."""
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    datasetIDs=[None], txnames=["T6bbWW"])
    masses = [[150. * GeV, 140. * GeV, 135. * GeV],
              [150. * GeV, 140. * GeV, 135. * GeV]]
    ul = expRes[0].getUpperLimitFor(txname="T6bbWW",
                                    mass=masses).asNumber(pb)
    self.assertAlmostEqual(ul, 324.682)
def testExpected(self):
    """Observed and expected ULs must differ by 55-60% at this mass point."""
    expRes = database.getExpResults(analysisIDs=["CMS-PAS-SUS-12-026"],
                                    datasetIDs=[None], txnames=["T1tttt"])
    txname = expRes[0].datasets[0].txnameList[0]
    m = [[650.0 * GeV, 50.0 * GeV], [650.0 * GeV, 50.0 * GeV]]
    observed = txname.getValueFor(m, expected=False)
    expected = txname.getValueFor(m, expected=True)
    delta = abs(((observed - expected) / observed).asNumber())
    self.assertTrue(.55 < delta < .60)
def testExpected(self):
    """The relative observed/expected UL difference lies between 55% and 60%."""
    expRes = database.getExpResults(analysisIDs=["CMS-PAS-SUS-12-026"],
                                    datasetIDs=[None], txnames=["T1tttt"])
    tx = expRes[0].datasets[0].txnameList[0]
    point = [[650.0 * GeV, 50.0 * GeV], [650.0 * GeV, 50.0 * GeV]]
    obs = tx.getValueFor(point, expected=False)
    exp = tx.getValueFor(point, expected=True)
    rel = abs(((obs - exp) / obs).asNumber())
    self.assertTrue(rel > .55 and rel < .60)
def testDirectDecay(self):
    """Check the interpolated UL for a simple direct-decay T2bb point."""
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    datasetIDs=[None], txnames=["T2bb"])
    point = [[400 * GeV, 100 * GeV], [400 * GeV, 100 * GeV]]
    ul = expRes[0].getUpperLimitFor(txname="T2bb", mass=point).asNumber(pb)
    self.assertAlmostEqual(ul, 0.0608693)
def runPrinterMain(self, slhafile, mprinter, addTopList=False):
    """
    Main program. Displays basic use case.
    """
    runtime.modelFile = 'mssm'
    reload(particlesLoader)
    # Main decomposition options:
    sigmacut = 0.03 * fb
    mingap = 5. * GeV
    # Decompose the model (use slhaDecomposer for SLHA input or
    # lheDecomposer for LHE input):
    smstoplist = slhaDecomposer.decompose(slhafile, sigmacut,
                                          doCompress=True, doInvisible=True,
                                          minmassgap=mingap)
    if addTopList:
        # Add the decomposition result to the printers
        mprinter.addObj(smstoplist)
    listOfExpRes = database.getExpResults(
        analysisIDs=['*:8*TeV', 'CMS-PAS-SUS-15-002', 'CMS-PAS-SUS-16-024'])
    # Compute the theory predictions for each analysis
    allPredictions = []
    for expResult in listOfExpRes:
        predictions = theoryPredictionsFor(expResult, smstoplist)
        if predictions:
            allPredictions += predictions._theoryPredictions
    maxcond = 0.2
    results = ioObjects.ResultList(allPredictions, maxcond)
    mprinter.addObj(results)
    # Add coverage information:
    mprinter.addObj(coverage.Uncovered(smstoplist))
    # Add additional information:
    databaseVersion = database.databaseVersion
    outputStatus = ioObjects.OutputStatus(
        [1, 'Input file ok'], slhafile,
        {'sigmacut': sigmacut.asNumber(fb),
         'minmassgap': mingap.asNumber(GeV),
         'maxcond': maxcond}, databaseVersion)
    outputStatus.status = 1
    mprinter.addObj(outputStatus)
    mprinter.flush()
def testInterpolation(self):
    """Check UL interpolation at two T2bb grid points."""
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    datasetIDs=[None], txnames=["T2bb"])
    txname = expRes[0].datasets[0].txnameList[0]  # T2bb
    result = txname.txnameData.getValueFor([[300. * GeV, 100. * GeV],
                                            [300. * GeV, 100. * GeV]])
    self.assertAlmostEqual(result.asNumber(pb), 0.162457)
    result = txname.txnameData.getValueFor([[300. * GeV, 125. * GeV],
                                            [300. * GeV, 125. * GeV]])
    self.assertAlmostEqual(result.asNumber(pb), 0.237745)
def testIntegration(self):
    """Decompose simplyGluino and run checkAnalysis on the T1 analyses.

    Fix: replaced the non-idiomatic `type(x) != list` with isinstance.
    """
    slhafile = './testFiles/slha/simplyGluino.slha'
    self.configureLogger()
    smstoplist = slhaDecomposer.decompose(slhafile, .1 * fb, doCompress=True,
                                          doInvisible=True,
                                          minmassgap=5. * GeV)
    listofanalyses = database.getExpResults(
        analysisIDs=["ATLAS-SUSY-2013-02", "CMS-SUS-13-012"], txnames=["T1"])
    # getExpResults may return a single result; normalize to a list.
    if not isinstance(listofanalyses, list):
        listofanalyses = [listofanalyses]
    for analysis in listofanalyses:
        self.checkAnalysis(analysis, smstoplist)
def testInterpolation(self):
    """Check UL interpolation at two T2bb grid points."""
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    datasetIDs=[None], txnames=["T2bb"])
    tx = expRes[0].datasets[0].txnameList[0]  # T2bb
    # Same mother mass, two different daughter masses with known reference ULs.
    for daughter, ref in ((100., 0.162457), (125., 0.237745)):
        point = [[300. * GeV, daughter * GeV], [300. * GeV, daughter * GeV]]
        result = tx.txnameData.getValueFor(point)
        self.assertAlmostEqual(result.asNumber(pb), ref)
def testGoodFile(self):
    """Each matched analysis must yield exactly one prediction with the
    expected element IDs."""
    listOfIDs = {'ATLAS-CONF-2013-037': [31, 32, 33, 34, 27, 28, 29, 30],
                 'ATLAS-SUSY-2013-05': [26]}
    filename = "./testFiles/slha/higgsinoStop.slha"
    topoList = slhaDecomposer.decompose(filename, doCompress=True,
                                        doInvisible=True, minmassgap=5 * GeV)
    resultlist = database.getExpResults(
        analysisIDs=['*:8*TeV', 'CMS-PAS-SUS-15-002', 'CMS-PAS-SUS-16-024'])
    for res in resultlist:
        theorypredictions = theoryPredictionsFor(res, topoList)
        if not theorypredictions:
            continue
        self.assertEqual(len(theorypredictions), 1)
        tpIDs = theorypredictions[0].IDs
        self.assertEqual(sorted(tpIDs), sorted(listOfIDs[res.globalInfo.id]))
def testGoodFile(self):
    """The TChiWZoff condition must be exactly fulfilled (violation == 0)."""
    slhafile = "./testFiles/slha/lightEWinos.slha"
    topos = slhaDecomposer.decompose(slhafile, doCompress=True,
                                     doInvisible=True, minmassgap=5 * GeV)
    analyses = database.getExpResults(txnames=["TChiWZoff"])
    pred = theoryPredictionsFor(analyses[0], topos)[0]
    key = 'Cgtr([[[mu+,mu-]],[[l,nu]]],[[[e+,e-]],[[l,nu]]])'
    self.assertEqual(pred.conditions[key], 0.)
def testTxnameDataReweight(self):
    """Check lifetime-reweighted efficiencies for different effective decay
    lengths (Leff_inner / Leff_outer) of the HSCP efficiency maps."""

    def pickTx(exp, dsId, label):
        """Return the txname `label` from dataset `dsId` of result `exp`."""
        ds = [d for d in exp.datasets if d.dataInfo.dataId == dsId][0]
        return [t for t in ds.txnameList if str(t) == label][0]

    # Long-lived chargino and gluino with fixed widths and masses:
    c1 = mssm.c1.copy()
    c1.totalwidth = 1e-17 * GeV
    c1.mass = 100 * GeV
    gluino = mssm.gluino.copy()
    gluino.totalwidth = 1e-15 * GeV
    gluino.mass = 500 * GeV
    branch1 = Branch()
    branch1.oddParticles = [gluino, c1]
    branch2 = Branch()
    branch2.oddParticles = [gluino, c1]
    el1 = Element([branch1, branch2])
    listofanalyses = database.getExpResults(analysisIDs=["CMS-EXO-13-006"],
                                            dataTypes=['efficiencyMap'])
    exp = listofanalyses[0]
    # Default detector size:
    tx = pickTx(exp, 'c000', 'THSCPM3')
    effDefault = tx.getEfficiencyFor(el1)
    self.assertAlmostEqual(tx.txnameData.Leff_inner, 0.007)
    self.assertAlmostEqual(tx.txnameData.Leff_outer, 7.0)
    self.assertAlmostEqual(effDefault / 1e-5, 5.223348, places=3)
    # Modified outer decay length:
    tx = pickTx(exp, 'c000track', 'THSCPM3')
    effNewSize = tx.getEfficiencyFor(el1)
    self.assertAlmostEqual(tx.txnameData.Leff_inner, 0.007)
    self.assertAlmostEqual(tx.txnameData.Leff_outer, 3.0)
    self.assertAlmostEqual(effNewSize / 1e-5, 7.83466, places=3)
    # Single-step topology (chargino only):
    branch1 = Branch()
    branch1.oddParticles = [c1]
    branch2 = Branch()
    branch2.oddParticles = [c1]
    el2 = Element([branch1, branch2])
    tx = pickTx(exp, 'c000', 'THSCPM1')
    effDefault = tx.getEfficiencyFor(el2)
    self.assertAlmostEqual(tx.txnameData.Leff_inner, 0.007)
    self.assertAlmostEqual(tx.txnameData.Leff_outer, 7.0)
    self.assertAlmostEqual(effDefault, 0.073292924, places=3)
    tx = pickTx(exp, 'c000track', 'THSCPM1')
    effNewSize = tx.getEfficiencyFor(el2)
    self.assertAlmostEqual(tx.txnameData.Leff_inner, 0.1)
    self.assertAlmostEqual(tx.txnameData.Leff_outer, 5.0)
    self.assertAlmostEqual(effNewSize, 0.0897630, places=3)
def checkPrediction(self, slhafile, expID, expectedValues):
    """Decompose slhafile and compare each prediction for expID with
    expectedValues (popped in order, 1% relative tolerance)."""
    self.configureLogger()
    smstoplist = slhaDecomposer.decompose(slhafile, 0. * fb, doCompress=True,
                                          doInvisible=True,
                                          minmassgap=5. * GeV)
    for expresult in database.getExpResults(analysisIDs=expID):
        for pred in theoryPredictionsFor(expresult, smstoplist):
            expval = expectedValues.pop()
            self.assertAlmostEqual(pred.xsection.value.asNumber(fb), expval,
                                   delta=expval * 0.01)
    # Every expected value must have been consumed.
    self.assertTrue(len(expectedValues) == 0)
def runPrinterMain(self, slhafile, mprinter, addTopList=False):
    """
    Main program. Displays basic use case.
    """
    runtime.modelFile = 'mssm'
    reload(particlesLoader)
    # Decomposition options:
    sigmacut = 0.03 * fb
    mingap = 5. * GeV
    # Decompose the model (use slhaDecomposer for SLHA input or
    # lheDecomposer for LHE input):
    smstoplist = slhaDecomposer.decompose(slhafile, sigmacut,
                                          doCompress=True, doInvisible=True,
                                          minmassgap=mingap)
    # Add the decomposition result to the printers
    if addTopList:
        mprinter.addObj(smstoplist)
    analysisIds = ['*:8*TeV', 'CMS-PAS-SUS-15-002', 'CMS-PAS-SUS-16-024']
    listOfExpRes = database.getExpResults(analysisIDs=analysisIds)
    # Compute the theory predictions for each analysis
    allPredictions = []
    for expResult in listOfExpRes:
        predictions = theoryPredictionsFor(expResult, smstoplist)
        if not predictions:
            continue
        allPredictions += predictions._theoryPredictions
    maxcond = 0.2
    results = ioObjects.ResultList(allPredictions, maxcond)
    mprinter.addObj(results)
    # Add coverage information:
    coverageInfo = coverage.Uncovered(smstoplist)
    mprinter.addObj(coverageInfo)
    # Add additional information:
    databaseVersion = database.databaseVersion
    runInfo = {'sigmacut': sigmacut.asNumber(fb),
               'minmassgap': mingap.asNumber(GeV),
               'maxcond': maxcond}
    outputStatus = ioObjects.OutputStatus([1, 'Input file ok'], slhafile,
                                          runInfo, databaseVersion)
    outputStatus.status = 1
    mprinter.addObj(outputStatus)
    mprinter.flush()
def testOutsidePlane(self):
    """Check interpolation slightly off the mass plane; a point too far
    outside the validated grid must return None.

    Fixes: idiomatic assertIsNone instead of `result == None`; corrected
    the stale `# T6bbWW` comment (the txname selected here is T2bb).
    """
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    txnames=["T2bb"])
    txname = expRes[0].datasets[0].txnameList[0]  # T2bb
    # Small asymmetry between the branches: still interpolatable.
    result = txname.txnameData.getValueFor([[300. * GeV, 127. * GeV],
                                            [300. * GeV, 127.5 * GeV]])
    self.assertAlmostEqual(result.asNumber(pb), 0.24452092)
    result = txname.txnameData.getValueFor([[600. * GeV, 120. * GeV],
                                            [600. * GeV, 130. * GeV]])
    self.assertAlmostEqual(result.asNumber(pb), 0.0197154)
    # Larger asymmetry at low mother mass: outside the plane, no value.
    result = txname.txnameData.getValueFor([[300. * GeV, 120. * GeV],
                                            [300. * GeV, 130. * GeV]])
    self.assertIsNone(result)
def testOutsidePlane(self):
    """Interpolation near the mass plane works; a point too far outside the
    validated grid yields None.

    Fix: idiomatic assertIsNone instead of `result == None`.
    """
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    txnames=["T2bb"])
    tx = expRes[0].datasets[0].txnameList[0]  # T2bb
    result = tx.txnameData.getValueFor([[300. * GeV, 127. * GeV],
                                        [300. * GeV, 127.5 * GeV]])
    self.assertAlmostEqual(result.asNumber(pb), 0.24452092)
    result = tx.txnameData.getValueFor([[600. * GeV, 120. * GeV],
                                        [600. * GeV, 130. * GeV]])
    self.assertAlmostEqual(result.asNumber(pb), 0.0197154)
    result = tx.txnameData.getValueFor([[300. * GeV, 120. * GeV],
                                        [300. * GeV, 130. * GeV]])
    self.assertIsNone(result)
def test6D(self):
    """Check interpolation of the 6-dimensional T6bbWW grid."""
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    txnames=["T6bbWW"])
    txname = expRes[0].datasets[0].txnameList[0]  # T6bbWW
    result = txname.txnameData.getValueFor(
        [[300. * GeV, 105. * GeV, 100. * GeV],
         [300. * GeV, 105. * GeV, 100. * GeV]])
    self.assertAlmostEqual(result.asNumber(pb), 0.176266)
    result = txname.txnameData.getValueFor(
        [[300. * GeV, 270. * GeV, 200. * GeV],
         [300. * GeV, 270. * GeV, 200. * GeV]])
    self.assertAlmostEqual(result.asNumber(pb), 87.0403)
    result = txname.txnameData.getValueFor(
        [[300. * GeV, 270. * GeV, 200. * GeV],
         [300. * GeV, 271. * GeV, 200. * GeV]])
    self.assertAlmostEqual(result.asNumber(pb), 88.6505675)
def testCache(self):
    """Fill the interpolation cache and check its size and a cached value."""
    Cache.n_stored = 10
    Cache.reset()
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    datasetIDs=[None], txnames=["T2bb"])
    txname = expRes[0].datasets[0].txnameList[0]  # T2bb
    # Query twelve mother masses from 170 to 280 GeV.
    for mother in range(170, 290, 10):
        point = [mother * GeV, 100 * GeV]
        txname.txnameData.getValueFor([point, point])
    # Not all queried points end up cached (the count settles at 7) —
    # presumably some fall outside the grid; verify against Cache docs.
    self.assertEqual(Cache.size(), 7)
    m = [[270 * GeV, 100 * GeV], [270 * GeV, 100 * GeV]]
    result = txname.txnameData.getValueFor(m)
    self.assertAlmostEqual(result.asNumber(fb), 459.658)
def testGoodFile(self):
    """The TChiWZoff condition violation must be zero for lightEWinos."""
    filename = "./testFiles/slha/lightEWinos.slha"
    model = Model(BSMList, SMList)
    model.updateParticles(filename)
    topolist = decomposer.decompose(model, sigmacut=0.1 * fb,
                                    doCompress=True, doInvisible=True,
                                    minmassgap=5 * GeV)
    analyses = database.getExpResults(txnames=["TChiWZoff"],
                                      analysisIDs='ATLAS-SUSY-2013-12')
    theoryPrediction = theoryPredictionsFor(analyses[0], topolist)[0]
    key = 'Cgtr([[[mu+,mu-]],[[l,nu]]],[[[e+,e-]],[[l,nu]]])'
    self.assertEqual(theoryPrediction.conditions[key], 0.)
def test6D(self):
    """Check interpolation of the 6-dimensional T6bbWW grid."""
    expRes = database.getExpResults(analysisIDs=["ATLAS-SUSY-2013-05"],
                                    txnames=["T6bbWW"])
    tx = expRes[0].datasets[0].txnameList[0]  # T6bbWW
    # (mass point in GeV, reference UL in pb)
    points = [
        ([[300., 105., 100.], [300., 105., 100.]], 0.176266),
        ([[300., 270., 200.], [300., 270., 200.]], 87.0403),
        ([[300., 270., 200.], [300., 271., 200.]], 88.6505675),
    ]
    for masses, ref in points:
        withUnits = [[m * GeV for m in branch] for branch in masses]
        result = tx.txnameData.getValueFor(withUnits)
        self.assertAlmostEqual(result.asNumber(pb), ref)
def testIntegration(self):
    """Decompose simplyGluino and run checkAnalysis on the T1 analyses.

    Fix: replaced the non-idiomatic `type(x) != list` with isinstance.
    """
    slhafile = '../inputFiles/slha/simplyGluino.slha'
    model = Model(BSMList, SMList)
    model.updateParticles(slhafile)
    self.configureLogger()
    smstoplist = decomposer.decompose(model, .1 * fb, doCompress=True,
                                      doInvisible=True, minmassgap=5. * GeV)
    listofanalyses = database.getExpResults(
        analysisIDs=["ATLAS-SUSY-2013-02", "CMS-SUS-13-012"], txnames=["T1"])
    # getExpResults may return a single result; normalize to a list.
    if not isinstance(listofanalyses, list):
        listofanalyses = [listofanalyses]
    for analysis in listofanalyses:
        self.checkAnalysis(analysis, smstoplist)
def testPredictionInterface(self):
    """ A simple test to see that the interface in datasetObj
    and TheoryPrediction to the statistics tools is working correctly
    """
    expRes = database.getExpResults(analysisIDs=['CMS-SUS-13-012'])[0]
    slhafile = "./testFiles/slha/simplyGluino.slha"
    smstoplist = slhaDecomposer.decompose(slhafile)
    prediction = theoryPredictionsFor(expRes, smstoplist, deltas_rel=0.)[0]
    pred_signal_strength = prediction.xsection.value
    prediction.computeStatistics()
    ill = math.log(prediction.likelihood)
    ichi2 = prediction.chi2
    nsig = (pred_signal_strength * expRes.globalInfo.lumi).asNumber()
    # Recompute likelihood and chi2 directly via the statistics tools and
    # compare with the values stored on the prediction.
    m = Data(4, 2.2, 1.1**2, None, nsignal=nsig, deltas_rel=0.2)
    computer = LikelihoodComputer(m)
    dll = math.log(computer.likelihood(nsig, marginalize=False))
    self.assertAlmostEqual(ill, dll, places=2)
    dchi2 = computer.chi2(nsig, marginalize=False)
    self.assertAlmostEqual(ichi2, dchi2, places=2)
def testPredictionInterface(self):
    """ A simple test to see that the interface in datasetObj
    and TheoryPrediction to the statistics tools is working correctly
    """
    expRes = database.getExpResults(analysisIDs=['CMS-SUS-13-012'])[0]
    smstoplist = slhaDecomposer.decompose("./testFiles/slha/simplyGluino.slha")
    prediction = theoryPredictionsFor(expRes, smstoplist, deltas_rel=0.)[0]
    prediction.computeStatistics()
    # Values stored on the prediction:
    ill = math.log(prediction.likelihood)
    ichi2 = prediction.chi2
    # Values recomputed directly with the statistics tools:
    signal = prediction.xsection.value
    nsig = (signal * expRes.globalInfo.lumi).asNumber()
    data = Data(4, 2.2, 1.1**2, None, nsignal=nsig, deltas_rel=0.2)
    computer = LikelihoodComputer(data)
    dll = math.log(computer.likelihood(nsig, marginalize=False))
    dchi2 = computer.chi2(nsig, marginalize=False)
    self.assertAlmostEqual(ill, dll, places=2)
    self.assertAlmostEqual(ichi2, dchi2, places=2)
def testApproxGaussian(self):
    """Check the approximate-Gaussian likelihood of a T1tttt prediction
    (experimental feature).

    Fixes: the original `assertTrue(len(expRes), 1)` passed 1 as a message
    and never checked the length — replaced by assertEqual; the likelihood
    scan was also duplicated verbatim, so it is run only once now.
    """
    ## turn experimental features on
    from smodels.tools import runtime
    runtime._experimental = True
    expRes = database.getExpResults(analysisIDs=["CMS-PAS-SUS-12-026"])
    self.assertEqual(len(expRes), 1)
    filename = "./testFiles/slha/T1tttt.slha"
    model = Model(BSMList, SMList)
    model.updateParticles(filename)
    smstoplist = decomposer.decompose(model, sigmacut=0)
    prediction = theoryPredictionsFor(expRes[0], smstoplist)[0]
    prediction.computeStatistics()
    import numpy
    # Exercise getLikelihood over a small grid of signal strengths.
    tot = 0.
    for mu in numpy.arange(0., .2, .02):
        tot += prediction.getLikelihood(mu)
    self.assertAlmostEqual(prediction.likelihood, 1.563288e-35, 3)
def testGoodFile(self):
    """Each matched analysis yields exactly one prediction whose element
    IDs agree with the reference listOfIDs."""
    listOfIDs = {
        'ATLAS-CONF-2013-037': [31, 32, 33, 34, 27, 28, 29, 30],
        'ATLAS-SUSY-2013-05': [26]
    }
    slha = "./testFiles/slha/higgsinoStop.slha"
    topoList = slhaDecomposer.decompose(slha, doCompress=True,
                                        doInvisible=True, minmassgap=5 * GeV)
    resultlist = database.getExpResults(analysisIDs=[
        '*:8*TeV', 'CMS-PAS-SUS-15-002', 'CMS-PAS-SUS-16-024'
    ])
    for res in resultlist:
        preds = theoryPredictionsFor(res, topoList)
        if not preds:
            continue
        self.assertEqual(len(preds), 1)
        expectedIDs = listOfIDs[res.globalInfo.id]
        self.assertEqual(sorted(preds[0].IDs), sorted(expectedIDs))
def testClusteringEM(self):
    """Elements with identical ULs must fall into a single cluster for
    efficiency-map results, independently of mass, width or txname."""
    slhafile = 'testFiles/slha/lightEWinos.slha'
    model = Model(BSMparticles=BSMList, SMparticles=SMList)
    model.updateParticles(slhafile)
    sigmacut = 5. * fb
    mingap = 5. * GeV
    toplist = decomposer.decompose(model, sigmacut, doCompress=True,
                                   doInvisible=True, minmassgap=mingap)
    # Test clustering for EM results
    dataset = database.getExpResults(
        analysisIDs='CMS-SUS-13-012',
        datasetIDs='3NJet6_800HT1000_300MHT450')[0].getDataset(
            '3NJet6_800HT1000_300MHT450')
    el1 = toplist[0].elementList[0].copy()
    el2 = toplist[0].elementList[1].copy()
    el3 = toplist[2].elementList[1].copy()
    for el in (el1, el2, el3):
        el.eff = 1.  # (used in clustering)
        # All elements have the same UL (for EM results):
        el._upperLimit = 1. * fb
        # Clustering should not depend on the txname:
        el.txname = None
    # Clustering should not depend on the mass or width either:
    el1.mass = [[1000. * GeV, 10 * GeV]] * 2
    el2.mass = [[1500. * GeV, 10 * GeV]] * 2
    el3.mass = [[200. * GeV, 100 * GeV, 90. * GeV]] * 2
    el3.totalwidth = [[1e-10 * GeV, 1e-15 * GeV, 0. * GeV]] * 2
    clusters = clusterElements([el1, el2, el3], maxDist=0.2, dataset=dataset)
    self.assertEqual(len(clusters), 1)
    self.assertEqual(sorted(clusters[0].elements), sorted([el1, el2, el3]))
    self.assertEqual(clusters[0].averageElement().mass, None)
    self.assertEqual(clusters[0].averageElement().totalwidth, None)
def testComplexCluster(self):
    """ test the mass clusterer """
    slhafile = 'testFiles/slha/416126634.slha'
    model = Model(BSMparticles=BSMList, SMparticles=SMList)
    model.updateParticles(slhafile)
    toplist = decomposer.decompose(model, 0.03 * fb, doCompress=True,
                                   doInvisible=True, minmassgap=5. * GeV)
    # Test clustering for UL results
    expResult = database.getExpResults(analysisIDs='CMS-SUS-16-039',
                                       dataTypes='upperLimit')[0]
    predictions = theoryPredictionsFor(expResult, toplist,
                                       combinedResults=False,
                                       marginalize=False)
    clusterSizes = sorted(len(p.elements) for p in predictions)
    self.assertEqual(clusterSizes, [1, 16, 24])
def testClusteringUL(self):
    """Check element clustering for upper-limit results: cluster membership,
    average masses and widths at several maxDist settings."""
    slhafile = 'testFiles/slha/lightEWinos.slha'
    model = Model(BSMparticles=BSMList, SMparticles=SMList)
    model.updateParticles(slhafile)
    sigmacut = 5. * fb
    mingap = 5. * GeV
    toplist = decomposer.decompose(model, sigmacut, doCompress=True,
                                   doInvisible=True, minmassgap=mingap)
    # Test clustering for UL results
    dataset = database.getExpResults(analysisIDs='ATLAS-SUSY-2013-02',
                                     datasetIDs=None)[0].getDataset(None)
    el1 = toplist[1].elementList[0]
    el2 = toplist[1].elementList[1]
    el3 = toplist[1].elementList[2]
    weights = [el.weight.getMaxXsec().asNumber(fb)
               for el in [el1, el2, el3]]
    # Overwrite masses and txname label
    el1.mass = [[1000. * GeV, 100. * GeV]] * 2
    el2.mass = [[1020. * GeV, 100. * GeV]] * 2
    el3.mass = [[500. * GeV, 100. * GeV]] * 2
    el1.txname = el2.txname = el3.txname = 'T1'
    el1.eff = 1.  # (Used in clustering)
    el2.eff = 1.
    el3.eff = 1.
    # Check clustering with distinct elements
    clusters = clusterElements([el1, el2, el3], maxDist=0.2, dataset=dataset)
    self.assertEqual(len(clusters), 2)
    averageMasses = [
        [[(1000. * GeV * weights[0] + 1020. * GeV * weights[1]) /
          (weights[0] + weights[1]), 100. * GeV]] * 2,
        el3.mass
    ]
    elClusters = [[el1, el2], [el3]]
    for ic, cluster in enumerate(clusters):
        avgEl = cluster.averageElement()
        self.assertEqual(sorted(cluster.elements), sorted(elClusters[ic]))
        for ibr, br in enumerate(avgEl.mass):
            for im, m in enumerate(br):
                self.assertAlmostEqual(
                    m.asNumber(GeV),
                    averageMasses[ic][ibr][im].asNumber(GeV))
        self.assertEqual(avgEl.totalwidth, elClusters[ic][0].totalwidth)
    # Check clustering with distinct elements but no maxDist limit
    clusters = clusterElements([el1, el2, el3], maxDist=10., dataset=dataset)
    self.assertEqual(len(clusters), 1)
    cluster = clusters[0]
    avgEl = cluster.averageElement()
    averageMass = [[(1000. * GeV * weights[0] + 1020. * GeV * weights[1] +
                     500. * GeV * weights[2]) / sum(weights),
                    100. * GeV]] * 2
    self.assertEqual(sorted(cluster.elements), sorted([el1, el2, el3]))
    for ibr, br in enumerate(avgEl.mass):
        for im, m in enumerate(br):
            self.assertAlmostEqual(m.asNumber(GeV),
                                   averageMass[ibr][im].asNumber(GeV))
    self.assertEqual(avgEl.totalwidth, el1.totalwidth)
    # Check clustering where elements have same upper limits, but not the
    # average element:
    el1._upperLimit = 1. * fb
    el2._upperLimit = 1. * fb
    el3._upperLimit = 1. * fb
    clusters = clusterElements([el1, el2, el3], maxDist=0.1, dataset=dataset)
    self.assertEqual(len(clusters), 2)