def test_disabledConfig(self):
    """The 'enabled' flag parses falsy strings as disabled and "True" as enabled."""
    # Both "0" and "False" must disable the config.
    for raw_value in ("0", "False"):
        config = performanceregressionconfig.PerformanceRegressionConfig(
            {"enabled": raw_value})
        self.assertFalse(config.enabled)
    # An explicit "True" keeps it enabled.
    config = performanceregressionconfig.PerformanceRegressionConfig(
        {"enabled": "True"})
    self.assertTrue(config.enabled)
def testSuccessfullRun(self):
    """A runner whose measurements never change between runs reports success.

    NOTE(review): the method name misspells "Successful"; left unchanged so
    test discovery and any external references keep working.
    """
    with DisposableDirectory("testdir", True) as testDir, \
            DisposableDirectory("basedir") as baseDir:
        config = performanceregressionconfig.PerformanceRegressionConfig({})
        # The bound method already has the (d, a) signature, so no lambda
        # wrapper is needed.
        runner = performancerunner.PerformanceRunner(
            self.runfuncSucceedAlwaysSame,
            "basebin",
            "testbin",
            testDir.name(),
            config)
        command = self.makeCommand(
            "infile.graph", "outfile.graph", "visibility")
        result, message = runner.runTestCase("testname", command)
        self.assertTrue(result)
def test_overrideValues(self):
    """Explicit settings override the defaults, whether given as strings or numbers."""
    overrides = {
        "runsPerInstance": "5",                # string form is coerced to int
        "relativeThresholdInPercent": 1.5,     # already numeric
        "absoluteThresholdInSeconds": "4.1",   # string form is coerced to float
    }
    config = performanceregressionconfig.PerformanceRegressionConfig(overrides)
    self.assertTrue(config.enabled)
    self.assertEqual(config.runsPerInstance, 5)
    self.assertEqual(config.relativeThresholdInPercent, 1.5)
    self.assertEqual(config.absoluteThresholdInSeconds, 4.1)
def test_missingConfig(self):
    """Passing no configuration at all (None) leaves the feature disabled."""
    config = performanceregressionconfig.PerformanceRegressionConfig(None)
    self.assertFalse(config.enabled)
def test_defaultValues(self):
    """An empty config dict enables the feature and yields the documented defaults."""
    config = performanceregressionconfig.PerformanceRegressionConfig({})
    self.assertTrue(config.enabled)
    # Defaults: 3 runs per instance, 1% relative and 1s absolute threshold.
    self.assertEqual(config.runsPerInstance, 3)
    self.assertEqual(config.relativeThresholdInPercent, 1)
    self.assertEqual(config.absoluteThresholdInSeconds, 1)