Example #1
class AlertProcessorTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel=logging.DEBUG)
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=[
            "WMCore.WMBS", 'WMCore.Agent.Database', "WMCore.ResourceControl"
        ],
                                useDefault=False)
        self.testDir = self.testInit.generateWorkDir()

        self.config = Configuration()
        self.config.section_("Agent")
        self.config.Agent.useMsgService = False
        self.config.Agent.useTrigger = False
        self.config.component_("AlertProcessor")
        self.config.AlertProcessor.componentDir = self.testDir
        self.config.AlertProcessor.address = "tcp://127.0.0.1:5557"
        self.config.AlertProcessor.controlAddr = "tcp://127.0.0.1:5559"
        self.config.section_("CoreDatabase")

        self.config.CoreDatabase.socket = os.environ.get("DBSOCK")
        self.config.CoreDatabase.connectUrl = os.environ.get("DATABASE")

        self.config.AlertProcessor.section_("critical")
        self.config.AlertProcessor.section_("soft")

        self.config.AlertProcessor.critical.level = 5
        self.config.AlertProcessor.soft.level = 0
        self.config.AlertProcessor.soft.bufferSize = 3

        self.config.AlertProcessor.critical.section_("sinks")
        self.config.AlertProcessor.soft.section_("sinks")

    def tearDown(self):
        self.testInit.clearDatabase()
        self.testInit.delWorkDir()

    def testAlertProcessorBasic(self):
        alertProcessor = AlertProcessor(self.config)
        try:
            # alertProcessor.startComponent() would stop the flow here: in
            # Harness.py that method just calls prepareToStart() and waits forever.
            # alertProcessor.startDaemon() is no good either; it puts everything
            # in the background.
            alertProcessor.prepareToStart()
        except Exception as ex:
            print(ex)
            self.fail(str(ex))

        logging.debug(
            "AlertProcessor and its sub-components should be running now ...")
        logging.debug("Going to stop the component ...")

        # stop via component method
        try:
            alertProcessor.stopAlertProcessor()
        except Exception as ex:
            print(ex)
            self.fail(str(ex))
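
The example turns any startup or shutdown exception into an explicit test failure via try/except and self.fail(). Below is a minimal sketch of that pattern using only the standard library; DummyComponent is a hypothetical stand-in, not a WMCore class.

import unittest


class DummyComponent(object):
    """Hypothetical component with a start/stop life cycle."""
    def __init__(self):
        self.running = False

    def prepareToStart(self):
        self.running = True

    def stop(self):
        self.running = False


class StartStopPatternTest(unittest.TestCase):
    def testStartStop(self):
        component = DummyComponent()
        try:
            component.prepareToStart()
        except Exception as ex:
            self.fail("start failed: %s" % ex)
        self.assertTrue(component.running)

        try:
            component.stop()
        except Exception as ex:
            self.fail("stop failed: %s" % ex)
        self.assertFalse(component.running)


if __name__ == "__main__":
    unittest.main()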
Example #2
class CouchTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel=logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = getConfig(self.testDir)
        # mock generator instance to communicate some configuration values
        self.generator = utils.AlertGeneratorMock(self.config)
        self.testName = self.id().split('.')[-1]

    def tearDown(self):
        self.testInit.delWorkDir()
        self.generator = None

    def testAlertGeneratorCouchDbSizePollerBasic(self):
        config = getConfig("/tmp")
        try:
            poller = CouchDbSizePoller(config.AlertGenerator.couchDbSizePoller,
                                       self.generator)
        except Exception as ex:
            self.fail("%s: exception: %s" % (self.testName, ex))
        poller.check()  # -> on real system dir may result in permission denied
        poller._dbDirectory = "/dev"
        poller.check()  # -> OK

        # test failing during set up
        poller = CouchDbSizePoller(config.AlertGenerator.couchDbSizePoller,
                                   self.generator)
        poller._query = "nonsense query"
        # this will fail on the above query
        self.assertRaises(Exception, poller._getDbDir)
        poller.check()
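
The poller under test watches the size of a database directory, and the test points it at "/dev" simply because that path always exists and is cheap to scan. Below is a minimal sketch of a directory-size check such a poller might perform; the function is an assumption for illustration, not the CouchDbSizePoller implementation.

import os


def directorySize(path):
    """Sum the sizes of regular files below path, skipping unreadable entries."""
    total = 0
    for dirPath, _dirNames, fileNames in os.walk(path):
        for name in fileNames:
            fullPath = os.path.join(dirPath, name)
            if os.path.isfile(fullPath):
                try:
                    total += os.path.getsize(fullPath)
                except OSError:
                    pass  # permission denied or file vanished mid-walk
    return total


# Usage:
#     print(directorySize("/dev"))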
Example #3
class AlertProcessorTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = ["WMCore.WMBS",'WMCore.Agent.Database',
                                                 "WMCore.ResourceControl"],
                                 useDefault = False)
        self.testDir = self.testInit.generateWorkDir()

        self.config = Configuration()
        self.config.section_("Agent")
        self.config.Agent.useMsgService = False
        self.config.Agent.useTrigger = False
        self.config.component_("AlertProcessor")
        self.config.AlertProcessor.componentDir = self.testDir
        self.config.AlertProcessor.address = "tcp://127.0.0.1:5557"
        self.config.AlertProcessor.controlAddr = "tcp://127.0.0.1:5559"
        self.config.section_("CoreDatabase")

        self.config.CoreDatabase.socket = os.environ.get("DBSOCK")
        self.config.CoreDatabase.connectUrl = os.environ.get("DATABASE")

        self.config.AlertProcessor.section_("critical")
        self.config.AlertProcessor.section_("soft")

        self.config.AlertProcessor.critical.level = 5
        self.config.AlertProcessor.soft.level = 0
        self.config.AlertProcessor.soft.bufferSize = 3

        self.config.AlertProcessor.critical.section_("sinks")
        self.config.AlertProcessor.soft.section_("sinks")


    def tearDown(self):
        self.testInit.clearDatabase()
        self.testInit.delWorkDir()


    def testAlertProcessorBasic(self):
        alertProcessor = AlertProcessor(self.config)
        try:
            # alertProcessor.startComponent() would stop the flow here: in
            # Harness.py that method just calls prepareToStart() and waits forever.
            # alertProcessor.startDaemon() is no good either; it puts everything
            # in the background.
            alertProcessor.prepareToStart()
        except Exception as ex:
            print(ex)
            self.fail(str(ex))

        logging.debug("AlertProcessor and its sub-components should be running now ...")
        logging.debug("Going to stop the component ...")

        # stop via component method
        try:
            alertProcessor.stopAlertProcessor()
        except Exception as ex:
            print(ex)
            self.fail(str(ex))
Example #4
class CouchTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = getConfig(self.testDir)
        # mock generator instance to communicate some configuration values
        self.generator = utils.AlertGeneratorMock(self.config)        
        self.testProcesses = []
        self.testName = self.id().split('.')[-1]
         
        
    def tearDown(self):       
        self.testInit.delWorkDir()
        self.generator = None
        utils.terminateProcesses(self.testProcesses)


    def testAlertGeneratorCouchDbSizePollerBasic(self):
        config = getConfig("/tmp")
        try:
            poller = CouchDbSizePoller(config.AlertGenerator.couchDbSizePoller, self.generator)
        except Exception as ex:
            self.fail("%s: exception: %s" % (self.testName, ex))
        poller.check() # -> on real system dir may result in permission denied
        poller._dbDirectory = "/dev"
        poller.check() # -> OK
        
        # test failing during set up
        poller = CouchDbSizePoller(config.AlertGenerator.couchDbSizePoller, self.generator)
        poller._query = "nonsense query"
        poller._dbDirectory = poller._getDbDir()
        poller.check()
        self.assertEquals(poller._dbDirectory, None)        
Example #5
class MockPluginTest(unittest.TestCase):
    def setUp(self):
        self.testinit = TestInit(__file__)
        self.workdir = self.testinit.generateWorkDir()
        jobList[0]['cache_dir'] = self.workdir

    def tearDown(self):
        self.testinit.delWorkDir()

    def testInit(self):
        wrongconfig = Configuration()
        wrongconfig.section_('BossAir')
        self.assertRaises( BossAirPluginException, MockPlugin, wrongconfig )

        wrongconfig.BossAir.section_('MockPlugin')
        self.assertRaises( BossAirPluginException, MockPlugin, wrongconfig )
        #The config does not contain fakeReport parameter
        self.assertRaises( BossAirPluginException, MockPlugin, wrongconfig )

        #The fakeReport does not exist
        wrongconfig.BossAir.MockPlugin.fakeReport = 'asdf'
        self.assertRaises( BossAirPluginException, MockPlugin, wrongconfig )

    def testTrack(self):
        mp = MockPlugin(config)

        #No jobs should be scheduled yet
        self.assertEquals({}, mp.jobsScheduledEnd)

        # Don't be racy
        currentTime = datetime.now()
        #id is the only required parameter in the job dictionary
        res = mp.track( jobList, currentTime )
        self.assertTrue( 1 in mp.jobsScheduledEnd )
        #check scheduled end (N.B. this includes 20% of random time)
        scheduledEnd = mp.jobsScheduledEnd[1]
        timeTillJob = scheduledEnd - currentTime
        self.assertTrue( timeTillJob >= timedelta(minutes = TEST_JOB_LEN - 1), \
                         "Time till Job %s !>= Delta %s" % (timeTillJob, \
                         timedelta(minutes = TEST_JOB_LEN - 1)))
        self.assertTrue( timeTillJob <= timedelta(minutes = TEST_JOB_LEN*120/100 + 1), \
                         "Time till Job %s !<= Delta %s" % (timeTillJob, \
                         timedelta(minutes = TEST_JOB_LEN * 120/100 + 1)) )
        #the job is running
        self.assertEquals( 'Running', res[0][0]['status'])
        self.assertEquals( 'Running', res[1][0]['status'])
        self.assertEquals( [], res[2])

        #the job is not running anymore
        mp.jobsScheduledEnd[1] = datetime(1900,1,1)
        res = mp.track( jobList )
        self.assertEquals( [], res[0])
        self.assertEquals( 'Done', res[1][0]['status'])
        self.assertEquals( 'Done', res[2][0]['status'])

        del mp
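
The timing assertions above leave room for the roughly 20% of random time the plugin adds to each job's scheduled end. Below is a minimal sketch of the same bounds check with a hypothetical job length; the padding logic is an assumption that mirrors what the comments describe.

import random
import unittest
from datetime import datetime, timedelta

TEST_JOB_LEN = 60  # minutes, assumed value for illustration


def scheduledEnd(start, jobLenMinutes):
    """Return start plus the nominal job length plus up to 20% random padding."""
    padding = jobLenMinutes * random.uniform(0.0, 0.2)
    return start + timedelta(minutes=jobLenMinutes + padding)


class SchedulingBoundsTest(unittest.TestCase):
    def testBounds(self):
        start = datetime.now()
        remaining = scheduledEnd(start, TEST_JOB_LEN) - start
        self.assertGreaterEqual(remaining, timedelta(minutes=TEST_JOB_LEN - 1))
        self.assertLessEqual(remaining, timedelta(minutes=TEST_JOB_LEN * 120 / 100 + 1))


if __name__ == "__main__":
    unittest.main()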
Example #6
class SetupCMSSWPsetTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testDir = self.testInit.generateWorkDir()
        sys.path.insert(
            0,
            os.path.join(WMCore.WMBase.getTestBase(),
                         "WMCore_t/WMRuntime_t/Scripts_t"))

    def tearDown(self):
        sys.path.remove(
            os.path.join(WMCore.WMBase.getTestBase(),
                         "WMCore_t/WMRuntime_t/Scripts_t"))
        del sys.modules["WMTaskSpace"]
        self.testInit.delWorkDir()

    def createTestStep(self):
        """
        _createTestStep_

        Create a test step that can be passed to the setup script.

        """
        newStep = WMStep("cmsRun1")
        newStepHelper = CMSSWStepHelper(newStep)
        newStepHelper.setStepType("CMSSW")
        newStepHelper.setGlobalTag("SomeGlobalTag")
        stepTemplate = StepFactory.getStepTemplate("CMSSW")
        stepTemplate(newStep)
        newStep.application.command.configuration = "PSet.py"
        newStep.application.multicore.numberOfCores = "auto"
        return newStepHelper

    def loadProcessFromPSet(self):
        """
        _loadProcessFromPSet_

        This requires changing the working directory,
        do so in a safe manner to encapsulate the change to this method only
        """

        currentPath = os.getcwd()
        loadedProcess = None
        try:
            if not os.path.isdir(self.testDir):
                raise
            os.chdir(self.testDir)
            testFile = "PSet.py"
            pset = imp.load_source('process', testFile)
            loadedProcess = pset.process
        except Exception as ex:
            self.fail(
                "An exception was caught while trying to load the PSet, %s" %
                str(ex))
        finally:
            os.chdir(currentPath)
        return loadedProcess
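
The loadProcessFromPSet docstring asks for the working-directory change to be encapsulated safely. Below is a minimal sketch of the same chdir-and-restore idea written as a standard-library context manager; it is an illustration, not WMCore code.

import os
from contextlib import contextmanager


@contextmanager
def workingDirectory(path):
    """Temporarily change the working directory, restoring it on exit."""
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield path
    finally:
        os.chdir(previous)


# Usage, roughly mirroring loadProcessFromPSet:
#     with workingDirectory(self.testDir):
#         pset = imp.load_source('process', "PSet.py")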
Example #7
class MockPluginTest(unittest.TestCase):
    def setUp(self):
        self.testinit = TestInit(__file__)
        self.workdir = self.testinit.generateWorkDir()
        jobList[0]['cache_dir'] = self.workdir

    def tearDown(self):
        self.testinit.delWorkDir()

    def testInit(self):
        wrongconfig = Configuration()
        wrongconfig.section_('BossAir')
        self.assertRaises( BossAirPluginException, MockPlugin, wrongconfig )

        wrongconfig.BossAir.section_('MockPlugin')
        self.assertRaises( BossAirPluginException, MockPlugin, wrongconfig )
        #The config does not contain fakeReport parameter
        self.assertRaises( BossAirPluginException, MockPlugin, wrongconfig )

        #The fakeReport does not exist
        wrongconfig.BossAir.MockPlugin.fakeReport = 'asdf'
        self.assertRaises( BossAirPluginException, MockPlugin, wrongconfig )

    def testTrack(self):
        mp = MockPlugin(config)

        #No jobs should be scheduled yet
        self.assertEquals({}, mp.jobsScheduledEnd)

        # Don't be racy
        currentTime = datetime.now()
        #id is the only required parameter in the job dictionary
        res = mp.track( jobList, currentTime )
        self.assertTrue( 1 in mp.jobsScheduledEnd )
        #check scheduled end (N.B. this includes 20% of random time)
        scheduledEnd = mp.jobsScheduledEnd[1]
        timeTillJob = scheduledEnd - currentTime
        self.assertTrue( timeTillJob >= timedelta(minutes = TEST_JOB_LEN - 1), \
                         "Time till Job %s !>= Delta %s" % (timeTillJob, \
                         timedelta(minutes = TEST_JOB_LEN - 1)))
        self.assertTrue( timeTillJob <= timedelta(minutes = TEST_JOB_LEN*120/100 + 1), \
                         "Time till Job %s !<= Delta %s" % (timeTillJob, \
                         timedelta(minutes = TEST_JOB_LEN * 120/100 + 1)) )
        #the job is running
        self.assertEquals( 'Running', res[0][0]['status'])
        self.assertEquals( 'Running', res[1][0]['status'])
        self.assertEquals( [], res[2])

        #the job is not running anymore
        mp.jobsScheduledEnd[1] = datetime(1900,1,1)
        res = mp.track( jobList )
        self.assertEquals( [], res[0])
        self.assertEquals( 'Done', res[1][0]['status'])
        self.assertEquals( 'Done', res[2][0]['status'])

        del mp
Example #8
class WorkQueueTestCase(unittest.TestCase):

    def setSchema(self):
        "This can be overridden if the schema setting is different."
        self.schema = ["WMCore.WMBS","WMComponent.DBS3Buffer","WMCore.BossAir"]
        self.couchApps = ["WorkQueue"]

    def setUp(self):
        """
        _setUp_

        Setup the database and logging connection.  Try to create all of the
        WMBS tables.  Also add some dummy locations.
        """
        self.queueDB = 'workqueue_t'
        self.queueInboxDB = 'workqueue_t_inbox'
        self.globalQDB = 'workqueue_t_global'
        self.globalQInboxDB = 'workqueue_t_global_inbox'
        self.localQDB = 'workqueue_t_local'
        self.localQInboxDB = 'workqueue_t_local_inbox'
        self.localQDB2 = 'workqueue_t_local2'
        self.localQInboxDB2 = 'workqueue_t_local2_inbox'
        self.configCacheDB = 'workqueue_t_config_cache'

        self.setSchema()
        self.testInit = TestInit('WorkQueueTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = self.schema,
                                useDefault = False)
        self.testInit.setupCouch(self.queueDB, *self.couchApps)
        self.testInit.setupCouch(self.queueInboxDB, *self.couchApps)
        self.testInit.setupCouch(self.globalQDB, *self.couchApps)
        self.testInit.setupCouch(self.globalQInboxDB , *self.couchApps)
        self.testInit.setupCouch(self.localQDB, *self.couchApps)
        self.testInit.setupCouch(self.localQInboxDB, *self.couchApps)
        self.testInit.setupCouch(self.localQDB2, *self.couchApps)
        self.testInit.setupCouch(self.localQInboxDB2, *self.couchApps)
        self.testInit.setupCouch(self.configCacheDB, 'ConfigCache')

        couchServer = CouchServer(os.environ.get("COUCHURL"))
        self.configCacheDBInstance = couchServer.connectDatabase(self.configCacheDB)

        self.workDir = self.testInit.generateWorkDir()
        return

    def tearDown(self):
        """
        _tearDown_

        Drop all the WMBS tables.
        """
        #self.testInit.tearDownCouch()
        self.testInit.clearDatabase()
        self.testInit.delWorkDir()
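
WorkQueueTestCase exposes setSchema() as a hook so subclasses can swap in a different schema before the shared setUp() runs. Below is a minimal sketch of that overridable-setup pattern with hypothetical module names.

import unittest


class BaseSchemaTestCase(unittest.TestCase):
    def setSchema(self):
        """Default schema; subclasses override this hook."""
        self.schema = ["default.module"]

    def setUp(self):
        self.setSchema()
        # setUp acts on whatever the hook provided.
        self.installed = list(self.schema)


class CustomSchemaTestCase(BaseSchemaTestCase):
    def setSchema(self):
        self.schema = ["custom.module", "extra.module"]

    def testCustomSchemaInstalled(self):
        self.assertIn("extra.module", self.installed)


if __name__ == "__main__":
    unittest.main()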
Example #9
class SetupCMSSWPsetTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testDir = self.testInit.generateWorkDir()
        sys.path.insert(0, os.path.join(WMCore.WMBase.getTestBase(),
                                        "WMCore_t/WMRuntime_t/Scripts_t"))

    def tearDown(self):
        sys.path.remove(os.path.join(WMCore.WMBase.getTestBase(),
                                     "WMCore_t/WMRuntime_t/Scripts_t"))
        del sys.modules["WMTaskSpace"]
        self.testInit.delWorkDir()

    def createTestStep(self):
        """
        _createTestStep_

        Create a test step that can be passed to the setup script.

        """
        newStep = WMStep("cmsRun1")
        newStepHelper = CMSSWStepHelper(newStep)
        newStepHelper.setStepType("CMSSW")
        newStepHelper.setGlobalTag("SomeGlobalTag")
        stepTemplate = StepFactory.getStepTemplate("CMSSW")
        stepTemplate(newStep)
        newStep.application.command.configuration = "PSet.py"
        newStep.application.multicore.numberOfCores = "auto"
        return newStepHelper

    def loadProcessFromPSet(self):
        """
        _loadProcessFromPSet_

        This requires changing the working directory,
        do so in a safe manner to encapsulate the change to this method only
        """

        currentPath = os.getcwd()
        loadedProcess = None
        try:
            if not os.path.isdir(self.testDir):
                raise
            os.chdir(self.testDir)
            testFile = "PSet.py"
            pset = imp.load_source('process', testFile)
            loadedProcess = pset.process
        except Exception as ex:
            self.fail("An exception was caught while trying to load the PSet, %s" % str(ex))
        finally:
            os.chdir(currentPath)
        return loadedProcess
Example #10
class AlertGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testInit.clearDatabase()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = ["WMCore.WMBS",'WMCore.Agent.Database',
                                                 "WMCore.ResourceControl"],
                                useDefault = False)
        self.testDir = self.testInit.generateWorkDir()
        # AlertGenerator instance
        self.generator = None
        
        self.config = getConfig(self.testDir)
         
        self.config.section_("CoreDatabase")
        self.config.CoreDatabase.socket = os.environ.get("DBSOCK")
        self.config.CoreDatabase.connectUrl = os.environ.get("DATABASE")
        
        self.testProcesses = []
        self.testComponentDaemonXml = "/tmp/TestComponent/Daemon.xml" 
                

    def tearDown(self):
        self.testInit.clearDatabase()       
        self.testInit.delWorkDir()
        self.generator = None
        utils.terminateProcesses(self.testProcesses)
        # if the directory and file "/tmp/TestComponent/Daemon.xml" after
        # ComponentsPoller test exist, then delete it
        d = os.path.dirname(self.testComponentDaemonXml)
        if os.path.exists(d):
            shutil.rmtree(d)

        
    def _startComponent(self):
        self.generator = AlertGenerator(self.config)
        try:
            # self.proc.startComponent() would stop the flow here: in Harness.py
            # that method just calls prepareToStart() and waits forever.
            # self.proc.startDaemon() is no good either; it puts everything
            # in the background.
            self.generator.prepareToStart()
        except Exception as ex:
            print(ex)
            self.fail(str(ex))
        print("AlertGenerator and its sub-components should be running now ...")
Example #11
class FileSinkTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel=logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = ConfigSection("file")
        self.config.outputfile = os.path.join(self.testDir,
                                              "FileSinkTestNew.json")

    def tearDown(self):
        self.testInit.delWorkDir()

    def testFileSinkBasic(self):
        sink = FileSink(self.config)
        alerts = []
        nAlerts = 10
        for i in range(nAlerts):
            a = Alert(Source=__file__,
                      Level=i,
                      Timestamp=time.time(),
                      Type="Test")
            alerts.append(a)
        sink.send(alerts)
        # test by reading back
        loadAlerts = sink.load()
        self.assertEqual(len(loadAlerts), nAlerts)

        # Since FileSink implementation depends on line-separated JSONs of
        # Alert instance, test handling new lines in the payload
        alerts = []
        testMsg = "additional \n message"
        for i in range(10, 20):
            a = Alert(Source=__file__,
                      Level=i,
                      Timestamp=time.time(),
                      Type="Test",
                      Details={"message": testMsg})
            alerts.append(a)
        self.failUnless(os.path.exists(self.config.outputfile))
        sink.send(alerts)
        # test by reading back
        loadAlerts = sink.load()
        self.assertEqual(len(loadAlerts), 20)
        for a in loadAlerts[10:]:
            self.assertEqual(a["Details"]["message"], testMsg)
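
As the comment notes, the sink stores one JSON document per line, so payloads containing newlines must survive the round trip. Below is a minimal sketch of such a line-delimited sink; SimpleFileSink is a hypothetical illustration, not the WMCore FileSink.

import json


class SimpleFileSink(object):
    """Append-only sink that stores one JSON document per line."""
    def __init__(self, outputFile):
        self.outputFile = outputFile

    def send(self, alerts):
        # json.dumps escapes embedded newlines ("\n" becomes "\\n"),
        # so each alert stays on a single line of the file.
        with open(self.outputFile, "a") as handle:
            for alert in alerts:
                handle.write(json.dumps(alert) + "\n")

    def load(self):
        with open(self.outputFile) as handle:
            return [json.loads(line) for line in handle if line.strip()]


# Usage:
#     sink = SimpleFileSink("/tmp/alerts.json")
#     sink.send([{"Level": 1, "Details": {"message": "additional \n message"}}])
#     assert sink.load()[0]["Details"]["message"] == "additional \n message"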
Example #12
class MySQLTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testInit.setDatabaseConnection()
        self.testDir = self.testInit.generateWorkDir()
        self.config = getConfig(self.testDir)
        # mock generator instance to communicate some configuration values
        self.generator = utils.AlertGeneratorMock(self.config)        
        self.testProcesses = []
        self.testName = self.id().split('.')[-1]
         
        
    def tearDown(self):       
        self.testInit.delWorkDir()
        self.generator = None
        utils.terminateProcesses(self.testProcesses)
        

    def testMySQLPollerBasic(self):
        config = getConfig("/tmp")
        generator = utils.AlertGeneratorMock(config)
        # take for instance mysqlCPUPoller configuration here, just need
        # appropriate attributes set
        try:
            poller = MySQLPoller(config.AlertGenerator.mysqlCPUPoller, generator)
        except Exception as ex:
            self.fail("%s: exception: %s" % (self.testName, ex))                                
        # this class would not have defined polling sample function, give it one
        poller.sample = lambda proc: float(12)
        self.assertEqual(len(poller._measurements), 0)
        poller.check()
        self.assertEqual(len(poller._measurements), 1)
        self.assertEqual(poller._measurements[0], 12)
        
        # test handling of a non-existing process
        MySQLPoller._getProcessPID = lambda inst: 1212121212
        poller = MySQLPoller(config.AlertGenerator.mysqlCPUPoller, generator)
        # polling should not even happen so don't have to define sample function
        poller.check()
        self.assertEqual(poller._measurements, None)
        self.assertEqual(poller._dbProcessDetail, None)
Example #13
class FileSinkTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel=logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = ConfigSection("file")
        self.config.outputfile = os.path.join(self.testDir, "FileSinkTestNew.json")

    def tearDown(self):
        self.testInit.delWorkDir()

    def testFileSinkBasic(self):
        sink = FileSink(self.config)
        alerts = []
        nAlerts = 10
        for i in range(nAlerts):
            a = Alert(Source=__file__, Level=i, Timestamp=time.time(), Type="Test")
            alerts.append(a)
        sink.send(alerts)
        # test by reading back
        loadAlerts = sink.load()
        self.assertEqual(len(loadAlerts), nAlerts)

        # Since FileSink implementation depends on line-separated JSONs of
        # Alert instance, test handling new lines in the payload
        alerts = []
        testMsg = "additional \n message"
        for i in range(10, 20):
            a = Alert(Source=__file__, Level=i, Timestamp=time.time(), Type="Test", Details={"message": testMsg})
            alerts.append(a)
        self.failUnless(os.path.exists(self.config.outputfile))
        sink.send(alerts)
        # test by reading back
        loadAlerts = sink.load()
        self.assertEqual(len(loadAlerts), 20)
        for a in loadAlerts[10:]:
            self.assertEqual(a["Details"]["message"], testMsg)
Example #14
class ConfigurationExTest(unittest.TestCase):
    """
    test case for Configuration object

    """
    def setUp(self):
        """set up"""
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()
        self.functionSave = "%s/WMCore_Agent_Configuration_t_function.py" % self.testDir

    def tearDown(self):
        """clean up"""
        self.testInit.delWorkDir()

    def testCallableConfigParams(self):
        """ctor"""
        def f():
            return True

        config = Configuration()
        config.section_("SectionF")
        #creating field for the following test
        config.SectionF.aFunction = ''
        #Cannot set a function for plain Configuration objects
        #config.SectionF.__setattr__('aFunction', f)
        self.assertRaises(RuntimeError, config.SectionF.__setattr__,
                          'aFunction', f)

        config = ConfigurationEx()
        config.section_("SectionF")
        #No failures with configurationEx
        config.SectionF.aFunction = f

        #However ConfigurationEx instances cannot be saved
        self.assertRaises(RuntimeError, saveConfigurationFile, config,
                          self.functionSave)
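
The test relies on a plain configuration section rejecting callable attribute values while the extended variant accepts them (and then refuses to be saved). Below is a minimal sketch of that __setattr__ behaviour with hypothetical classes, not the WMCore Configuration API.

import unittest


class PlainSection(object):
    """Hypothetical section that refuses to store callables."""
    def __setattr__(self, name, value):
        if callable(value):
            raise RuntimeError("cannot store a callable in a plain section")
        object.__setattr__(self, name, value)


class ExtendedSection(PlainSection):
    """Hypothetical extended section that allows callables (but is not saveable)."""
    def __setattr__(self, name, value):
        object.__setattr__(self, name, value)


class CallableAttributeTest(unittest.TestCase):
    def testCallableValues(self):
        plain = PlainSection()
        self.assertRaises(RuntimeError, plain.__setattr__, 'aFunction', lambda: True)

        extended = ExtendedSection()
        extended.aFunction = lambda: True  # no failure
        self.assertTrue(extended.aFunction())


if __name__ == "__main__":
    unittest.main()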
Example #15
class MySQLTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testInit.setDatabaseConnection()
        self.testDir = self.testInit.generateWorkDir()
        self.config = getConfig(self.testDir)
        # mock generator instance to communicate some configuration values
        self.generator = utils.AlertGeneratorMock(self.config)
        self.testName = self.id().split('.')[-1]


    def tearDown(self):
        self.testInit.delWorkDir()
        self.generator = None


    def testMySQLPollerBasic(self):
        config = getConfig("/tmp")
        generator = utils.AlertGeneratorMock(config)
        # take for instance mysqlCPUPoller configuration here, just need
        # appropriate attributes set
        try:
            poller = MySQLPoller(config.AlertGenerator.mysqlCPUPoller, generator)
        except Exception as ex:
            self.fail("%s: exception: %s" % (self.testName, ex))
        # this class would not have defined polling sample function, give it one
        poller.sample = lambda proc: float(12)
        self.assertEqual(len(poller._measurements), 0)
        poller.check()
        self.assertEqual(len(poller._measurements), 1)
        self.assertEqual(poller._measurements[0], 12)

        # test handling of a non-existing process
        MySQLPoller._getProcessPID = lambda inst: 1212121212
        self.assertRaises(Exception, MySQLPoller,
                          config.AlertGenerator.mysqlCPUPoller, generator)
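
Overwriting MySQLPoller._getProcessPID with a lambda simulates a missing process, but the patch leaks into later tests. Below is a minimal sketch of the same idea using unittest.mock.patch.object, which undoes the patch automatically; the Poller class and the PID threshold are assumptions for illustration.

import unittest
from unittest import mock


class Poller(object):
    """Hypothetical poller that attaches to a database process by PID."""
    def _getProcessPID(self):
        return 1234

    def attach(self):
        pid = self._getProcessPID()
        if pid > 4194304:  # assume PIDs this large cannot exist
            raise Exception("no such process: %s" % pid)
        return pid


class PollerPatchTest(unittest.TestCase):
    def testMissingProcess(self):
        with mock.patch.object(Poller, "_getProcessPID", lambda self: 1212121212):
            self.assertRaises(Exception, Poller().attach)
        # Outside the with-block the original method is restored.
        self.assertEqual(Poller().attach(), 1234)


if __name__ == "__main__":
    unittest.main()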
Example #16
class ConfigurationExTest(unittest.TestCase):
    """
    test case for Configuration object

    """
    def setUp(self):
        """set up"""
        self.testInit = TestInit(__file__)
        self.testDir  = self.testInit.generateWorkDir()
        self.functionSave = "%s/WMCore_Agent_Configuration_t_function.py" % self.testDir


    def tearDown(self):
        """clean up"""
        self.testInit.delWorkDir()


    def testCallableConfigParams(self):
        """ctor"""
        def f():
            return True

        config = Configuration()
        config.section_("SectionF")
        #creating field for the following test
        config.SectionF.aFunction = ''
        #Cannot set a function for plain Configuration objects
        #config.SectionF.__setattr__('aFunction', f)
        self.assertRaises(RuntimeError, config.SectionF.__setattr__, 'aFunction', f)

        config = ConfigurationEx()
        config.section_("SectionF")
        #No failures with configurationEx
        config.SectionF.aFunction = f

        #However ConfigurationEx instances cannot be saved
        self.assertRaises(RuntimeError, saveConfigurationFile, config, self.functionSave)
Example #17
class DBSUploadTest(unittest.TestCase):
    """
    TestCase for DBSUpload module

    """
    def setUp(self):
        """
        _setUp_

        setUp function for unittest

        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = ["WMComponent.DBS3Buffer"],
                                useDefault = False)
        self.testDir = self.testInit.generateWorkDir(deleteOnDestruction = False)
        self.configFile = EmulatorSetup.setupWMAgentConfig()

        myThread = threading.currentThread()
        self.bufferFactory = DAOFactory(package = "WMComponent.DBSBuffer.Database",
                                        logger = myThread.logger,
                                        dbinterface = myThread.dbi)

        self.buffer3Factory = DAOFactory(package = "WMComponent.DBS3Buffer",
                                         logger = myThread.logger,
                                         dbinterface = myThread.dbi)

        locationAction = self.bufferFactory(classname = "DBSBufferFiles.AddLocation")
        locationAction.execute(siteName = "se1.cern.ch")
        locationAction.execute(siteName = "se1.fnal.gov")
        locationAction.execute(siteName = "malpaquet")
        self.dbsUrl = "https://*****:*****"

    @attr("integration")
    def testBasicUpload(self):
        """
        _testBasicUpload_

        Verify that we can successfully upload to DBS3.  Also verify that the
        uploader correctly handles files parentage when uploading.
        """
        self.dbsApi = DbsApi(url = self.dbsUrl)
        config = self.getConfig()
        dbsUploader = DBSUploadPoller(config = config)

        # First test verifies that uploader will poll and then not do anything
        # as the database is empty.
        dbsUploader.algorithm()

        acqEra = "Summer%s" % (int(time.time()))
        parentFiles = self.createParentFiles(acqEra)

        # The algorithm needs to be run twice.  On the first iteration it will
        # create all the blocks and upload one.  On the second iteration it will
        # timeout and upload the second block.
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        # Verify the files made it into DBS3.
        self.verifyData(parentFiles[0]["datasetPath"], parentFiles)

        # Inject some more parent files and some child files into DBSBuffer.
        # Run the uploader twice, only the parent files should be added to DBS3.
        (moreParentFiles, childFiles) = \
                          self.createFilesWithChildren(parentFiles, acqEra)
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"],
                        parentFiles + moreParentFiles)

        # Run the uploader another two times to upload the child files.  Verify
        # that the child files were uploaded.
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(childFiles[0]["datasetPath"], childFiles)
        return

    @attr("integration")
    def testDualUpload(self):
        """
        _testDualUpload_

        Verify that the dual upload mode works correctly.
        """
        self.dbsApi = DbsApi(url = self.dbsUrl)
        config = self.getConfig(dbs3UploadOnly = True)
        dbsUploader = DBSUploadPoller(config = config)
        dbsUtil = DBSBufferUtil()

        # First test verifies that uploader will poll and then not do anything
        # as the database is empty.
        dbsUploader.algorithm()

        acqEra = "Summer%s" % (int(time.time()))
        parentFiles = self.createParentFiles(acqEra)
        (moreParentFiles, childFiles) = \
                          self.createFilesWithChildren(parentFiles, acqEra)

        allFiles = parentFiles + moreParentFiles
        allBlocks = []
        for i in range(4):
            blockName = parentFiles[0]["datasetPath"] + "#" + makeUUID()
            dbsBlock = DBSBlock(blockName, "malpaquet", 1)
            dbsBlock.status = "Open"                
            dbsUtil.createBlocks([dbsBlock])
            for file in allFiles[i * 5 : (i * 5) + 5]:
                dbsBlock.addFile(file)
                dbsUtil.setBlockFiles({"block": blockName, "filelfn": file["lfn"]})
                if i < 2:
                    dbsBlock.status = "InDBS"
                dbsUtil.updateBlocks([dbsBlock])
            dbsUtil.updateFileStatus([dbsBlock], "InDBS")
            allBlocks.append(dbsBlock)            

        blockName = childFiles[0]["datasetPath"] + "#" + makeUUID()
        dbsBlock = DBSBlock(blockName, "malpaquet", 1)
        dbsBlock.status = "InDBS"
        dbsUtil.createBlocks([dbsBlock])
        for file in childFiles:
            dbsBlock.addFile(file)
            dbsUtil.setBlockFiles({"block": blockName, "filelfn": file["lfn"]})

        dbsUtil.updateFileStatus([dbsBlock], "InDBS")

        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"], parentFiles)

        # Change the status of the rest of the parent blocks so we can upload
        # them and the children.
        for dbsBlock in allBlocks:
            dbsBlock.status = "InDBS"            
            dbsUtil.updateBlocks([dbsBlock])

        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"], parentFiles + moreParentFiles)

        # Run the uploader one more time to upload the children.
        dbsUploader.algorithm()
        time.sleep(5)        
    
        self.verifyData(childFiles[0]["datasetPath"], childFiles)
        return

    def testCloseSettingsPerWorkflow(self):
        """
        _testCloseSettingsPerWorkflow_

        Test the block closing mechanics in the DBS3 uploader,
        this uses a fake dbs api to avoid reliance on external services.
        """
        # Signal trapExit that we are a friend
        os.environ["DONT_TRAP_EXIT"] = "True"
        try:
            # Monkey patch the imports of DbsApi
            from WMComponent.DBS3Buffer import DBSUploadPoller as MockDBSUploadPoller
            MockDBSUploadPoller.DbsApi = MockDbsApi
    
            # Set the poller and the dbsUtil for verification
            myThread = threading.currentThread()
            (_, dbsFilePath) = mkstemp(dir = self.testDir)
            self.dbsUrl = dbsFilePath
            config = self.getConfig()
            dbsUploader = MockDBSUploadPoller.DBSUploadPoller(config = config)
            dbsUtil = DBSBufferUtil()
    
            # First test is event based limits and timeout with no new files.
            # Set the files and workflow
            acqEra = "TropicalSeason%s" % (int(time.time()))
            workflowName = 'TestWorkload%s' % (int(time.time()))
            taskPath = '/%s/TestProcessing' % workflowName
            self.injectWorkflow(workflowName, taskPath,
                                MaxWaitTime = 2, MaxFiles = 100,
                                MaxEvents = 150)
            self.createParentFiles(acqEra, nFiles = 20,
                                   workflowName = workflowName,
                                   taskPath = taskPath)
    
            # The algorithm needs to be run twice.  On the first iteration it will
            # create all the blocks and upload one with less than 150 events.
            # On the second iteration the second block is uploaded.
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            openBlocks = dbsUtil.findOpenBlocks()
            self.assertEqual(len(openBlocks), 1)
            globalFiles = myThread.dbi.processData("SELECT id FROM dbsbuffer_file WHERE status = 'InDBS'")[0].fetchall()
            notUploadedFiles = myThread.dbi.processData("SELECT id FROM dbsbuffer_file WHERE status = 'NOTUPLOADED'")[0].fetchall()
            self.assertEqual(len(globalFiles), 14)
            self.assertEqual(len(notUploadedFiles), 6)
            # Check the fake DBS for data
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 2)
            for block in fakeDBSInfo:
                self.assertTrue('block_events' not in block['block'])
                self.assertEqual(block['block']['file_count'], 7)
                self.assertEqual(block['block']['open_for_writing'], 0)
                self.assertTrue('close_settings' not in block)
            time.sleep(3)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            openBlocks = dbsUtil.findOpenBlocks()
            self.assertEqual(len(openBlocks), 0)
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 3)
            for block in fakeDBSInfo:
                if block['block']['file_count'] != 6:
                    self.assertEqual(block['block']['file_count'], 7)
                self.assertTrue('block_events' not in block['block'])
                self.assertEqual(block['block']['open_for_writing'], 0)
                self.assertTrue('close_settings' not in block)

            # Now check the limit by size and timeout with new files
            acqEra = "TropicalSeason%s" % (int(time.time()))
            workflowName = 'TestWorkload%s' % (int(time.time()))
            taskPath = '/%s/TestProcessing' % workflowName
            self.injectWorkflow(workflowName, taskPath,
                                MaxWaitTime = 2, MaxFiles = 5,
                                MaxEvents = 200000000)
            self.createParentFiles(acqEra, nFiles = 16,
                                   workflowName = workflowName,
                                   taskPath = taskPath)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            openBlocks = dbsUtil.findOpenBlocks()
            self.assertEqual(len(openBlocks), 1)
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 6)
            for block in fakeDBSInfo:
                if acqEra in block['block']['block_name']:
                    self.assertEqual(block['block']['file_count'], 5)
                self.assertTrue('block_events' not in block['block'])
                self.assertTrue('close_settings' not in block)
                self.assertEqual(block['block']['open_for_writing'], 0)
    
            # Put more files, they will go into the same block and then it will be closed
            # after timeout
            time.sleep(3)
            self.createParentFiles(acqEra, nFiles = 3,
                                   workflowName = workflowName,
                                   taskPath = taskPath)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            openBlocks = dbsUtil.findOpenBlocks()
            self.assertEqual(len(openBlocks), 0)
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 7)
            for block in fakeDBSInfo:
                if acqEra in block['block']['block_name']:
                    if block['block']['file_count'] < 5:
                        self.assertEqual(block['block']['file_count'], 4)
                    else:
                        self.assertEqual(block['block']['file_count'], 5)
                self.assertTrue('block_events' not in block['block'])
                self.assertEqual(block['block']['open_for_writing'], 0)
                self.assertTrue('close_settings' not in block)

            # Finally test size limits
            acqEra = "TropicalSeason%s" % (int(time.time()))
            workflowName = 'TestWorkload%s' % (int(time.time()))
            taskPath = '/%s/TestProcessing' % workflowName
            self.injectWorkflow(workflowName, taskPath,
                                MaxWaitTime = 1, MaxFiles = 500,
                                MaxEvents = 200000000, MaxSize = 2048)
            self.createParentFiles(acqEra, nFiles = 7,
                                   workflowName = workflowName,
                                   taskPath = taskPath)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            time.sleep(2)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
    
            self.assertEqual(len(openBlocks), 0)
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 11)
            for block in fakeDBSInfo:
                if acqEra in block['block']['block_name']:
                    if block['block']['file_count'] != 1:
                        self.assertEqual(block['block']['block_size'], 2048)
                        self.assertEqual(block['block']['file_count'], 2)
                self.assertTrue('block_events' not in block['block'])
                self.assertEqual(block['block']['open_for_writing'], 0)
                self.assertTrue('close_settings' not in block)
        except:
            self.fail("We failed at some point in the test")
        finally:
            # We don't trust anyone else with _exit
            del os.environ["DONT_TRAP_EXIT"]
        return
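
The core trick in testCloseSettingsPerWorkflow is replacing the DbsApi name inside the poller module with a mock that writes blocks to a local JSON file, so the test can read back what "DBS" received. Below is a minimal, self-contained sketch of that module-attribute patching idea; every name here is hypothetical, not the WMCore or DBS API.

import json
import os
import tempfile
import types
import unittest
from unittest import mock

# Stand-in for the module under test; it resolves "Api" at call time,
# which is what makes the attribute patchable.
uploader = types.ModuleType("uploader")


class RealApi(object):
    def insertBlock(self, block):
        raise RuntimeError("would talk to a remote service")


def uploadBlock(block):
    return uploader.Api().insertBlock(block)

uploader.Api = RealApi
uploader.uploadBlock = uploadBlock


class FakeApi(object):
    """Records every inserted block as one JSON document per line."""
    outputFile = None

    def insertBlock(self, block):
        with open(FakeApi.outputFile, "a") as handle:
            handle.write(json.dumps(block) + "\n")


class ModulePatchTest(unittest.TestCase):
    def testUploadGoesToFake(self):
        fd, path = tempfile.mkstemp()
        os.close(fd)
        FakeApi.outputFile = path
        with mock.patch.object(uploader, "Api", FakeApi):
            uploader.uploadBlock({"block": {"file_count": 7}})
        with open(path) as handle:
            blocks = [json.loads(line) for line in handle]
        self.assertEqual(blocks[0]["block"]["file_count"], 7)


if __name__ == "__main__":
    unittest.main()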
Example #18
class RuntimeTest(unittest.TestCase):
    """
    _RuntimeTest_

    A unittest to test the WMRuntime/WMSpec/Storage/etc tree
    """


    # This is an integration test
    __integration__ = "Any old bollocks"


    def setUp(self):
        """
        Basic setUp

        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()


        self.testDir = self.testInit.generateWorkDir()

        # Random variables
        self.workloadDir = None
        self.unpackDir   = None
        self.initialDir  = os.getcwd()
        self.origPath    = sys.path


        # Create some dirs
        os.makedirs(os.path.join(self.testDir, 'packages'))

        return


    def tearDown(self):
        """
        _tearDown_

        Remove any references you put directly into the modules
        """

        self.testInit.delWorkDir()

        # Clean up imports
        if 'WMSandbox' in sys.modules.keys():
            del sys.modules['WMSandbox']
        if 'WMSandbox.JobIndex' in sys.modules.keys():
            del sys.modules['WMSandbox.JobIndex']


        return


    def createTestWorkload(self, workloadName = 'Test', emulator = True):
        """
        _createTestWorkload_

        Creates a test workload for us to run on, hold the basic necessities.
        """

        workloadDir = os.path.join(self.testDir, workloadName)

        #arguments = getTestArguments()

        #workload = rerecoWorkload("Tier1ReReco", arguments)
        #rereco = workload.getTask("ReReco")

        workload = testWorkload(emulation = emulator)
        rereco = workload.getTask("ReReco")

        # Set environment and site-local-config
        siteConfigPath = os.path.join(workloadDir, 'SITECONF/local/JobConfig/')
        if not os.path.exists(siteConfigPath):
            os.makedirs(siteConfigPath)
        shutil.copy('site-local-config.xml', siteConfigPath)
        environment = rereco.data.section_('environment')
        environment.CMS_PATH = workloadDir

        taskMaker = TaskMaker(workload, workloadDir)
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        workload.save(workloadName)

        return workload



    def unpackComponents(self, workload):
        """
        Run the unpacker to build the directories
        IMPORTANT NOTE:
          This is not how we do things on the worker node
          On the worker node we do not run multiple tasks
          So here we create multiple tasks in different directories
          To mimic running on multiple systems

        """

        listOfTasks = getListOfTasks(workload = workload)

        self.unpackDir = os.path.join(self.testDir, 'unpack')

        if not os.path.exists(self.unpackDir):
            os.makedirs(self.unpackDir)

        os.chdir(self.unpackDir)

        sandbox  = workload.data.sandbox

        for task in listOfTasks:
            # We have to create a directory, unpack in it, and then get out
            taskName = task.name()
            taskDir = os.path.join(self.unpackDir, taskName)
            if not os.path.exists(taskDir):
                # Well then we have to make it
                os.makedirs(taskDir)
            os.chdir(taskDir)
            # Now that we're here, run the unpacker

            package  = os.path.join(self.testDir, 'packages', '%sJobPackage.pkl' % (taskName))
            jobIndex = 1

            RunUnpacker(sandbox = sandbox, package = package,
                        jobIndex = jobIndex, jobname = taskName)

            # And go back to where we started
            os.chdir(self.unpackDir)


        os.chdir(self.initialDir)

        return


    def createWMBSComponents(self, workload):
        """
        Create the WMBS Components for this job

        """

        listOfTasks = []
        listOfSubs  = []

        rerecoTask  = None

        for primeTask in workload.taskIterator():
            # There should only be one prime task, and it should be the rerecoTask
            rerecoTask = primeTask
            for task in primeTask.taskIterator():
                listOfTasks.append(task)

        for task in listOfTasks:
            fileset = self.getFileset()
            sub = self.createSubscriptions(task = task,
                                           fileset = fileset)
            #listOfSubs.append(sub)



        return



    def createSubscriptions(self, task, fileset):
        """
        Create a subscription based on a task


        """
        type = task.taskType()
        work = task.makeWorkflow()

        sub = Subscription(fileset = fileset,
                           workflow = work,
                           split_algo = "FileBased",
                           type = type)

        package = self.createWMBSJobs(subscription = sub,
                                      task = task)

        packName = os.path.join(self.testDir, 'packages',
                                '%sJobPackage.pkl' %(task.name()))
        package.save(packName)

        return sub




    def createWMBSJobs(self, subscription, task):
        """
        Create the jobs for WMBS Components
        Send a subscription/task, get back a package.

        """

        splitter = SplitterFactory()
        geneFac  = GeneratorFactory()
        jobfactory = splitter(subscription = subscription,
                              package = "WMCore.DataStructs",
                              generators = geneFac.makeGenerators(task))
        params = task.jobSplittingParameters()
        jobGroups = jobfactory(**params)

        jobID = 1
        package = JobPackage()
        for group in jobGroups:
            for job in group.jobs:
                job['id'] = jobID
                jobID += 1
                package[job['id']] = job

        return package




    def getFileset(self):
        """
        Get a fileset based on the task

        """

        fileset = Fileset(name = 'Merge%s' %(type))


        for i in range(0, random.randint(15,25)):
            # Use the testDir to generate a random lfn
            inpFile = File(lfn = "%s/%s.root" %(self.testDir, makeUUID()),
                           size = random.randint(200000, 1000000),
                           events = random.randint(1000,2000) )
            inpFile.setLocation('Megiddo')
            fileset.addFile(inpFile)


        return fileset



    def runJobs(self, workload):
        """
        This might actually run the job.  Who knows?


        """
        listOfTasks = []

        for primeTask in workload.taskIterator():
            listOfTasks.append(primeTask)
            # Only run primeTasks for now


        for task in listOfTasks:
            jobName = task.name()
            taskDir = os.path.join(self.unpackDir, jobName, 'job')
            os.chdir(taskDir)
            sys.path.append(taskDir)

            # Scream, run around in panic, blow up machine
            print("About to run jobs")
            print(taskDir)
            miniStartup(dir = taskDir)


            # When exiting, go back to where you started
            os.chdir(self.initialDir)
            sys.path.remove(taskDir)

        return




    @attr('integration')
    def testA_CreateWorkload(self):
        """
        _CreateWorkload_

        Create a workload
        Unpack the workload
        Check for consistency
        """

        workloadName = 'basicWorkload'
        workload     = self.createTestWorkload(workloadName = workloadName)

        self.createWMBSComponents(workload = workload)

        taskNames = []
        for task in getListOfTasks(workload = workload):
            taskNames.append(task.name())

        workloadPath  = os.path.join(self.testDir, workloadName, "TestWorkload")
        siteConfigDir = os.path.join(self.testDir, workloadName, 'SITECONF/local/JobConfig/')


        # Pre-run checks

        # Does it have the right directories?
        dirList = os.listdir(workloadPath)
        self.assertEqual(dirList, ['WMSandbox', 'TestWorkload-Sandbox.tar.bz2'])
        dirList = os.listdir(os.path.join(workloadPath, 'WMSandbox'))
        for taskName in taskNames:
            self.assertTrue(taskName in dirList)

        # Do we have job packages
        for task in taskNames:
            self.assertTrue('%sJobPackage.pkl' % (task) in os.listdir(os.path.join(self.testDir, 'packages')))


        # Does it have the SITECONF?
        self.assertTrue('site-local-config.xml' in os.listdir(siteConfigDir))



        # Now actually see if you can unpack it.
        self.unpackComponents(workload = workload)


        # Check for proper unpacking
        # Check that the task has the right directories,
        # and that the PSetTweaks and WMSandbox directories
        # have the right contents
        taskContents = ['WMSandbox', 'WMCore', 'PSetTweaks']
        PSetContents = ['PSetTweak.pyc', 'CVS', 'PSetTweak.py',
                        '__init__.pyc', 'WMTweak.py', '__init__.py']
        taskSandbox  = ['JobPackage.pcl', 'JobIndex.py', '__init__.py', 'WMWorkload.pkl']
        taskSandbox.extend(taskNames)  # Should have a directory for each task

        for task in taskNames:
            self.assertTrue(task in os.listdir(os.path.join(self.testDir, 'unpack')))
            taskDir = os.path.join(self.testDir, 'unpack', task, 'job')
            self.assertTrue(os.path.isdir(taskDir))
            # list.sort() returns None, so compare sorted copies instead
            self.assertEqual(sorted(os.listdir(taskDir)), sorted(taskContents))
            self.assertEqual(sorted(os.listdir(os.path.join(taskDir, 'WMSandbox'))),
                             sorted(taskSandbox))
            self.assertEqual(sorted(os.listdir(os.path.join(taskDir, 'PSetTweaks'))),
                             sorted(PSetContents))


        # And we're done.
        # Assume if we got this far everything is good


        # At the end, copy the directory
        #if os.path.exists('tmpDir'):
        #    shutil.rmtree('tmpDir')
        #shutil.copytree(self.testDir, 'tmpDir')

        return

    @attr('integration')
    def testB_EmulatorTest(self):
        """
        _EmulatorTest_

        This is where things get scary.  We need to not only unpack the job,
        but also ascertain whether it can run locally in emulator mode.

        This requires...uh...emulator emulation.
        """


        # Assume all this works, because we tested it in testA
        workloadName = 'basicWorkload'
        workload     = self.createTestWorkload(workloadName = workloadName)

        self.createWMBSComponents(workload = workload)

        self.unpackComponents(workload = workload)


        self.runJobs(workload = workload)

        # Check the report
        taskDir = os.path.join(self.testDir, 'unpack/ReReco/job/WMTaskSpace')
        report = Report()
        report.load(os.path.join(taskDir, 'Report.0.pkl'))
        cmsReport = report.data.cmsRun1



        # Now validate the report
        self.assertEqual(report.data.ceName, socket.gethostname())
        self.assertEqual(report.data.seName, 'cmssrm.fnal.gov')
        self.assertEqual(report.data.siteName, 'T1_US_FNAL')
        self.assertEqual(report.data.hostName, socket.gethostname())
        self.assertTrue(report.data.completed)

        # Should have status 0 (emulator job)
        self.assertEqual(cmsReport.status, 0)

        # Should have one output module
        self.assertEqual(cmsReport.outputModules, ['TestOutputModule'])

        # It should have one file for input and output
        self.assertEqual(cmsReport.input.PoolSource.files.fileCount, 1)
        self.assertEqual(cmsReport.output.TestOutputModule.files.fileCount, 1)

        # So, um, I guess we're done


        # At the end, copy the directory
        #if os.path.exists('tmpDir'):
        #    shutil.rmtree('tmpDir')
        #shutil.copytree(self.testDir, 'tmpDir')

        return
Example #19
class DataProcessingTest(unittest.TestCase):
    def setUp(self):
        """
        _setUp_

        Initialize the database.
        """
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = ["WMCore.WMBS"],
                                useDefault = False)
        self.testDir = self.testInit.generateWorkDir()
        return

    def tearDown(self):
        """
        _tearDown_

        Clear out the database.
        """
        self.testInit.clearDatabase()
        self.testInit.delWorkDir()
        return

    def testDataProcessing(self):
        """
        _testDataProcessing_

        Create a data processing workflow and verify it installs into WMBS
        correctly. Check that we can drop an output module.
        """
        testArgs = getTestArguments()
        testArgs['TransientOutputModules'] = ['RECOoutput']
        testWorkload = dataProcessingWorkload("TestWorkload", testArgs)
        testWorkload.setSpecUrl("somespec")
        testWorkload.setOwnerDetails("*****@*****.**", "DMWM")

        testWMBSHelper = WMBSHelper(testWorkload, "DataProcessing", "SomeBlock", cachepath = self.testDir)
        testWMBSHelper.createTopLevelFileset()
        testWMBSHelper._createSubscriptionsInWMBS(testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)

        procWorkflow = Workflow(name = "TestWorkload",
                                task = "/TestWorkload/DataProcessing")
        procWorkflow.load()

        self.assertEqual(len(procWorkflow.outputMap.keys()), 3,
                         "Error: Wrong number of WF outputs.")

        goldenOutputMods = ["RECOoutput", "ALCARECOoutput"]
        for goldenOutputMod in goldenOutputMods:
            mergedOutput = procWorkflow.outputMap[goldenOutputMod][0]["merged_output_fileset"]
            unmergedOutput = procWorkflow.outputMap[goldenOutputMod][0]["output_fileset"]

            mergedOutput.loadData()
            unmergedOutput.loadData()

            if goldenOutputMod in testArgs["TransientOutputModules"]:
                self.assertEqual(mergedOutput.name, "/TestWorkload/DataProcessing/unmerged-%s" % goldenOutputMod,
                                 "Error: Merged output fileset is wrong: %s" % mergedOutput.name)
            else:
                self.assertEqual(mergedOutput.name, "/TestWorkload/DataProcessing/DataProcessingMerge%s/merged-Merged" % goldenOutputMod,
                                 "Error: Merged output fileset is wrong: %s" % mergedOutput.name)
            self.assertEqual(unmergedOutput.name, "/TestWorkload/DataProcessing/unmerged-%s" % goldenOutputMod,
                             "Error: Unmerged output fileset is wrong.")

        logArchOutput = procWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
        unmergedLogArchOutput = procWorkflow.outputMap["logArchive"][0]["output_fileset"]
        logArchOutput.loadData()
        unmergedLogArchOutput.loadData()

        self.assertEqual(logArchOutput.name, "/TestWorkload/DataProcessing/unmerged-logArchive",
                         "Error: LogArchive output fileset is wrong.")
        self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/DataProcessing/unmerged-logArchive",
                         "Error: LogArchive output fileset is wrong.")

        for goldenOutputMod in goldenOutputMods:
            if goldenOutputMod in testArgs["TransientOutputModules"]:
                # No merge for this output module
                continue
            mergeWorkflow = Workflow(name = "TestWorkload",
                                     task = "/TestWorkload/DataProcessing/DataProcessingMerge%s" % goldenOutputMod)
            mergeWorkflow.load()

            self.assertEqual(len(mergeWorkflow.outputMap.keys()), 2,
                             "Error: Wrong number of WF outputs.")

            mergedMergeOutput = mergeWorkflow.outputMap["Merged"][0]["merged_output_fileset"]
            unmergedMergeOutput = mergeWorkflow.outputMap["Merged"][0]["output_fileset"]

            mergedMergeOutput.loadData()
            unmergedMergeOutput.loadData()

            self.assertEqual(mergedMergeOutput.name, "/TestWorkload/DataProcessing/DataProcessingMerge%s/merged-Merged" % goldenOutputMod,
                             "Error: Merged output fileset is wrong.")
            self.assertEqual(unmergedMergeOutput.name, "/TestWorkload/DataProcessing/DataProcessingMerge%s/merged-Merged" % goldenOutputMod,
                             "Error: Unmerged output fileset is wrong.")

            logArchOutput = mergeWorkflow.outputMap["logArchive"][0]["merged_output_fileset"]
            unmergedLogArchOutput = mergeWorkflow.outputMap["logArchive"][0]["output_fileset"]
            logArchOutput.loadData()
            unmergedLogArchOutput.loadData()

            self.assertEqual(logArchOutput.name, "/TestWorkload/DataProcessing/DataProcessingMerge%s/merged-logArchive" % goldenOutputMod,
                             "Error: LogArchive output fileset is wrong: %s" % logArchOutput.name)
            self.assertEqual(unmergedLogArchOutput.name, "/TestWorkload/DataProcessing/DataProcessingMerge%s/merged-logArchive" % goldenOutputMod,
                             "Error: LogArchive output fileset is wrong.")

        topLevelFileset = Fileset(name = "TestWorkload-DataProcessing-SomeBlock")
        topLevelFileset.loadData()

        procSubscription = Subscription(fileset = topLevelFileset, workflow = procWorkflow)
        procSubscription.loadData()

        self.assertEqual(procSubscription["type"], "Processing",
                         "Error: Wrong subscription type.")
        self.assertEqual(procSubscription["split_algo"], "LumiBased",
                         "Error: Wrong split algo.")

        unmergedReco = Fileset(name = "/TestWorkload/DataProcessing/unmerged-RECOoutput")
        unmergedReco.loadData()
        recoMergeWorkflow = Workflow(name = "TestWorkload",
                                     task = "/TestWorkload/DataProcessing/DataProcessingMergeRECOoutput")
        # No merge workflow should exist in WMBS
        self.assertRaises(IndexError, recoMergeWorkflow.load)

        unmergedAlca = Fileset(name = "/TestWorkload/DataProcessing/unmerged-ALCARECOoutput")
        unmergedAlca.loadData()
        alcaMergeWorkflow = Workflow(name = "TestWorkload",
                                     task = "/TestWorkload/DataProcessing/DataProcessingMergeALCARECOoutput")
        alcaMergeWorkflow.load()
        mergeSubscription = Subscription(fileset = unmergedAlca, workflow = alcaMergeWorkflow)
        mergeSubscription.loadData()

        self.assertEqual(mergeSubscription["type"], "Merge",
                         "Error: Wrong subscription type.")
        self.assertEqual(mergeSubscription["split_algo"], "ParentlessMergeBySize",
                         "Error: Wrong split algo.")

        for procOutput in ["RECOoutput", "ALCARECOoutput"]:
            unmerged = Fileset(name = "/TestWorkload/DataProcessing/unmerged-%s" % procOutput)
            unmerged.loadData()
            cleanupWorkflow = Workflow(name = "TestWorkload",
                                      task = "/TestWorkload/DataProcessing/DataProcessingCleanupUnmerged%s" % procOutput)
            cleanupWorkflow.load()
            cleanupSubscription = Subscription(fileset = unmerged, workflow = cleanupWorkflow)
            cleanupSubscription.loadData()

            self.assertEqual(cleanupSubscription["type"], "Cleanup",
                             "Error: Wrong subscription type.")
            self.assertEqual(cleanupSubscription["split_algo"], "SiblingProcessingBased",
                             "Error: Wrong split algo.")

        procLogCollect = Fileset(name = "/TestWorkload/DataProcessing/unmerged-logArchive")
        procLogCollect.loadData()
        procLogCollectWorkflow = Workflow(name = "TestWorkload",
                                          task = "/TestWorkload/DataProcessing/LogCollect")
        procLogCollectWorkflow.load()
        logCollectSub = Subscription(fileset = procLogCollect, workflow = procLogCollectWorkflow)
        logCollectSub.loadData()

        self.assertEqual(logCollectSub["type"], "LogCollect",
                         "Error: Wrong subscription type.")
        self.assertEqual(logCollectSub["split_algo"], "MinFileBased",
                         "Error: Wrong split algo.")

        procLogCollect = Fileset(name = "/TestWorkload/DataProcessing/DataProcessingMergeALCARECOoutput/merged-logArchive")
        procLogCollect.loadData()
        procLogCollectWorkflow = Workflow(name = "TestWorkload",
                                          task = "/TestWorkload/DataProcessing/DataProcessingMergeALCARECOoutput/DataProcessingALCARECOoutputMergeLogCollect")
        procLogCollectWorkflow.load()
        logCollectSub = Subscription(fileset = procLogCollect, workflow = procLogCollectWorkflow)
        logCollectSub.loadData()

        self.assertEqual(logCollectSub["type"], "LogCollect",
                         "Error: Wrong subscription type.")
        self.assertEqual(logCollectSub["split_algo"], "MinFileBased",
                         "Error: Wrong split algo.")

        return
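
# A small illustrative helper (not part of WMCore) that captures the fileset-naming rule
# the assertions above encode: output modules listed in TransientOutputModules get no
# dedicated Merge task, so their "merged" fileset is simply the unmerged one, while every
# other module gets a <Task>Merge<Module> task with a merged-Merged fileset.
def expectedMergedFilesetName(workload, task, outputModule, transientModules):
    if outputModule in transientModules:
        return "/%s/%s/unmerged-%s" % (workload, task, outputModule)
    return "/%s/%s/%sMerge%s/merged-Merged" % (workload, task, task, outputModule)

# For example, with the arguments used in testDataProcessing:
#   expectedMergedFilesetName("TestWorkload", "DataProcessing", "RECOoutput", ["RECOoutput"])
#   -> "/TestWorkload/DataProcessing/unmerged-RECOoutput"
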
Exemple #20
0
class StageOutTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()

        # shut up SiteLocalConfig
        os.environ['CMS_PATH'] = os.getcwd()
        workload = copy.deepcopy(testWorkloads.workload)
        task = workload.getTask("Production")
        step = task.getStep("stageOut1")
        # want to get the cmsstep so I can make the Report
        cmsstep = task.getStep('cmsRun1')
        self.cmsstepdir = os.path.join(self.testDir, 'cmsRun1')
        os.mkdir(self.cmsstepdir)
        open(os.path.join(self.cmsstepdir, '__init__.py'), 'w').close()
        open(os.path.join(self.cmsstepdir, 'Report.pkl'), 'w').close()

        cmsbuilder = CMSSWBuilder.CMSSW()
        cmsbuilder(cmsstep.data, 'Production', self.cmsstepdir)
        realstep = StageOutTemplate.StageOutStepHelper(step.data)
        realstep.disableRetries()
        self.realstep = realstep
        self.stepDir = os.path.join(self.testDir, 'stepdir')
        os.mkdir(self.stepDir)
        builder = StageOutBuilder.StageOut()
        builder(step.data, 'Production', self.stepDir)

        # stolen from CMSSWExecutor_t. thanks, dave

        # first, delete all the sandboxen and taskspaces
        #    because of caching, state leaks in from other tests in other files:
        #    those tests use sandboxen that are deleted once they finish, which
        #    causes these tests to break if the cached modules survive
        modsToDelete = []
        # deleting from a dict while iterating over it is unsafe,
        # so collect the module names first and delete them in a second pass
        for modname in sys.modules.keys():
            # need to blow away things in sys.modules, otherwise
            # they are cached and we look at old taskspaces
            if modname.startswith('WMTaskSpace'):
                modsToDelete.append(modname)
            if modname.startswith('WMSandbox'):
                modsToDelete.append(modname)
        for modname in modsToDelete:
            try:
                reload(sys.modules[modname])
            except Exception:
                pass
            del sys.modules[modname]

        self.oldpath = sys.path[:]
        self.testInit = TestInit(__file__)

        self.testDir = self.testInit.generateWorkDir()
        self.job = Job(name="/UnitTests/DeleterTask/DeleteTest-test-job")
        shutil.copyfile('/etc/hosts', os.path.join(self.testDir, 'testfile'))

        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("DeleterTask")

        cmsswHelper = self.task.makeStep("cmsRun1")
        cmsswHelper.setStepType('CMSSW')
        stepHelper = cmsswHelper.addStep("DeleteTest")
        stepHelper.setStepType('StageOut')

        self.cmsswstep = cmsswHelper.data
        self.cmsswHelper = cmsswHelper

        self.stepdata = stepHelper.data
        self.stephelp = StageOutTemplate.StageOutStepHelper(stepHelper.data)
        self.task.applyTemplates()

        self.executor = StepFactory.getStepExecutor(self.stephelp.stepType())
        taskMaker = TaskMaker(self.workload, os.path.join(self.testDir))
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        self.task.build(os.path.join(self.testDir, 'UnitTests'))

        sys.path.insert(0, self.testDir)
        sys.path.insert(0, os.path.join(self.testDir, 'UnitTests'))

        #        binDir = inspect.getsourcefile(ModuleLocator)
        #        binDir = binDir.replace("__init__.py", "bin")
        #
        #        if not binDir in os.environ['PATH']:
        #            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)
        open(os.path.join(self.testDir, 'UnitTests', '__init__.py'),
             'w').close()
        shutil.copyfile(
            os.path.join(os.path.dirname(__file__), 'MergeSuccess.pkl'),
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))

    def tearDown(self):
        sys.path = self.oldpath[:]
        self.testInit.delWorkDir()

        # making double sure WMTaskSpace and WMSandbox are gone
        modsToDelete = []
        # deleting from a dict while iterating over it is unsafe,
        # so collect the module names first and delete them in a second pass
        for modname in sys.modules.keys():
            # need to blow away things in sys.modules, otherwise
            # they are cached and we look at old taskspaces
            if modname.startswith('WMTaskSpace'):
                modsToDelete.append(modname)
            if modname.startswith('WMSandbox'):
                modsToDelete.append(modname)
        for modname in modsToDelete:
            try:
                reload(sys.modules[modname])
            except Exception:
                pass
            del sys.modules[modname]
        myThread = threading.currentThread()
        if hasattr(myThread, "factory"):
            myThread.factory = {}

    def makeReport(self, fileName):
        myReport = Report('oneitem')
        myReport.addStep('stageOut1')
        myReport.addOutputModule('module1')
        myReport.addOutputModule('module2')
        myReport.addOutputFile('module1', {
            'lfn': 'FILE1',
            'size': 1,
            'events': 1
        })
        myReport.addOutputFile('module2', {
            'lfn': 'FILE2',
            'size': 1,
            'events': 1
        })
        myReport.addOutputFile('module2', {
            'lfn': 'FILE3',
            'size': 1,
            'events': 1
        })
        myReport.persist(fileName)

    def testExecutorDoesntDetonate(self):
        myReport = Report()
        myReport.unpersist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))

        executor = StageOutExecutor.StageOut()

        executor.initialise(self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute()
        self.assertFalse(os.path.exists(os.path.join(self.testDir, 'hosts')))
        self.assertFalse(
            os.path.exists(os.path.join(self.testDir, 'test1', 'hosts')))
        return

    def testUnitTestBackend(self):
        myReport = Report()
        myReport.unpersist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))

        executor = StageOutExecutor.StageOut()
        helper = StageOutTemplate.StageOutStepHelper(self.stepdata)
        helper.addOverride(override='command', overrideValue='test-win')
        helper.addOverride(override='option', overrideValue='')
        helper.addOverride(override='phedex-node',
                           overrideValue='charlie.sheen.biz')
        helper.addOverride(override='lfn-prefix', overrideValue='test-win')

        executor.initialise(self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute()
        self.assertFalse(os.path.exists(os.path.join(self.testDir, 'hosts')))
        self.assertFalse(
            os.path.exists(os.path.join(self.testDir, 'test1', 'hosts')))

    def testUnitTestBackendNew(self):
        myReport = Report()
        myReport.unpersist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))

        executor = StageOutExecutor.StageOut()
        helper = StageOutTemplate.StageOutStepHelper(self.stepdata)
        helper.addOverride(override='command', overrideValue='test-win')
        helper.addOverride(override='option', overrideValue='')
        helper.addOverride(override='phedex-node',
                           overrideValue='charlie.sheen.biz')
        helper.addOverride(override='lfn-prefix', overrideValue='test-win')
        helper.setNewStageoutOverride(True)

        executor.initialise(self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute()
        self.assertFalse(os.path.exists(os.path.join(self.testDir, 'hosts')))
        self.assertFalse(
            os.path.exists(os.path.join(self.testDir, 'test1', 'hosts')))

    def setLocalOverride(self, step):
        step.section_('override')
        step.override.command = 'cp'
        step.override.option = ''
        step.override.__setattr__('lfn-prefix', self.testDir + "/")
        step.override.__setattr__('phedex-node', 'DUMMYPNN')
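
# A small helper (an illustration, not WMCore code) capturing the sys.modules cleanup that
# setUp and tearDown above both perform: cached WMTaskSpace / WMSandbox packages are purged
# so task spaces left behind by other tests cannot leak into this one.
import sys


def purgeTaskSpaceModules(prefixes=("WMTaskSpace", "WMSandbox")):
    for modname in list(sys.modules.keys()):
        if modname.startswith(prefixes):
            sys.modules.pop(modname, None)
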
Exemple #21
0
class CMSSW_t(unittest.TestCase):
    def setUp(self):
        """
        _setUp_

        Build a testing environment similar to a WN
        """
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()

        # Build a workload/task/step with the basic required information
        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("CMSSWExecutor")
        stepHelper = self.task.makeStep("ExecutorTest")
        self.step = stepHelper.data
        template = CMSSWTemplate()
        template(self.step)
        self.helper = template.helper(self.step)
        self.step.application.setup.scramCommand = "scramulator.py"
        self.step.application.command.executable = "cmsRun.py"
        self.step.application.setup.scramProject = "CMSSW"
        self.step.application.setup.scramArch = "slc5_ia32_gcc434"
        self.step.application.setup.cmsswVersion = "CMSSW_X_Y_Z"
        self.step.application.setup.softwareEnvironment = "echo \"Software Setup...\";"
        self.step.output.jobReport = "FrameworkJobReport.xml"
        self.helper.addOutputModule("outputRECORECO", primaryDataset="Bogus",
                                    processedDataset="Test-Era-v1",
                                    dataTier="DATA")
        self.helper.addOutputModule("outputALCARECORECO", primaryDataset="Bogus",
                                    processedDataset="Test-Era-v1",
                                    dataTier="DATA")
        self.helper.setGlobalTag("Bogus")
        taskMaker = TaskMaker(self.workload, self.testDir)
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        # Build the TaskSpace/StepSpace
        self.sandboxDir = os.path.join(self.testDir, "UnitTests")
        self.task.build(self.testDir)
        sys.path.append(self.testDir)
        sys.path.append(self.sandboxDir)

        # Copy the files that cmsRun would have generated in the step space
        open(os.path.join(self.step.builder.workingDir, "outputRECORECO.root"), "w").close()
        open(os.path.join(self.step.builder.workingDir, "outputALCARECORECO.root"), "w").close()
        shutil.copy(os.path.join(getTestBase(),
                                 "WMCore_t/FwkJobReport_t/CMSSWProcessingReport.xml"),
                    os.path.join(self.step.builder.workingDir, "FrameworkJobReport.xml"))

        # Create a job
        self.job = Job(name="/UnitTest/CMSSWExecutor/ExecutorTest-test-job")
        self.job["id"] = 1

        # Set the PATH
        binDir = inspect.getsourcefile(ModuleLocator)
        binDir = binDir.replace("__init__.py", "bin")

        if not binDir in os.environ['PATH']:
            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)

        self.oldCwd = os.getcwd()

    def tearDown(self):
        """
        _tearDown_

        Clean up
        """
        self.testInit.delWorkDir()
        sys.path.remove(self.testDir)
        sys.path.remove(self.sandboxDir)
        try:
            sys.modules.pop("WMTaskSpace")
            sys.modules.pop("WMTaskSpace.ExecutorTest")
            sys.modules.pop("WMTaskSpace.ExecutorTest.WMCore")
            sys.modules.pop("WMSandbox")
            sys.modules.pop("WMSandbox.CMSSWExecutor")
            sys.modules.pop("WMSandbox.CMSSWExecutor.ExecutorTest")
        except KeyError:
            return

        return

    def testA_Execute(self):
        """
        _Execute_

        Test the full path, including execute.
        """
        try:
            os.chdir(self.step.builder.workingDir)
            executor = CMSSWExecutor()
            executor.initialise(self.step, self.job)
            executor.pre()
            # Remove the scram pre-script as it requires an actual SCRAM environment
            executor.step.runtime.scramPreScripts.remove("SetupCMSSWPset")
            executor.execute()
            executor.post()
            # Check that there were no errors
            self.assertEqual(0, executor.report.getExitCode())
            # Check that we processed the XML
            self.assertEqual(2, len(executor.report.getAllFiles()))
            # Check that the executable was really executed
            self.assertTrue(os.path.isfile(os.path.join(self.step.builder.workingDir, "BogusFile.txt")))
        except Exception as ex:
            self.fail("Failure encountered, %s" % str(ex))
        finally:
            os.chdir(self.oldCwd)
        return

    def testB_ExecuteNonZeroExit(self):
        """
        _ExecuteNonZeroExit_

        Test the execution of a script which exits with a non-zero code.
        """
        self.step.application.command.executable = "brokenCmsRun.py"
        shutil.copy(os.path.join(getTestBase(),
                                 "WMCore_t/FwkJobReport_t/CMSSWFailReport.xml"),
                    os.path.join(self.step.builder.workingDir, "FrameworkJobReport.xml"))
        try:
            os.chdir(self.step.builder.workingDir)
            executor = StepFactory.getStepExecutor("CMSSW")
            executor.initialise(self.step, self.job)
            executor.pre()
            executor.step.runtime.scramPreScripts.remove("SetupCMSSWPset")
            try:
                executor.execute()
                self.fail("An exception should have been raised")
            except WMExecutionFailure as ex:
                executor.diagnostic(ex.code, executor, ExceptionInstance=ex)
                self.assertEqual(8001, executor.report.getExitCode())
                report = Report()
                report.load("Report.pkl")
                self.assertEqual(8001, report.getExitCode())
        except Exception as ex:
            self.fail("Failure encountered, %s" % str(ex))
        finally:
            os.chdir(self.oldCwd)
        return

    def testC_ExecuteSegfault(self):
        """
        _ExecuteSegfault_

        Test the execution of a script which raises an ABRT signal,
        which is the normal CMSSW response to a SEGFAULT.
        """
        self.step.application.command.executable = "test.sh"
        # CMSSW leaves an empty FWJR when a SEGFAULT is present
        open(os.path.join(self.step.builder.workingDir, "FrameworkJobReport.xml"), "w").close()
        try:
            os.chdir(self.step.builder.workingDir)
            executor = StepFactory.getStepExecutor("CMSSW")
            executor.initialise(self.step, self.job)
            executor.pre()
            executor.step.runtime.scramPreScripts.remove("SetupCMSSWPset")
            try:
                executor.execute()
                self.fail("An exception should have been raised")
            except WMExecutionFailure as ex:
                executor.diagnostic(ex.code, executor, ExceptionInstance=ex)
                self.assertEqual(50115, executor.report.getExitCode())
                report = Report()
                report.load("Report.pkl")
                self.assertEqual(50115, report.getExitCode())
        except Exception as ex:
            self.fail("Failure encountered, %s" % str(ex))
        finally:
            os.chdir(self.oldCwd)
        return

    def testD_ExecuteNoOutput(self):
        """
        _ExecuteNoOutput_

        Test what happens when no output is produced;
        the proper error should be reported in the job report.
        """
        self.step.application.command.executable = "cmsRun.py"
        shutil.copy(os.path.join(getTestBase(),
                                 "WMCore_t/FwkJobReport_t/CMSSWSkippedAll.xml"),
                    os.path.join(self.step.builder.workingDir, "FrameworkJobReport.xml"))
        try:
            os.chdir(self.step.builder.workingDir)
            executor = StepFactory.getStepExecutor("CMSSW")
            executor.initialise(self.step, self.job)
            executor.pre()
            executor.step.runtime.scramPreScripts.remove("SetupCMSSWPset")
            executor.execute()
            executor.post()
            self.assertEqual(60450, executor.report.getExitCode())
        except Exception as ex:
            self.fail("Failure encountered, %s" % str(ex))
        finally:
            os.chdir(self.oldCwd)
        return
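
# A minimal sketch (import path assumed; names reused from the tests above) of the
# failure-handling pattern testB and testC exercise: execute() raises WMExecutionFailure
# on a bad cmsRun exit, diagnostic() translates it into an exit code on the step report,
# and the persisted Report.pkl carries the same code.
from WMCore.WMSpec.Steps.WMExecutionFailure import WMExecutionFailure  # assumed path


def runAndDiagnose(executor):
    try:
        executor.execute()
    except WMExecutionFailure as ex:
        executor.diagnostic(ex.code, executor, ExceptionInstance=ex)
    return executor.report.getExitCode()
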
Exemple #22
0
class RunJobTest(unittest.TestCase):
    """
    _RunJobTest_


    Test the RunJob object and accessors
    """
    def setUp(self):

        myThread = threading.currentThread()

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        #self.tearDown()
        self.testInit.setSchema(customModules=[
            "WMCore.WMBS", "WMCore.BossAir", "WMCore.ResourceControl",
            "WMCore.Agent.Database"
        ],
                                useDefault=False)

        self.daoFactory = DAOFactory(package="WMCore.BossAir",
                                     logger=myThread.logger,
                                     dbinterface=myThread.dbi)

        resourceControl = ResourceControl()
        resourceControl.insertSite(siteName='Xanadu',
                                   pnn='se.Xanadu',
                                   ceName='Xanadu',
                                   plugin="TestPlugin")
        resourceControl.insertThreshold(siteName = 'Xanadu', taskType = 'Processing', \
                                        maxSlots = 10000, pendingSlots = 10000)

        # Create user
        wmbsFactory = DAOFactory(package="WMCore.WMBS",
                                 logger=myThread.logger,
                                 dbinterface=myThread.dbi)
        newuser = wmbsFactory(classname="Users.New")
        newuser.execute(dn="mnorman",
                        group_name="phgroup",
                        role_name="cmsrole")

        if PY3:
            self.assertItemsEqual = self.assertCountEqual

    def tearDown(self):
        """
        Database deletion
        """
        self.testInit.clearDatabase(modules=[
            "WMCore.WMBS", "WMCore.BossAir", "WMCore.ResourceControl",
            "WMCore.Agent.Database"
        ])

        self.testInit.delWorkDir()

        return

    def createJobs(self, nJobs):
        """
        Creates a series of jobGroups for submissions

        """

        testWorkflow = Workflow(spec="dummy",
                                owner="mnorman",
                                name="dummy",
                                task="basicWorkload/Production")
        testWorkflow.create()

        # Create Fileset, Subscription, jobGroup
        testFileset = Fileset(name="dummy")
        testFileset.create()
        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow,
                                        type="Processing",
                                        split_algo="FileBased")
        testSubscription.create()

        testJobGroup = JobGroup(subscription=testSubscription)
        testJobGroup.create()

        # Create jobs
        for id in range(nJobs):
            testJob = Job(name='Job_%i' % (id))
            testJob['owner'] = "mnorman"
            testJob['location'] = 'Xanadu'
            testJob.create(testJobGroup)
            testJobGroup.add(testJob)

        testFileset.commit()
        testJobGroup.commit()

        return testJobGroup

    def testA_BulkDAOs(self):
        """
        _BulkDAOs_

        Test the bulk DAOs, which are the only ones we should be using.
        """

        myThread = threading.currentThread()

        jobGroup = self.createJobs(nJobs=10)

        runJobs = []
        for job in jobGroup.jobs:
            runJob = RunJob(jobid=job.exists())
            runJob['status'] = 'New'
            runJob['userdn'] = job['owner']
            runJob['usergroup'] = 'phgroup'
            runJob['userrole'] = 'cmsrole'
            runJobs.append(runJob)

        statusDAO = self.daoFactory(classname="NewState")
        statusDAO.execute(states=['New', 'Gone', 'Dead'])

        result = myThread.dbi.processData(
            "SELECT name FROM bl_status")[0].fetchall()
        self.assertItemsEqual(result, [('Dead', ), ('Gone', ), ('New', )])

        newJobDAO = self.daoFactory(classname="NewJobs")
        newJobDAO.execute(jobs=runJobs)

        result = myThread.dbi.processData(
            "SELECT wmbs_id FROM bl_runjob")[0].fetchall()
        self.assertEqual(result, [(1, ), (2, ), (3, ), (4, ), (5, ), (6, ),
                                  (7, ), (8, ), (9, ), (10, )])

        loadJobsDAO = self.daoFactory(classname="LoadByStatus")
        loadJobs = loadJobsDAO.execute(status="New")
        self.assertEqual(len(loadJobs), 10)

        idList = [x['id'] for x in loadJobs]

        for job in loadJobs:
            job['bulkid'] = 1001

        updateDAO = self.daoFactory(classname="UpdateJobs")
        updateDAO.execute(jobs=loadJobs)

        loadJobs = loadJobsDAO.execute(status='New')
        self.assertEqual(len(loadJobs), 10)
        for job in loadJobs:
            self.assertEqual(job['bulkid'], '1001')

        loadWMBSDAO = self.daoFactory(classname="LoadByWMBSID")
        for job in jobGroup.jobs:
            jDict = loadWMBSDAO.execute(jobs=[job])
            self.assertEqual(job['id'], jDict[0]['jobid'])

        setStatusDAO = self.daoFactory(classname="SetStatus")
        setStatusDAO.execute(jobs=idList, status='Dead')

        result = loadJobsDAO.execute(status='Dead')
        self.assertEqual(len(result), 10)
        result = loadJobsDAO.execute(status='New')
        self.assertEqual(len(result), 0)

        runningJobDAO = self.daoFactory(classname="LoadRunning")
        runningJobs = runningJobDAO.execute()

        self.assertEqual(len(runningJobs), 10)

        completeDAO = self.daoFactory(classname="CompleteJob")
        completeDAO.execute(jobs=idList)

        result = loadJobsDAO.execute(status='Dead', complete='0')
        self.assertEqual(len(result), 10)

        deleteDAO = self.daoFactory(classname="DeleteJobs")
        deleteDAO.execute(jobs=idList)

        result = loadJobsDAO.execute(status='Dead')
        self.assertEqual(len(result), 0)

        return

    def testB_CheckWMBSBuild(self):
        """
        _CheckWMBSBuild_

        Trivial test that checks whether we can build
        runJobs from WMBS jobs
        """

        jobGroup = self.createJobs(nJobs=10)

        for job in jobGroup.jobs:
            rj = RunJob()
            rj.buildFromJob(job=job)
            self.assertEqual(job['id'], rj['jobid'])
            self.assertEqual(job['retry_count'], rj['retry_count'])
            job2 = rj.buildWMBSJob()
            self.assertEqual(job['id'], job2['id'])
            self.assertEqual(job['retry_count'], job2['retry_count'])

        return

    def testC_CheckWMBSBuildRoleAndGroup(self):
        """
        _CheckWMBSBuildRoleAndGroup_

        Trivial test that checks whether the user role and group survive
        the round trip between WMBS jobs and runJobs
        """
        jobGroup = []

        # Create jobs
        for id in range(10):
            testJob = Job(name='Job_%i' % (id))
            testJob['owner'] = "mnorman"
            testJob['usergroup'] = "mygroup_%i" % id
            testJob['userrole'] = "myrole_%i" % id
            testJob['location'] = 'Xanadu'
            jobGroup.append(testJob)

        for job in jobGroup:
            rj = RunJob()
            rj.buildFromJob(job=job)
            self.assertEqual(job['usergroup'], rj['usergroup'])
            self.assertEqual(job['userrole'], rj['userrole'])
            job2 = rj.buildWMBSJob()
            self.assertEqual(job['usergroup'], job2['usergroup'])
            self.assertEqual(job['userrole'], job2['userrole'])
        return
Exemple #23
0
class CMSSW_t(unittest.TestCase):
    def setUp(self):
        """
        build a step for testing purposes

        """
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()

        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("CMSSWExecutor")
        stepHelper = step = self.task.makeStep("ExecutorTest")
        self.step = stepHelper.data
        template = CMSSWTemplate()
        template(self.step)
        self.helper = template.helper(self.step)
        self.step.application.setup.scramCommand = "scramulator.py"
        self.step.application.command.executable = "cmsRun.py"
        self.step.application.setup.scramProject = "CMSSW"
        self.step.application.setup.scramArch = "slc5_ia32_gcc434"
        self.step.application.setup.cmsswVersion = "CMSSW_X_Y_Z"
        self.step.application.setup.softwareEnvironment = "echo \"Software Setup...\";"

        taskMaker = TaskMaker(self.workload, self.testDir)
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        self.sandboxDir = "%s/UnitTests" % self.testDir

        self.task.build(self.testDir)
        sys.path.append(self.testDir)
        sys.path.append(self.sandboxDir)

        self.job = Job(name="/UnitTest/CMSSWExecutor/ExecutorTest-test-job")

        binDir = inspect.getsourcefile(ModuleLocator)
        binDir = binDir.replace("__init__.py", "bin")

        if not binDir in os.environ['PATH']:
            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)

    def tearDown(self):
        """
        clean up
        """
        self.testInit.delWorkDir()
        sys.path.remove(self.testDir)
        sys.path.remove(self.sandboxDir)

    def testA_PrePost(self):
        """
        _PrePost_

        This test should run on any machine.
        It will simply make sure there are no major syntax errors or
        incompatibilities.

        It does not test the main functionality.
        """
        executor = CMSSWExecutor()
        executor.initialise(self.step, self.job)
        executor.pre()
        #executor.execute()
        executor.post()
        self.assertEqual(executor.report.data.ExecutorTest.status, 1)
        self.assertEqual(
            executor.report.data.ExecutorTest.analysis.files.fileCount, 0)
        return

    def testB_Execute(self):
        """
        _Execute_

        This test will only run where the scram setup can be done properly.

        It may need updating; for now, a failure to set up the environment
        is tolerated and the test simply returns.
        """

        executor = CMSSWExecutor()
        executor.initialise(self.step, self.job)
        executor.pre()
        try:
            executor.execute()
        except WMExecutionFailure:
            # Execution fails when no real SCRAM environment is available;
            # if it does, just return
            return
        executor.post()

        print(executor.report)
        return
Exemple #24
0
class Test(unittest.TestCase):


    def setUp(self):

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection( destroyAllDatabase = True )
        self.testInit.setSchema(customModules = ["WMCore.WMBS", "WMCore.BossAir"],
                                useDefault = False)
        
        self.testDir = self.testInit.generateWorkDir()
        self.sites = ["somese"]
        
        myThread = threading.currentThread()        

        self.daoFactory = DAOFactory(package = "WMCore.WMBS",
                                     logger = myThread.logger,
                                     dbinterface = myThread.dbi)
        self.baDaoFactory =  DAOFactory(package = "WMCore.BossAir",
                                     logger = myThread.logger,
                                     dbinterface = myThread.dbi)
        return


    def tearDown(self):
        #self.testInit.clearDatabase()
        self.testInit.delWorkDir()
        return
    
    
    def createJobGroups(self, nSubs, nJobs, task, workloadSpec, site = None, bl = [], wl = []):
        """
        Creates a series of jobGroups for submissions

        """

        jobGroupList = []

        testWorkflow = Workflow(spec = workloadSpec, owner = "mnorman",
                                name = makeUUID(), task="basicWorkload/Production",
                                owner_vogroup = 'phgroup', owner_vorole = 'cmsrole')
        testWorkflow.create()

        # Create subscriptions
        for i in range(nSubs):

            name = makeUUID()

            # Create Fileset, Subscription, jobGroup
            testFileset = Fileset(name = name)
            testFileset.create()
            testSubscription = Subscription(fileset = testFileset,
                                            workflow = testWorkflow,
                                            type = "Processing",
                                            split_algo = "FileBased")
            testSubscription.create()

            testJobGroup = JobGroup(subscription = testSubscription)
            testJobGroup.create()


            # Create jobs
            self.makeNJobs(name = name, task = task,
                           nJobs = nJobs,
                           jobGroup = testJobGroup,
                           fileset = testFileset,
                           sub = testSubscription.exists(),
                           site = site, bl = bl, wl = wl)



            testFileset.commit()
            testJobGroup.commit()
            jobGroupList.append(testJobGroup)

        return jobGroupList


    def makeNJobs(self, name, task, nJobs, jobGroup, fileset, sub, site = None, bl = [], wl = []):
        """
        _makeNJobs_

        Make and return a WMBS Job and File
        This handles all those damn add-ons

        """
        # Set the CacheDir
        cacheDir = os.path.join(self.testDir, 'CacheDir')

        for n in range(nJobs):
            # First make a file
            #site = self.sites[0]
            testFile = File(lfn = "/singleLfn/%s/%s" %(name, n),
                            size = 1024, events = 10)
            if site:
                testFile.setLocation(site)
            else:
                for tmpSite in self.sites:
                    testFile.setLocation('%s' % (tmpSite))
            testFile.create()
            fileset.addFile(testFile)


        fileset.commit()

        index = 0
        for f in fileset.files:
            index += 1
            testJob = Job(name = '%s-%i' %(name, index))
            testJob.addFile(f)
            testJob["location"]  = f.getLocations()[0]
            testJob['custom']['location'] = f.getLocations()[0]
            testJob['task']    = task
            testJob['sandbox'] = task
            testJob['spec']    = os.path.join(self.testDir, 'basicWorkload.pcl')
            testJob['mask']['FirstEvent'] = 101
            testJob['owner']   = 'mnorman'
            testJob["siteBlacklist"] = bl
            testJob["siteWhitelist"] = wl
            testJob['ownerDN'] = 'mnorman'
            testJob['ownerRole'] = 'cmsrole'
            testJob['ownerGroup'] = 'phgroup'

            jobCache = os.path.join(cacheDir, 'Sub_%i' % (sub), 'Job_%i' % (index))
            os.makedirs(jobCache)
            testJob.create(jobGroup)
            testJob['cache_dir'] = jobCache
            testJob.save()
            jobGroup.add(testJob)
            output = open(os.path.join(jobCache, 'job.pkl'),'w')
            pickle.dump(testJob, output)
            output.close()

        return testJob, testFile

    def testA(self):
        """
        This will give us a:
        
        TypeError: expecting numeric data
        """
        
        newBossAirState = self.baDaoFactory(classname = "NewState")
        
        states = ["Dead", "Alive", "Zombie", "Reanimated"]
        
        newBossAirState.execute(states)
        
        newUserDAO = self.daoFactory(classname = "Users.New")
        
        user = {'dn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout', 'hn' : 'chen',
                'owner' : 'chen', 'group' : 'brewmasters', 'group_name' : 'DEFAULT', 'role_name' : 'DEFAULT'}

        newUserDAO.execute(**user)
        
        newLocationDAO = self.daoFactory(classname = "Locations.New")
        
        location = {'siteName' : 'SomeSite', 'runningSlots' : 10, 'pendingSlots' : 20, 'seName' : 'somese', 'ceName' : 'yeahsomece',
                    'plugin' : 'THEplugin', 'cmsName' : 'notforthisone'}

        newLocationDAO.execute(**location)
        
        jobOne = {'jobid' : 1, 'gridid' : None, 'bulkid' : None, 'status' : 'Alive' , 'retry_count' : 0,
                  'userdn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout',
                  'usergroup' : 'DEFAULT', 'userrole'  : 'DEFAULT', 'siteName' : 'SomeSite'}
        
        jobTwo = {'jobid' : 2, 'gridid' : None, 'bulkid' : None, 'status' : 'Alive' , 'retry_count' : 0,
                  'userdn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout',
                  'usergroup' : 'DEFAULT', 'userrole'  : 'DEFAULT', 'siteName' : 'SomeSite'}
        
        self.createJobGroups(1, 2, "SomeTask", "Spec.xml")

        newRunJobDao = self.baDaoFactory(classname = "NewJobs")
        newRunJobDao.execute([jobOne, jobTwo])
        
        
        
        updateJobDAO = self.baDaoFactory(classname = "UpdateJobs")
        
        jobOneUpdate = {'jobid' : 1, 'gridid' : None, 'bulkid' : None, 'status' : 'Dead' , 'retry_count' : 0,
                  'userdn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout',
                  'usergroup' : 'DEFAULT', 'userrole'  : 'DEFAULT', 'id' : 1, 'status_time' : '0'}
        
        jobTwoUpdate = {'jobid' : 2, 'gridid' : None, 'bulkid' : None, 'status' : 'Dead' , 'retry_count' : 0,
                  'userdn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout',
                  'usergroup' : 'DEFAULT', 'userrole'  : 'DEFAULT', 'id' : 2, 'status_time' : 0}

        updateJobDAO.execute([jobTwoUpdate, jobOneUpdate])
        
    def testB(self):
        """
        This will give us a:
        
        TypeError: expecting string, unicode or buffer object
        """
        
        newBossAirState = self.baDaoFactory(classname = "NewState")
        
        states = ["Dead", "Alive", "Zombie", "Reanimated"]
        
        newBossAirState.execute(states)
        
        newUserDAO = self.daoFactory(classname = "Users.New")
        
        user = {'dn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout', 'hn' : 'chen',
                'owner' : 'chen', 'group' : 'brewmasters', 'group_name' : 'DEFAULT', 'role_name' : 'DEFAULT'}

        newUserDAO.execute(**user)
        
        newLocationDAO = self.daoFactory(classname = "Locations.New")
        
        location = {'siteName' : 'SomeSite', 'runningSlots' : 10, 'pendingSlots' : 20, 'seName' : 'somese', 'ceName' : 'yeahsomece',
                    'plugin' : 'THEplugin', 'cmsName' : 'notforthisone'}

        newLocationDAO.execute(**location)
        
        jobOne = {'jobid' : 1, 'gridid' : None, 'bulkid' : None, 'status' : 'Alive' , 'retry_count' : 0,
                  'userdn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout',
                  'usergroup' : 'DEFAULT', 'userrole'  : 'DEFAULT', 'siteName' : 'SomeSite'}
        
        jobTwo = {'jobid' : 2, 'gridid' : None, 'bulkid' : None, 'status' : 'Alive' , 'retry_count' : 0,
                  'userdn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout',
                  'usergroup' : 'DEFAULT', 'userrole'  : 'DEFAULT', 'siteName' : 'SomeSite'}
        
        self.createJobGroups(1, 2, "SomeTask", "Spec.xml")

        newRunJobDao = self.baDaoFactory(classname = "NewJobs")
        newRunJobDao.execute([jobOne, jobTwo])
        
        
        
        updateJobDAO = self.baDaoFactory(classname = "UpdateJobs")
        
        jobOneUpdate = {'jobid' : 1, 'gridid' : None, 'bulkid' : None, 'status' : 'Dead' , 'retry_count' : 0,
                  'userdn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout',
                  'usergroup' : 'DEFAULT', 'userrole'  : 'DEFAULT', 'id' : 1, 'status_time' : '0'}
        
        jobTwoUpdate = {'jobid' : 2, 'gridid' : None, 'bulkid' : None, 'status' : 'Dead' , 'retry_count' : 0,
                  'userdn' : '/DC=mop/DC=azeroth/OU=Organic Units/OU=Users/CN=chen/CN=Chen Stormstout',
                  'usergroup' : 'DEFAULT', 'userrole'  : 'DEFAULT', 'id' : 2, 'status_time' : 0}

        updateJobDAO.execute([jobOneUpdate, jobTwoUpdate])
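
        # Note (an inference from the two tests above, not stated in the original code):
        # testA and testB build identical job dictionaries; jobOneUpdate carries status_time
        # as the string '0' while jobTwoUpdate carries the integer 0, and only the order
        # passed to UpdateJobs.execute() differs. The binding layer presumably types the
        # status_time column from the first row it sees, so the mixed types trigger the two
        # different TypeErrors quoted in the docstrings.
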
Exemple #25
0
class ConfigurationTest(unittest.TestCase):
    """
    test case for Configuration object

    """
    def setUp(self):
        """set up"""
        self.testInit = TestInit(__file__)
        self.testDir  = self.testInit.generateWorkDir()
        self.normalSave = "%s/WMCore_Agent_Configuration_t_normal.py" % self.testDir
        self.docSave = "%s/WMCore_Agent_Configuration_t_documented.py" % self.testDir
        self.commentSave = "%s/WMCore_Agent_Configuration_t_commented.py" % self.testDir


    def tearDown(self):
        """clean up"""
        self.testInit.delWorkDir()


    def testA(self):
        """ctor"""
        try:
            dummyConfig = Configuration()
        except Exception as ex:
            msg = "Failed to instantiate Configuration\n"
            msg += str(ex)
            self.fail(msg)


    def testB(self):
        """add settings"""
        config = Configuration()
        config.section_("Section1")

        section1 = getattr(config, "Section1", None)
        self.assertTrue(section1 != None)

        config.section_("Section2")
        section2 = getattr(config, "Section2", None)
        self.assertTrue(section2 != None)

        self.assertRaises(AttributeError, getattr, config, "Section3")

        # basic types
        config.Section1.Parameter1 = True
        config.Section1.Parameter2 = "string"
        config.Section1.Parameter3 = 123
        config.Section1.Parameter4 = 123.456

        self.assertEqual(config.Section1.Parameter1, True)
        self.assertEqual(config.Section1.Parameter2, "string")
        self.assertEqual(config.Section1.Parameter3, 123)
        self.assertEqual(config.Section1.Parameter4, 123.456)

        # dictionary format:
        try:
            section1Dict = config.Section1.dictionary_()
        except Exception as ex:
            msg = "Error converting section to dictionary:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)

        self.assertTrue( "Parameter1" in section1Dict)
        self.assertTrue( "Parameter2" in section1Dict)
        self.assertTrue( "Parameter3" in section1Dict)
        self.assertTrue( "Parameter4" in section1Dict)

        self.assertEqual(section1Dict['Parameter1'],
                         config.Section1.Parameter1)
        self.assertEqual(section1Dict['Parameter2'],
                         config.Section1.Parameter2)
        self.assertEqual(section1Dict['Parameter3'],
                         config.Section1.Parameter3)
        self.assertEqual(section1Dict['Parameter4'],
                         config.Section1.Parameter4)

        # compound types
        config.Section2.List = ["string", 123, 123.456, False]
        config.Section2.Dictionary = { "string" : "string",
                                       "int" : 123,
                                       "float" : 123.456,
                                       "bool" : False}
        config.Section2.Tuple = ("string", 123, 123.456, False)


        self.assertEqual(config.Section2.List,
                         ["string", 123, 123.456, False])
        self.assertEqual(config.Section2.Tuple,
                         ("string", 123, 123.456, False))

        class DummyObject:
            pass
        # unsupported parameter type
        self.assertRaises(
            RuntimeError, setattr,
            config.Section2, "BadObject", DummyObject())
        # unsupported data type in compound type
        badList = [ DummyObject(), DummyObject()]
        self.assertRaises(
            RuntimeError, setattr,
            config.Section2, "BadList", badList)

        badDict = { "dict" : {}, "list": [DummyObject()], "tuple" : () }
        self.assertRaises(
            RuntimeError, setattr,
            config.Section2, "BadDict", badDict)

        goodDict = { "dict" : {}, "list": [], "tuple" : () }
        config.Section2.GoodDict = goodDict


    def testC(self):
        """add components"""
        config = Configuration()
        config.component_("Component1")
        config.component_("Component2")
        config.component_("Component3")

        comp1 = getattr(config, "Component1", None)
        self.assertTrue(comp1 != None)
        comp2 = getattr(config, "Component2", None)
        self.assertTrue(comp2 != None)


    def testD(self):
        """test documentation"""
        config = Configuration()
        config.section_("Section1")
        config.Section1.Parameter1 = True
        config.Section1.Parameter2 = "string"
        config.Section1.Parameter3 = 123
        config.Section1.Parameter4 = 123.456
        config.Section1.Parameter5 = {"test1" : "test2", "test3" : 123}

        config.Section1.document_("""This is Section1""")
        config.Section1.document_("""This is Section1.Parameter1""",
                                  "Parameter1")
        config.Section1.document_("""This is Section1.Parameter2""",
                                  "Parameter2")
        config.Section1.document_("""This is Section1.Parameter3\n with multiline comments""",
                                  "Parameter3")

        try:
            config.Section1.documentedString_()
        except Exception as ex:
            msg = "Error calling ConfigSection.documentedString_:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)
        try:
            config.Section1.commentedString_()
        except Exception as ex:
            msg = "Error calling ConfigSection.commentedString_:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)

        try:
            config.documentedString_()
        except Exception as ex:
            msg = "Error calling Configuration.documentedString_:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)
        try:
            config.commentedString_()
        except Exception as ex:
            msg = "Error calling Configuration.commentedString_:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)


    def testE(self):
        """test save/load """
        testValues = [
            "string", 123, 123.456,
            ["list", 789, 10.1 ],
            { "dict1" : "value", "dict2" : 10.0 }
            ]
        config = Configuration()
        for x in range(0, 5):
            config.section_("Section%s" % x)
            config.component_("Component%s" % x)
            sect = getattr(config, "Section%s" % x)
            comp = getattr(config, "Component%s" % x)
            sect.document_("This is Section%s" % x)
            comp.document_("This is Component%s" % x)

            for i in range(0, 5):
                setattr(comp, "Parameter%s" % i, testValues[i])
                setattr(sect, "Parameter%s" % i, testValues[i])
                comp.document_("This is Parameter%s" % i,
                               "Parameter%s" %i)
                sect.document_("This is Parameter%s" %i,
                               "Parameter%s" %i)

        dummyStringSave = str(config)
        dummyDocumentSave = config.documentedString_()
        dummyCommentSave = config.commentedString_()

        saveConfigurationFile(config, self.normalSave)
        saveConfigurationFile(config, self.docSave, document = True)
        saveConfigurationFile(config, self.commentSave, comment = True)

        dummyPlainConfig = loadConfigurationFile(self.normalSave)

        dummyDocConfig = loadConfigurationFile(self.docSave)

        dummyCommentConfig = loadConfigurationFile(self.commentSave)

        #print commentConfig.commentedString_()
        #print docConfig.documentedString_()
        #print docConfig.commentedString_()


    def testF(self):
        """
        Test internal functions pythonise_, listSections_
        """
        config = ConfigSection("config")

        config.section_("SectionA")
        config.section_("SectionB")
        config.SectionA.section_("Section1")
        config.SectionA.section_("Section2")
        config.SectionA.Section1.x   = 100
        config.SectionA.Section1.y   = 100

        pythonise = config.pythonise_()

        assert "config.section_('SectionA')"      in pythonise, "Pythonise failed: Could not find SectionA"
        assert "config.SectionA.Section1.x = 100" in pythonise, "Pythonise failed: Could not find x"

        pythonise = config.SectionA.pythonise_()

        assert "SectionA.section_('Section1')" in pythonise, "Pythonise failed: Could not find Section1"
        assert "SectionA.Section1.x = 100"     in pythonise, "Pythonise failed: Could not find x"

        self.assertEqual(config.listSections_(), ['SectionB', 'SectionA'])
        self.assertEqual(config.SectionA.listSections_(), ['Section2', 'Section1'])


    def testG_testStaticReferenceToConfigurationInstance(self):
        """
        test Configuration.getInstance(), which returns a reference
        to the Configuration object instance.

        """
        config = Configuration()
        instance = Configuration.getInstance()
        self.assertFalse(hasattr(instance, "testsection"))
        config.section_("testsection")
        self.assertTrue(hasattr(instance, "testsection"))
        config.testsection.var = 10
        self.assertEqual(instance.testsection.var, 10)


    def testH_ConfigSectionDictionariseInternalChildren(self):
        """
        The test checks that no item in the dictionary_whole_tree_()
        result is an unexpanded ConfigSection instance.

        """
        config = ConfigSection("config")
        config.value1 = "MyValue1"
        config.section_("Task1")
        config.Task1.value2 = "MyValue2"
        config.Task1.section_("subSection")
        config.Task1.subSection.value3 = "MyValue3"
        d = config.dictionary_whole_tree_()
        for values in d.values():
            self.assertFalse(isinstance(values, ConfigSection))
        self.assertEqual(d["Task1"]["subSection"]["value3"], "MyValue3")
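
# A minimal usage sketch of the Configuration API exercised above (file path hypothetical;
# import path as in WMCore): build a config with a section and a component, save it to a
# python-ised file and load it back.
from WMCore.Configuration import Configuration, saveConfigurationFile, loadConfigurationFile


def roundTripConfig(path="/tmp/Example_Configuration.py"):
    config = Configuration()
    config.section_("General")
    config.General.workDir = "/tmp/work"      # simple string parameter
    config.component_("AlertProcessor")
    config.AlertProcessor.level = 5           # simple numeric parameter
    saveConfigurationFile(config, path)       # serialise to a .py configuration file
    return loadConfigurationFile(path)        # reload as a Configuration object
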
Exemple #26
0
class deleteFileTest(unittest.TestCase):


    def setUp(self):
        # stolen from CMSSWExecutor_t. thanks, dave
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()
        shutil.copyfile('/etc/hosts', os.path.join(self.testDir, 'testfile'))

        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("DeleterTask")
        stepHelper = step = self.task.makeStep("DeleteTest")
        self.step = stepHelper.data
        self.actualStep = stepHelper
        template = DeleteTemplate()
        template(self.step)
        self.helper = template.helper(self.step)
        self.executor = StepFactory.getStepExecutor(self.actualStep.stepType())

        taskMaker = TaskMaker(self.workload, self.testDir)
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        self.sandboxDir = "%s/UnitTests" % self.testDir

        self.task.build(self.testDir)
        sys.path.insert(0, self.testDir)
        sys.path.insert(0, self.sandboxDir)


        self.job = Job(name = "/UnitTest/DeleterTask/DeleteTest-test-job")

        binDir = inspect.getsourcefile(ModuleLocator)
        binDir = binDir.replace("__init__.py", "bin")

        if not binDir in os.environ['PATH']:
            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)

    def tearDown(self):
        self.testInit.delWorkDir()
        sys.path.remove(self.testDir)
        sys.path.remove(self.sandboxDir)

    def setLocalOverride(self, step):
        step.section_('override')
        step.override.command    = 'cp'
        step.override.option     = ''
        step.override.__setattr__('lfn-prefix', '')
        step.override.__setattr__('se-name','DUMMYSE')


    @attr('integration')
    def testManualDeleteOld(self):
        self.assertTrue(os.path.exists( os.path.join(self.testDir, 'testfile')))
        self.step.section_('filesToDelete')
        self.step.filesToDelete.file1 = os.path.join(self.testDir, 'testfile')
        self.setLocalOverride(self.step)
        self.executor.initialise(self.step, self.job)
        self.executor.execute()
        self.assertFalse(os.path.exists( os.path.join(self.testDir, 'testfile')))
        return

    @attr('integration')
    def testManualDeleteNew(self):
        self.assertTrue(os.path.exists( os.path.join(self.testDir, 'testfile')))
        self.step.section_('filesToDelete')
        self.step.filesToDelete.file1 = os.path.join(self.testDir, 'testfile')
        self.setLocalOverride(self.step)
        self.step.override.newStageOut = True
        self.executor.initialise(self.step, self.job)
        self.executor.execute()
        self.assertFalse(os.path.exists( os.path.join(self.testDir, 'testfile')))
        return

    @attr('integration')
    def testJobDeleteOld(self):
        self.assertTrue(os.path.exists( os.path.join(self.testDir, 'testfile')))
        self.setLocalOverride(self.step)
        self.job['input_files'] = [ {'lfn': os.path.join(self.testDir, 'testfile') } ]
        self.executor.initialise(self.step, self.job)
        self.executor.execute()
        self.assertFalse(os.path.exists( os.path.join(self.testDir, 'testfile')))
        return

    @attr('integration')
    def testJobDeleteNew(self):
        self.assertTrue(os.path.exists( os.path.join(self.testDir, 'testfile')))
        self.setLocalOverride(self.step)
        self.step.override.newStageOut = True
        self.job['input_files'] = [ {'lfn': os.path.join(self.testDir, 'testfile') } ]
        self.executor.initialise(self.step, self.job)
        self.executor.execute()
        self.assertFalse(os.path.exists( os.path.join(self.testDir, 'testfile')))
        return
Exemple #27
class RunJobTest(unittest.TestCase):
    """
    _RunJobTest_


    Test the RunJob object and accessors
    """


    def setUp(self):

        myThread = threading.currentThread()

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        #self.tearDown()
        self.testInit.setSchema(customModules = ["WMCore.WMBS", "WMCore.BossAir", "WMCore.ResourceControl", "WMCore.Agent.Database"],
                                useDefault = False)

        self.daoFactory = DAOFactory(package = "WMCore.BossAir",
                                     logger = myThread.logger,
                                     dbinterface = myThread.dbi)

        resourceControl = ResourceControl()
        resourceControl.insertSite(siteName = 'Xanadu', seName = 'se.Xanadu',
                                   ceName = 'Xanadu', plugin = "TestPlugin")
        resourceControl.insertThreshold(siteName = 'Xanadu', taskType = 'Processing', \
                                        maxSlots = 10000, pendingSlots = 10000)

        # Create user
        wmbsFactory = DAOFactory(package = "WMCore.WMBS",
                                 logger = myThread.logger,
                                 dbinterface = myThread.dbi)
        newuser = wmbsFactory(classname = "Users.New")
        newuser.execute(dn = "mnorman", group_name = "phgroup", role_name = "cmsrole")



    def tearDown(self):
        """
        Database deletion
        """
        self.testInit.clearDatabase(modules = ["WMCore.WMBS", "WMCore.BossAir", "WMCore.ResourceControl", "WMCore.Agent.Database"])

        self.testInit.delWorkDir()

        return


    def createJobs(self, nJobs):
        """
        Create a jobGroup containing nJobs jobs for submission tests

        """

        testWorkflow = Workflow(spec = "dummy", owner = "mnorman",
                                name = "dummy", task="basicWorkload/Production")
        testWorkflow.create()

        # Create Fileset, Subscription, jobGroup
        testFileset = Fileset(name = "dummy")
        testFileset.create()
        testSubscription = Subscription(fileset = testFileset,
                                            workflow = testWorkflow,
                                            type = "Processing",
                                            split_algo = "FileBased")
        testSubscription.create()

        testJobGroup = JobGroup(subscription = testSubscription)
        testJobGroup.create()


        # Create jobs
        for id in range(nJobs):
            testJob = Job(name = 'Job_%i' % (id))
            testJob['owner']    = "mnorman"
            testJob['location'] = 'Xanadu'
            testJob.create(testJobGroup)
            testJobGroup.add(testJob)

        testFileset.commit()
        testJobGroup.commit()


        return testJobGroup





    def testA_BulkDAOs(self):
        """
        _BulkDAOs_

        Test the bulk DAO options, which are the only ones we should be using.
        """

        myThread = threading.currentThread()

        jobGroup = self.createJobs(nJobs = 10)

        runJobs = []
        for job in jobGroup.jobs:
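            # job.exists() returns the WMBS id of the already-created job,
            # which is (presumably) what RunJob expects as its jobid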
            runJob = RunJob(jobid = job.exists())
            runJob['status']    = 'New'
            runJob['userdn']    = job['owner']
            runJob['usergroup'] = 'phgroup'
            runJob['userrole']  = 'cmsrole'
            runJobs.append(runJob)


        statusDAO = self.daoFactory(classname = "NewState")
        statusDAO.execute(states = ['New', 'Gone', 'Dead'])

        result = myThread.dbi.processData("SELECT name FROM bl_status")[0].fetchall()
        self.assertEqual(result, [('Dead',), ('Gone',), ('New',)])


        newJobDAO = self.daoFactory(classname = "NewJobs")
        newJobDAO.execute(jobs = runJobs)


        result = myThread.dbi.processData("SELECT wmbs_id FROM bl_runjob")[0].fetchall()
        self.assertEqual(result,
                         [(1L,), (2L,), (3L,), (4L,), (5L,), (6L,), (7L,), (8L,), (9L,), (10L,)])


        loadJobsDAO = self.daoFactory(classname = "LoadByStatus")
        loadJobs = loadJobsDAO.execute(status = "New")
        self.assertEqual(len(loadJobs), 10)


        idList = [x['id'] for x in loadJobs]

        for job in loadJobs:
            job['bulkid'] = 1001

        updateDAO = self.daoFactory(classname = "UpdateJobs")
        updateDAO.execute(jobs = loadJobs)

        loadJobs = loadJobsDAO.execute(status = 'New')
        self.assertEqual(len(loadJobs), 10)
        for job in loadJobs:
            self.assertEqual(job['bulkid'], '1001')


        loadWMBSDAO = self.daoFactory(classname = "LoadByWMBSID")
        for job in jobGroup.jobs:
            jDict = loadWMBSDAO.execute(jobs = [job])
            self.assertEqual(job['id'], jDict[0]['jobid'])


        setStatusDAO = self.daoFactory(classname = "SetStatus")
        setStatusDAO.execute(jobs = idList, status = 'Dead')

        result = loadJobsDAO.execute(status = 'Dead')
        self.assertEqual(len(result), 10)
        result = loadJobsDAO.execute(status = 'New')
        self.assertEqual(len(result), 0)

        runningJobDAO = self.daoFactory(classname = "LoadRunning")
        runningJobs = runningJobDAO.execute()

        self.assertEqual(len(runningJobs), 10)

        completeDAO = self.daoFactory(classname = "CompleteJob")
        completeDAO.execute(jobs = idList)

        result = loadJobsDAO.execute(status = 'Dead', complete = '0')
        self.assertEqual(len(result), 10)


        deleteDAO = self.daoFactory(classname = "DeleteJobs")
        deleteDAO.execute(jobs = idList)

        result = loadJobsDAO.execute(status = 'Dead')
        self.assertEqual(len(result), 0)



        return


    def testB_CheckWMBSBuild(self):
        """
        _CheckWMBSBuild_

        Trivial test that checks whether we can build
        runJobs from WMBS jobs
        """

        jobGroup = self.createJobs(nJobs = 10)

        for job in jobGroup.jobs:
            rj = RunJob()
            rj.buildFromJob(job = job)
            self.assertEqual(job['id'], rj['jobid'])
            self.assertEqual(job['retry_count'], rj['retry_count'])
            job2 = rj.buildWMBSJob()
            self.assertEqual(job['id'], job2['id'])
            self.assertEqual(job['retry_count'], job2['retry_count'])


        return

    def testC_CheckWMBSBuildRoleAndGroup(self):
        """
        _CheckWMBSBuildRoleAndGroup_

        Trivial test that checks whether user group and role survive
        building runJobs from WMBS jobs and back
        """
        jobGroup = []

        # Create jobs
        for id in range(10):
            testJob = Job( name = 'Job_%i' % (id) )
            testJob['owner']    = "mnorman"
            testJob['usergroup'] = "mygroup_%i" % id
            testJob['userrole'] = "myrole_%i" % id
            testJob['location'] = 'Xanadu'
            jobGroup.append(testJob)

        for job in jobGroup:
            rj = RunJob()
            rj.buildFromJob(job = job)
            self.assertEqual(job['usergroup'], rj['usergroup'])
            self.assertEqual(job['userrole'], rj['userrole'])
            job2 = rj.buildWMBSJob()
            self.assertEqual(job['usergroup'], job2['usergroup'])
            self.assertEqual(job['userrole'], job2['userrole'])
        return
Exemple #28
class CMSSW_t(unittest.TestCase):
    def setUp(self):
        """
        _setUp_

        Build a testing environment similar to a WN
        """
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()

        # Build a workload/task/step with the basic required information
        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("CMSSWExecutor")
        stepHelper = self.task.makeStep("ExecutorTest")
        self.step = stepHelper.data
        template = CMSSWTemplate()
        template(self.step)
        self.helper = template.helper(self.step)
        self.step.application.setup.scramCommand = "scramulator.py"
        self.step.application.command.executable = "cmsRun.py"
        self.step.application.setup.scramProject = "CMSSW"
        self.step.application.setup.scramArch = "slc5_ia32_gcc434"
        self.step.application.setup.cmsswVersion = "CMSSW_X_Y_Z"
        self.step.application.setup.softwareEnvironment = "echo \"Software Setup...\";"
        self.step.output.jobReport = "FrameworkJobReport.xml"
        self.helper.addOutputModule("outputRECORECO", primaryDataset = "Bogus",
                                    processedDataset = "Test-Era-v1",
                                    dataTier = "DATA")
        self.helper.addOutputModule("outputALCARECORECO", primaryDataset = "Bogus",
                                    processedDataset = "Test-Era-v1",
                                    dataTier = "DATA")
        self.helper.setGlobalTag("Bogus")
        taskMaker = TaskMaker(self.workload, self.testDir)
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        # Build the TaskSpace/StepSpace
        self.sandboxDir = os.path.join(self.testDir, "UnitTests")
        self.task.build(self.testDir)
        sys.path.append(self.testDir)
        sys.path.append(self.sandboxDir)

        # Copy the files that cmsRun would have generated in the step space
        open(os.path.join(self.step.builder.workingDir, "outputRECORECO.root"), "w").close()
        open(os.path.join(self.step.builder.workingDir, "outputALCARECORECO.root"), "w").close()
        shutil.copy(os.path.join(getTestBase(),
                                 "WMCore_t/FwkJobReport_t/CMSSWProcessingReport.xml"),
                    os.path.join(self.step.builder.workingDir, "FrameworkJobReport.xml"))

        # Create a job
        self.job = Job(name = "/UnitTest/CMSSWExecutor/ExecutorTest-test-job")
        self.job["id"] = 1

        # Set the PATH
        binDir = inspect.getsourcefile(ModuleLocator)
        binDir = binDir.replace("__init__.py", "bin")

        if not binDir in os.environ['PATH']:
            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)

        self.oldCwd = os.getcwd()

    def tearDown(self):
        """
        _tearDown_

        Clean up
        """
        self.testInit.delWorkDir()
        sys.path.remove(self.testDir)
        sys.path.remove(self.sandboxDir)
        sys.modules.pop("WMTaskSpace")
        sys.modules.pop("WMTaskSpace.ExecutorTest")
        sys.modules.pop("WMTaskSpace.ExecutorTest.WMCore")
        sys.modules.pop("WMSandbox")
        sys.modules.pop("WMSandbox.CMSSWExecutor")
        sys.modules.pop("WMSandbox.CMSSWExecutor.ExecutorTest")

        return

    def testA_Execute(self):
        """
        _Execute_

        Test the full path, including execute.
        """
        try:
            os.chdir(self.step.builder.workingDir)
            executor = CMSSWExecutor()
            executor.initialise(self.step, self.job)
            executor.pre()
            # Remove the scram pre-script as it requires an actual SCRAM environment
            executor.step.runtime.scramPreScripts.remove("SetupCMSSWPset")
            executor.execute()
            executor.post()
            # Check that there were no errors
            self.assertEqual(0, executor.report.getExitCode())
            # Check that we processed the XML
            self.assertEqual(2, len(executor.report.getAllFiles()))
            # Check that the executable was really executed
            self.assertTrue(os.path.isfile(os.path.join(self.step.builder.workingDir, "BogusFile.txt")))
        except Exception as ex:
            self.fail("Failure encountered, %s" % str(ex))
        finally:
            # assumed cleanup: restore the working directory saved in setUp()
            os.chdir(self.oldCwd)
Exemple #29
class Scram_t(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testDir = self.testInit.generateWorkDir()

    def tearDown(self):
        self.testInit.delWorkDir()

    def testA(self):
        """
        instantiate a Scram instance in test mode.
        """
        try:
            s = Scram(initialise="/bin/date",
                      architecture="slc5_amd64_gcc454",
                      version="CMSSW_X_Y_Z",
                      test=True)
        except Exception as ex:
            msg = "Failed to instantiate Scram in test mode:\n %s " % str(ex)
            self.fail(msg)

    def testB(self):
        """
        instantiate a Scram instance in non-test mode
        limited in what we can test here since we don't have scram etc. in the unittest env
        """
        try:
            s = Scram(initialise="/bin/date",
                      architecture="slc5_amd64_gcc454",
                      version="CMSSW_X_Y_Z")
        except Exception as ex:
            msg = "Failed to instantiate Scram:\n %s " % str(ex)
            self.fail(msg)

    def testC(self):
        """
        test all method calls in test mode

        """
        s = Scram(initialise="/bin/date",
                  architecture="slc5_amd64_gcc454",
                  version="CMSSW_X_Y_Z",
                  directory=self.testDir,
                  test=True)

        try:
            status = s.project()
        except Exception as ex:
            msg = "Error running Scram.project:\n %s" % str(ex)
            self.fail(msg)

        self.assertEqual(status, 0)
        self.assertTrue(os.path.exists(s.projectArea))
        self.assertTrue("project" in s.lastExecuted)
        self.assertTrue("CMSSW_X_Y_Z" in s.lastExecuted)

        try:
            status = s.runtime()
        except Exception as ex:
            msg = "Error running Scram.runtime:\n %s" % str(ex)
            self.fail(msg)

        self.assertEqual(status, 0)
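        # "ru -sh" is the abbreviated scram runtime command ("runtime -sh")
        # that Scram.runtime() is expected to issue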
        self.assertTrue("ru -sh" in s.lastExecuted)
        self.assertTrue("TEST_MODE" in s.runtimeEnv)

        comm = "echo \"Hello World\""
        try:
            status = s(comm)
        except Exception as ex:
            msg = "Failed to call Scram object:\n %s" % str(ex)
            self.fail(msg)

        self.assertEqual(status, 0)
        self.assertEqual(s.lastExecuted, comm)
Exemple #30
class ConfigurationTest(unittest.TestCase):
    """
    test case for Configuration object

    """
    def setUp(self):
        """set up"""
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()
        self.normalSave = "%s/WMCore_Agent_Configuration_t_normal.py" % self.testDir
        self.docSave = "%s/WMCore_Agent_Configuration_t_documented.py" % self.testDir
        self.commentSave = "%s/WMCore_Agent_Configuration_t_commented.py" % self.testDir

    def tearDown(self):
        """clean up"""
        self.testInit.delWorkDir()

    def testA(self):
        """ctor"""
        try:
            dummyConfig = Configuration()
        except Exception as ex:
            msg = "Failed to instantiate Configuration\n"
            msg += str(ex)
            self.fail(msg)

    def testB(self):
        """add settings"""
        config = Configuration()
        config.section_("Section1")

        section1 = getattr(config, "Section1", None)
        self.assertTrue(section1 != None)

        config.section_("Section2")
        section2 = getattr(config, "Section2", None)
        self.assertTrue(section2 != None)

        self.assertRaises(AttributeError, getattr, config, "Section3")

        # basic types
        config.Section1.Parameter1 = True
        config.Section1.Parameter2 = "string"
        config.Section1.Parameter3 = 123
        config.Section1.Parameter4 = 123.456

        self.assertEqual(config.Section1.Parameter1, True)
        self.assertEqual(config.Section1.Parameter2, "string")
        self.assertEqual(config.Section1.Parameter3, 123)
        self.assertEqual(config.Section1.Parameter4, 123.456)

        # dictionary format:
        try:
            section1Dict = config.Section1.dictionary_()
        except Exception as ex:
            msg = "Error converting section to dictionary:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)

        self.assertTrue("Parameter1" in section1Dict)
        self.assertTrue("Parameter2" in section1Dict)
        self.assertTrue("Parameter3" in section1Dict)
        self.assertTrue("Parameter4" in section1Dict)

        self.assertEqual(section1Dict['Parameter1'],
                         config.Section1.Parameter1)
        self.assertEqual(section1Dict['Parameter2'],
                         config.Section1.Parameter2)
        self.assertEqual(section1Dict['Parameter3'],
                         config.Section1.Parameter3)
        self.assertEqual(section1Dict['Parameter4'],
                         config.Section1.Parameter4)

        # compound types
        config.Section2.List = ["string", 123, 123.456, False]
        config.Section2.Dictionary = {
            "string": "string",
            "int": 123,
            "float": 123.456,
            "bool": False
        }
        config.Section2.Tuple = ("string", 123, 123.456, False)

        self.assertEqual(config.Section2.List, ["string", 123, 123.456, False])
        self.assertEqual(config.Section2.Tuple,
                         ("string", 123, 123.456, False))

        class DummyObject:
            pass

        # unsupported parameter type
        self.assertRaises(RuntimeError, setattr, config.Section2, "BadObject",
                          DummyObject())
        # unsupported data type in compound type
        badList = [DummyObject(), DummyObject()]
        self.assertRaises(RuntimeError, setattr, config.Section2, "BadList",
                          badList)

        badDict = {"dict": {}, "list": [DummyObject()], "tuple": ()}
        self.assertRaises(RuntimeError, setattr, config.Section2, "BadDict",
                          badDict)

        goodDict = {"dict": {}, "list": [], "tuple": ()}
        config.Section2.GoodDict = goodDict

    def testC(self):
        """add components"""
        config = Configuration()
        config.component_("Component1")
        config.component_("Component2")
        config.component_("Component3")

        comp1 = getattr(config, "Component1", None)
        self.assertTrue(comp1 != None)
        comp2 = getattr(config, "Component2", None)
        self.assertTrue(comp2 != None)

    def testD(self):
        """test documentation"""
        config = Configuration()
        config.section_("Section1")
        config.Section1.Parameter1 = True
        config.Section1.Parameter2 = "string"
        config.Section1.Parameter3 = 123
        config.Section1.Parameter4 = 123.456
        config.Section1.Parameter5 = {"test1": "test2", "test3": 123}

        config.Section1.document_("""This is Section1""")
        config.Section1.document_("""This is Section1.Parameter1""",
                                  "Parameter1")
        config.Section1.document_("""This is Section1.Parameter2""",
                                  "Parameter2")
        config.Section1.document_(
            """This is Section1.Parameter3\n with multiline comments""",
            "Parameter3")

        try:
            config.Section1.documentedString_()
        except Exception as ex:
            msg = "Error calling ConfigSection.documentedString_:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)
        try:
            config.Section1.commentedString_()
        except Exception as ex:
            msg = "Error calling ConfigSection.commentedString_:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)

        try:
            config.documentedString_()
        except Exception as ex:
            msg = "Error calling Configuration.documentedString_:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)
        try:
            config.commentedString_()
        except Exception as ex:
            msg = "Error calling Configuration.commentedString_:\n"
            msg += "%s\n" % str(ex)
            self.fail(msg)

    def testE(self):
        """test save/load """
        testValues = [
            "string", 123, 123.456, ["list", 789, 10.1], {
                "dict1": "value",
                "dict2": 10.0
            }
        ]
        config = Configuration()
        for x in range(0, 5):
            config.section_("Section%s" % x)
            config.component_("Component%s" % x)
            sect = getattr(config, "Section%s" % x)
            comp = getattr(config, "Component%s" % x)
            sect.document_("This is Section%s" % x)
            comp.document_("This is Component%s" % x)

            for i in range(0, 5):
                setattr(comp, "Parameter%s" % i, testValues[i])
                setattr(sect, "Parameter%s" % i, testValues[i])
                comp.document_("This is Parameter%s" % i, "Parameter%s" % i)
                sect.document_("This is Parameter%s" % i, "Parameter%s" % i)

        dummyStringSave = str(config)
        dummyDocumentSave = config.documentedString_()
        dummyCommentSave = config.commentedString_()

        saveConfigurationFile(config, self.normalSave)
        saveConfigurationFile(config, self.docSave, document=True)
        saveConfigurationFile(config, self.commentSave, comment=True)

        dummyPlainConfig = loadConfigurationFile(self.normalSave)

        dummyDocConfig = loadConfigurationFile(self.docSave)

        dummyCommentConfig = loadConfigurationFile(self.commentSave)

        #print commentConfig.commentedString_()
        #print docConfig.documentedString_()
        #print docConfig.commentedString_()

    def testF(self):
        """
        Test internal functions pythonise_, listSections_
        """
        config = ConfigSection("config")

        config.section_("SectionA")
        config.section_("SectionB")
        config.SectionA.section_("Section1")
        config.SectionA.section_("Section2")
        config.SectionA.Section1.x = 100
        config.SectionA.Section1.y = 100

        pythonise = config.pythonise_()

        assert "config.section_('SectionA')" in pythonise, "Pythonise failed: Could not find SectionA"
        assert "config.SectionA.Section1.x = 100" in pythonise, "Pythonise failed: Could not find x"

        pythonise = config.SectionA.pythonise_()

        assert "SectionA.section_('Section1')" in pythonise, "Pythonise failed: Could not find Section1"
        assert "SectionA.Section1.x = 100" in pythonise, "Pythonise failed: Could not find x"

        self.assertEqual(config.listSections_(), ['SectionB', 'SectionA'])
        self.assertEqual(config.SectionA.listSections_(),
                         ['Section2', 'Section1'])

    def testG_testStaticReferenceToConfigurationInstance(self):
        """
        test Configuration.getInstance() which returns reference
        to the Configuration object instance.

        """
        config = Configuration()
        instance = Configuration.getInstance()
        self.assertFalse(hasattr(instance, "testsection"))
        config.section_("testsection")
        self.assertTrue(hasattr(instance, "testsection"))
        config.testsection.var = 10
        self.assertEqual(instance.testsection.var, 10)

    def testH_ConfigSectionDictionariseInternalChildren(self):
        """
        The test checks if any item of the dictionary_whole_tree_()
        result is not unexpanded instance of ConfigSection.

        """
        config = ConfigSection("config")
        config.value1 = "MyValue1"
        config.section_("Task1")
        config.Task1.value2 = "MyValue2"
        config.Task1.section_("subSection")
        config.Task1.subSection.value3 = "MyValue3"
        d = config.dictionary_whole_tree_()
        for values in d.values():
            self.assertFalse(isinstance(values, ConfigSection))
        self.assertEqual(d["Task1"]["subSection"]["value3"], "MyValue3")
Exemple #31
class APITest(unittest.TestCase):
    def setUp(self):
        """
        This much setup is needed for the simple alerts client API tests
        because they also exercise setting up the alert framework the way
        BaseWorkerThread does.

        """
        myThread = threading.currentThread()
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        # needed for instantiating BaseWorkerThread
        self.testInit.setDatabaseConnection()
        self.alertsReceiver = None


    def tearDown(self):
        self.testInit.clearDatabase()
        self.testInit.delWorkDir()
        if self.alertsReceiver:
            self.alertsReceiver.shutdown()


    def testAlertsMessagingBasic(self):
        config = getConfig("/tmp")
        self.assertTrue(hasattr(config, "Alert"))
        # initialization
        # sender: instance of Alert messages Sender
        # preAlert: pre-defined values for Alert instances generated from this class
        self.config = config # needed in setUpAlertsMessaging
        preAlert, sender = alertAPI.setUpAlertsMessaging(self,
                                                         compName = "testBasic")
        sendAlert = alertAPI.getSendAlert(sender = sender,
                                          preAlert = preAlert)

        # set up a temporary alert message receiver
        handler, receiver = utils.setUpReceiver(config.Alert.address,
                                                config.Alert.controlAddr)
        # test sending alert
        msg = "this is my message Basic"
        sendAlert(100, msg = msg)

        # wait for the alert to arrive
        while len(handler.queue) == 0:
            time.sleep(0.3)
            print "%s waiting for alert to arrive ..." % inspect.stack()[0][3]

        self.assertEqual(len(handler.queue), 1)
        alert = handler.queue[0]
        self.assertEqual(alert["Component"], "testBasic")
        self.assertEqual(alert["Level"], 100)
        self.assertEqual(alert["Source"], self.__class__.__name__)
        self.assertEqual(alert["Details"]["msg"], msg)

        sender.unregister()
        receiver.shutdown()


    def testAlertsMessagingNotSetUpViaBaseWorkerThread(self):
        # alerts will not be set up if 'config.Alert' etc is not provided
        config = Configuration()
        self.assertFalse(hasattr(config, "Alert"))
        # test the same way alerts are set up in the client code (currently
        # all cases via BaseWorkerThread)
        # this call creates .sender, but here it will be set to None (no Alert config)
        thread = BaseWorkerThread()
        thread.config = config
        thread.initAlerts(compName = "test1")
        self.assertFalse(thread.sender)
        # shall do nothing and not fail
        thread.sendAlert("nonsense", msg = "nonsense")


    def testAlertsSetUpAndSendingViaBaseWorkerThread(self):
        # calls as they are made from child/client classes of BaseWorkerThread
        config = getConfig("/tmp")
        self.assertTrue(hasattr(config, "Alert"))
        # test the same way alerts are set up in the client code (currently
        # all cases via BaseWorkerThread)
        # this call creates .sender; here config.Alert is present, so a real sender is set up
        thread = BaseWorkerThread()
        thread.config = config
        thread.initAlerts(compName = "test2")
        self.assertTrue(thread.sender)

        # set up a temporary alert message receiver
        handler, receiver = utils.setUpReceiver(config.Alert.address,
                                                config.Alert.controlAddr)

        # send an alert message
        msg = "this is my message 1"
        thread.sendAlert(10, msg = msg)

        # wait for the alert to arrive
        while len(handler.queue) == 0:
            time.sleep(0.3)
            print "%s waiting for alert to arrive ..." % inspect.stack()[0][3]

        self.assertEqual(len(handler.queue), 1)
        alert = handler.queue[0]
        self.assertEqual(alert["Component"], "test2")
        self.assertEqual(alert["Level"], 10)
        self.assertEqual(alert["Source"], thread.__class__.__name__)
        self.assertEqual(alert["Details"]["msg"], msg)

        thread.sender.unregister()
        receiver.shutdown()


    def testAgentConfigurationRetrieving(self):
        """
        Test that getting some agent details (config values from config.Agent
        section) will be correctly propagated into Alert instances.
        Alert instance is obtained via API.getPredefinedAlert factory.

        """
        d = dict(Additional = "detail")
        # instantiate just plain Alert, no configuration to take
        # into account at this point
        a = Alert(**d)
        self.assertEqual(a["HostName"], None)
        self.assertEqual(a["Contact"], None)
        self.assertEqual(a["TeamName"], None)
        self.assertEqual(a["AgentName"], None)
        self.assertEqual(a["Additional"], "detail")
        # instantiate via factory which reads configuration instance
        config = Configuration()
        config.section_("Agent")
        config.Agent.hostName = "some1"
        config.Agent.contact = "some2"
        config.Agent.teamName = "some3"
        config.Agent.agentName = "some4"
        a = alertAPI.getPredefinedAlert(**d)
        self.assertEqual(a["HostName"], "some1")
        self.assertEqual(a["Contact"], "some2")
        self.assertEqual(a["TeamName"], "some3")
        self.assertEqual(a["AgentName"], "some4")
        self.assertEqual(a["Additional"], "detail")
Exemple #32
class CMSSW_t(unittest.TestCase):
    def setUp(self):
        """
        build a step for testing purposes

        """
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()

        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("CMSSWExecutor")
        stepHelper = step = self.task.makeStep("ExecutorTest")
        self.step = stepHelper.data
        template = CMSSWTemplate()
        template(self.step)
        self.helper = template.helper(self.step)
        self.step.application.setup.scramCommand = "scramulator.py"
        self.step.application.command.executable = "cmsRun.py"
        self.step.application.setup.scramProject = "CMSSW"
        self.step.application.setup.scramArch = "slc5_ia32_gcc434"
        self.step.application.setup.cmsswVersion = "CMSSW_X_Y_Z"
        self.step.application.setup.softwareEnvironment = "echo \"Software Setup...\";"

        taskMaker = TaskMaker(self.workload, self.testDir)
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        self.sandboxDir = "%s/UnitTests" % self.testDir

        self.task.build(self.testDir)
        sys.path.append(self.testDir)
        sys.path.append(self.sandboxDir)


        self.job = Job(name = "/UnitTest/CMSSWExecutor/ExecutorTest-test-job")

        binDir = inspect.getsourcefile(ModuleLocator)
        binDir = binDir.replace("__init__.py", "bin")

        if not binDir in os.environ['PATH']:
            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)

    def tearDown(self):
        """
        clean up
        """
        self.testInit.delWorkDir()
        sys.path.remove(self.testDir)
        sys.path.remove(self.sandboxDir)

    def testA_PrePost(self):
        """
        _PrePost_

        This test should run on any machine.
        It will simply make sure there are no major syntax errors or
        incompatibilities.

        It does not test the main functionality.
        """
        executor = CMSSWExecutor()
        executor.initialise(self.step, self.job)
        executor.pre()
        #executor.execute()
        executor.post()
        self.assertEqual(executor.report.data.ExecutorTest.status, 1)
        self.assertEqual(executor.report.data.ExecutorTest.analysis.files.fileCount, 0)
        return

    def testB_Execute(self):
        """
        _Execute_

        This test will only run where the scram setup can be done properly.

        To be honest it might have to be updated, but I don't know how.
        For now I'm skipping it.
        """

        executor = CMSSWExecutor()
        executor.initialise(self.step, self.job)
        executor.pre()
        try:
            executor.execute()
        except WMExecutionFailure:
            # This fails for me now
            # If it does so, just return
            return
        executor.post()

        print executor.report
        return
Exemple #33
class BaseTest(unittest.TestCase):
    """
    Some methods of this class are static and are reused by
    other test cases.

    """
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = getConfig(self.testDir)
        self.testComponentDaemonXml = "/tmp/TestComponent/Daemon.xml" 
        
        
    def tearDown(self):       
        self.testInit.delWorkDir()
        self.generator = None
        # if the directory and file "/tmp/TestComponent/Daemon.xml" still exist
        # after the ComponentsPoller test, delete them
        d = os.path.dirname(self.testComponentDaemonXml)
        if os.path.exists(d):
            shutil.rmtree(d)
                    

    def testSenderReceiverBasic(self):
        sender = Sender(self.config.Alert.address,
                        self.__class__.__name__,
                        self.config.Alert.controlAddr)
        handler, receiver = utils.setUpReceiver(self.config.Alert.address,
                                                self.config.Alert.controlAddr)
        a = Alert(Component = inspect.stack()[0][3])
        sender(a)
        while len(handler.queue) == 0:
            time.sleep(0.5)
            print "%s waiting for alert to arrive" % inspect.stack()[0][3]
        receiver.shutdown()
        self.assertEqual(len(handler.queue), 1)
        self.assertEqual(handler.queue[0]["Component"], inspect.stack()[0][3])
        
        
    def testProcessDetailBasic(self):
        pid = os.getpid()
        name = inspect.stack()[0][3] # test name
        pd = ProcessDetail(pid, name)
        self.assertEqual(pd.pid, pid)
        self.assertEqual(pd.name, name)
        self.assertEqual(pd.proc.pid, pid)
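        # note: get_children() is the psutil < 2.0 API (renamed to children() later)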
        numChildren = len(psutil.Process(pid).get_children())
        self.assertEqual(len(pd.children), numChildren)
        self.assertEqual(len(pd.allProcs), 1 + numChildren)
        d = pd.getDetails()
        self.assertEqual(d["pid"], pid)
        self.assertEqual(d["component"], name)
        self.assertEqual(d["numChildrenProcesses"], numChildren)
                
        
    def testMeasurementsBasic(self):
        numMes = 10
        mes = Measurements(numMes)
        self.assertEqual(mes._numOfMeasurements, numMes)
        self.assertEqual(len(mes), 0)
        mes.append(20)
        self.assertEqual(len(mes), 1)
        self.assertEqual(mes[0], 20)
        mes.append(30)
        self.assertEqual(mes[1], 30)
        mes.clear()
        self.assertEqual(len(mes), 0)
        self.assertEqual(mes._numOfMeasurements, numMes)
        

    def testBasePollerBasic(self):
        config = getConfig("/tmp")
        # create a nonsense config section; we just need a bunch of values defined
        config.AlertGenerator.section_("bogusPoller")
        config.AlertGenerator.bogusPoller.soft = 5 # [percent]
        config.AlertGenerator.bogusPoller.critical = 50 # [percent] 
        config.AlertGenerator.bogusPoller.pollInterval = 2  # [second]
        config.AlertGenerator.bogusPoller.period = 10
        
        generator = utils.AlertGeneratorMock(config)
        poller = BasePoller(config.AlertGenerator.bogusPoller, generator)
        # define dummy check method
        poller.check = lambda: 1+1
        poller.start()
        # poller now runs
        time.sleep(config.AlertGenerator.bogusPoller.pollInterval * 2)
        poller.terminate()
        while poller.is_alive():
            time.sleep(0.2)
            print "%s waiting for test poller to terminate" % inspect.stack()[0][3]
            
        
    def testPeriodPollerOnRealProcess(self):
        config = getConfig("/tmp")
        config.component_("AlertProcessor")
        config.AlertProcessor.section_("critical")
        config.AlertProcessor.section_("soft")
        config.AlertProcessor.critical.level = 5
        config.AlertProcessor.soft.level = 0        
        config.component_("AlertGenerator")
        config.AlertGenerator.section_("bogusPoller")
        config.AlertGenerator.bogusPoller.soft = 5 # [percent]
        config.AlertGenerator.bogusPoller.critical = 50 # [percent] 
        config.AlertGenerator.bogusPoller.pollInterval = 0.2  # [second]
        # period during which measurements are collected before evaluating for
        # possible alert triggering
        config.AlertGenerator.bogusPoller.period = 1
        
        generator = utils.AlertGeneratorMock(config)
        poller = PeriodPoller(config.AlertGenerator.bogusPoller, generator)
        poller.sender = utils.SenderMock()
        # get CPU usage percentage, it's like measuring CPU usage of a real
        # component, so use the appropriate poller's method for that
        # (PeriodPoller itself is higher-level class so it doesn't define
        # a method to provide sampling data)
        poller.sample = lambda processDetail: ComponentsCPUPoller.sample(processDetail)
        
        # get own pid
        pid = os.getpid()
        name = inspect.stack()[0][3] # test name
        pd = ProcessDetail(pid, name)
        # need to repeat sampling required number of measurements
        numOfMeasurements = int(config.AlertGenerator.bogusPoller.period / 
                                config.AlertGenerator.bogusPoller.pollInterval)
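        # with the values above: period 1 s / pollInterval 0.2 s -> 5 measurements per cycle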
        mes = Measurements(numOfMeasurements)
        self.assertEqual(len(mes), 0)
        for i in range(mes._numOfMeasurements):
            poller.check(pd, mes)
                    
        # since the whole measurement cycle was done, values should have been nulled
        self.assertEqual(len(mes), 0)
        
        
    def testPeriodPollerCalculationPredefinedInput(self):
        config = getConfig("/tmp")
        config.component_("AlertProcessor")
        config.AlertProcessor.section_("critical")
        config.AlertProcessor.section_("soft")
        config.AlertProcessor.critical.level = 5
        config.AlertProcessor.soft.level = 0        
        config.component_("AlertGenerator")
        config.AlertGenerator.section_("bogusPoller")
        # put some threshold numbers, just need to check output calculation
        # from check() method
        config.AlertGenerator.bogusPoller.soft = 5 # [percent]
        config.AlertGenerator.bogusPoller.critical = 50 # [percent] 
        config.AlertGenerator.bogusPoller.pollInterval = 0.2  # [second]
        config.AlertGenerator.bogusPoller.period = 1
        
        generator = utils.AlertGeneratorMock(config)
        poller = PeriodPoller(config.AlertGenerator.bogusPoller, generator)
        # since the poller may trigger an alert, give it a mock sender
        poller.sender = utils.SenderMock()
        # provide sample method with predefined input, float
        predefInput = 10.12
        poller.sample = lambda processDetail: predefInput
        
        processDetail = None
        numOfMeasurements = int(config.AlertGenerator.bogusPoller.period / 
                                config.AlertGenerator.bogusPoller.pollInterval)
        mes = Measurements(numOfMeasurements)
        for i in range(mes._numOfMeasurements):
            poller.check(processDetail, mes)
            
        # the above loop should have run 5 times, reaching an evaluation over 5 x predefInput
        # values; the average ends up equal to predefInput, which should trigger the soft threshold
        self.assertEqual(len(poller.sender.queue), 1)
        a = poller.sender.queue[0]
        
        self.assertEqual(a["Component"], generator.__class__.__name__)
        self.assertEqual(a["Source"], poller.__class__.__name__)
        d = a["Details"]
        self.assertEqual(d["threshold"], "%s%%" % config.AlertGenerator.bogusPoller.soft)
        self.assertEqual(d["numMeasurements"], mes._numOfMeasurements)
        self.assertEqual(d["period"], config.AlertGenerator.bogusPoller.period)
        self.assertEqual(d["average"], "%s%%" % predefInput)
        # since the whole measurement cycle was done, values should have been nulled
        self.assertEqual(len(mes), 0)
Exemple #34
class AgentTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = getConfig(self.testDir)
        # mock generator instance to communicate some configuration values
        self.generator = utils.AlertGeneratorMock(self.config)        
        self.testComponentDaemonXml = os.path.join(self.testDir, "Daemon.xml") 
        
        
    def tearDown(self):       
        self.testInit.delWorkDir()
        self.generator = None
            

    def testComponentsPollerBasic(self):
        """
        Test ComponentsPoller class.
        Beware of different process context in real running.
        
        """
        config = getConfig("/tmp")
        config.component_("AlertProcessor")
        config.AlertProcessor.section_("critical")
        config.AlertProcessor.section_("soft")
        config.AlertProcessor.critical.level = 5
        config.AlertProcessor.soft.level = 0        
        config.component_("AlertGenerator")
        config.AlertGenerator.section_("bogusPoller")
        config.AlertGenerator.bogusPoller.soft = 5 # [percent]
        config.AlertGenerator.bogusPoller.critical = 90 # [percent] 
        config.AlertGenerator.bogusPoller.pollInterval = 2  # [second]
        # period during which measurements are collected before evaluating for possible alert triggering
        config.AlertGenerator.bogusPoller.period = 10
        
        # need to create a temp directory, a real process and its
        # Daemon.xml so that it looks like an agent's component process,
        # and check the information back
        pid = os.getpid()
        config.component_("TestComponent")
        d = os.path.dirname(self.testComponentDaemonXml)
        config.TestComponent.componentDir = d
        if not os.path.exists(d):
            os.mkdir(d)
        f = open(self.testComponentDaemonXml, 'w')
        f.write(utils.daemonXmlContent % dict(PID_TO_PUT = pid))
        f.close()
                
        generator = utils.AlertGeneratorMock(config)
        poller = ComponentsPoller(config.AlertGenerator.bogusPoller, generator)
        
        # only 1 component should have a valid workDir with proper Daemon.xml content;
        # other components present in the configuration (AlertProcessor, AlertGenerator)
        # should have been ignored
        self.assertEqual(len(poller._components), 1)
        pd = poller._components[0]
        self.assertEqual(pd.pid, pid)
        self.assertEqual(pd.name, "TestComponent")
        self.assertEqual(len(pd.children), 0)
        self.assertEqual(len(poller._compMeasurements), 1)
        mes = poller._compMeasurements[0]
        numMeasurements = round(config.AlertGenerator.bogusPoller.period / config.AlertGenerator.bogusPoller.pollInterval, 0)
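        # with period 10 s and pollInterval 2 s this gives 5.0 expected measurements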
        self.assertEqual(mes._numOfMeasurements, numMeasurements)
        
        shutil.rmtree(d)
        

    def _doComponentsPoller(self, thresholdToTest, level, config,
                            pollerClass, expected = 0):
        """
        Components pollers have arrays of Measurements and ProcessDetails,
        which makes it harder to factor them out into the shared test methods
        of the utils module.
        
        """
        handler, receiver = utils.setUpReceiver(self.generator.config.Alert.address,
                                                self.generator.config.Alert.controlAddr)
        
        # need some real process to poll, give itself
        pid = os.getpid()
        # the input configuration doesn't have the component work directories set right, so rectify that:
        # the configuration has two components defined (AlertGenerator and AlertProcessor),
        # see what _doComponentsPoller is called with
        configInstance = Configuration.getInstance()
        for comp in Configuration.getInstance().listComponents_():
            compDir = getattr(configInstance, comp).componentDir
            compDir = os.path.join(compDir, comp)
            setattr(getattr(configInstance, comp), "componentDir", compDir)
            os.makedirs(compDir)
            f = open(os.path.join(compDir, "Daemon.xml"), 'w')
            f.write(utils.daemonXmlContent % dict(PID_TO_PUT = pid))
            f.close()
   
        numMeasurements = config.period / config.pollInterval
        poller = pollerClass(config, self.generator)
        # inject own input sample data provider
        # there is in fact an input argument in this case which needs to be ignored
        poller.sample = lambda proc_: random.randint(thresholdToTest, thresholdToTest + 10)
        
        # the poller will run over the components (as defined in the configuration)
        # and poll them; the PID etc. will be read from the componentDir
        poller.start()
        self.assertTrue(poller.is_alive())

        if expected != 0:
            # watch so that the test can't take forever; fail after 2 minutes
            timeLimitExceeded = False
            startTime = datetime.datetime.now()
            limitTime = 2 * 60 # seconds
            while len(handler.queue) == 0:
                time.sleep(config.pollInterval / 5)
                if (datetime.datetime.now() - startTime).seconds > limitTime:
                    timeLimitExceeded = True
                    break                
        else:
            time.sleep(config.period * 2)
            
        poller.terminate()
        receiver.shutdown()
        self.assertFalse(poller.is_alive())
        
        if expected != 0:
            if timeLimitExceeded:
                self.fail("No alert received in %s seconds." % limitTime)            
            # there should be just the expected number of alerts received; the
            # poller should not have had the chance to send more
            self.assertEqual(len(handler.queue), expected)
            a = handler.queue[0]
            # soft threshold - alert should have 'soft' level
            self.assertEqual(a["Level"], level)
            self.assertEqual(a["Component"], self.generator.__class__.__name__)
            self.assertEqual(a["Source"], poller.__class__.__name__)
            
            
    def testComponentsCPUPollerSoftThreshold(self):
        self.config.AlertGenerator.componentsCPUPoller.soft = 70
        self.config.AlertGenerator.componentsCPUPoller.critical = 80
        self.config.AlertGenerator.componentsCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsCPUPoller.period = 1
        level = self.config.AlertProcessor.soft.level
        thresholdToTest = self.config.AlertGenerator.componentsCPUPoller.soft
        # expected 2: 2 components are defined by the configuration, an alert
        # will be sent for each of them
        self._doComponentsPoller(thresholdToTest, level,
                                 self.config.AlertGenerator.componentsCPUPoller,
                                 ComponentsCPUPoller, expected = 2)        
       

    def testComponentsCPUPollerCriticalThreshold(self):
        self.config.AlertGenerator.componentsCPUPoller.soft = 70
        self.config.AlertGenerator.componentsCPUPoller.critical = 80
        self.config.AlertGenerator.componentsCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsCPUPoller.period = 1
        level = self.config.AlertProcessor.critical.level
        thresholdToTest = self.config.AlertGenerator.componentsCPUPoller.critical
        # expected 2: 2 components are defined by the configuration, an alert
        # will be sent for each of them
        self._doComponentsPoller(thresholdToTest, level,
                                 self.config.AlertGenerator.componentsCPUPoller,
                                 ComponentsCPUPoller, expected = 2)


    def testComponentsCPUPollerNoAlert(self):
        self.config.AlertGenerator.componentsCPUPoller.soft = 70
        self.config.AlertGenerator.componentsCPUPoller.critical = 80
        self.config.AlertGenerator.componentsCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsCPUPoller.period = 1
        level = 0
        # lower the threshold so that the alert never happens
        thresholdToTest = self.config.AlertGenerator.componentsCPUPoller.soft - 10
        self._doComponentsPoller(thresholdToTest, level,
                                 self.config.AlertGenerator.componentsCPUPoller,
                                 ComponentsCPUPoller, expected = 0)
        

    def testComponentsMemoryPollerSoftThreshold(self):
        self.config.AlertGenerator.componentsMemPoller.soft = 70
        self.config.AlertGenerator.componentsMemPoller.critical = 80
        self.config.AlertGenerator.componentsMemPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsMemPoller.period = 1
        level = self.config.AlertProcessor.soft.level
        thresholdToTest = self.config.AlertGenerator.componentsMemPoller.soft
        # expected 2: 2 components are defined by the configuration, an alert
        # will be sent for each of them
        self._doComponentsPoller(thresholdToTest, level,
                                 self.config.AlertGenerator.componentsMemPoller,
                                 ComponentsMemoryPoller, expected = 2)


    def testComponentsMemoryPollerCriticalThreshold(self):
        self.config.AlertGenerator.componentsMemPoller.soft = 70
        self.config.AlertGenerator.componentsMemPoller.critical = 80
        self.config.AlertGenerator.componentsMemPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsMemPoller.period = 1
        level = self.config.AlertProcessor.critical.level
        thresholdToTest = self.config.AlertGenerator.componentsMemPoller.critical
        # expected 2: 2 components are defined by the configuration, an alert
        # will be sent for each of them
        self._doComponentsPoller(thresholdToTest, level,
                                 self.config.AlertGenerator.componentsMemPoller,
                                 ComponentsMemoryPoller, expected = 2)


    def testComponentsMemoryPollerNoAlert(self):
        self.config.AlertGenerator.componentsMemPoller.soft = 70
        self.config.AlertGenerator.componentsMemPoller.critical = 80
        self.config.AlertGenerator.componentsMemPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsMemPoller.period = 1
        level = 0
        # lower the threshold so that the alert never happens
        thresholdToTest = self.config.AlertGenerator.componentsMemPoller.soft - 10
        self._doComponentsPoller(thresholdToTest, level,
                                 self.config.AlertGenerator.componentsMemPoller,
                                 ComponentsMemoryPoller, expected = 0)
                

    def testComponentsCPUPollerPossiblyOnLiveAgent(self):
        """
        If an agent is currently running with the WMAGENT_CONFIG
        configuration, the test will pick up its live processes
        and poll them.
        
        """
        # check if the live agent configuration was loaded (above this class)    
        if "config" in globals():
            self.config = config
            # AlertProcessor values - values for Level soft, resp. critical
            # are also needed by this AlertGenerator test
            self.config.component_("AlertProcessor")
            self.config.AlertProcessor.componentDir = "/tmp"
            self.config.AlertProcessor.section_("critical")
            self.config.AlertProcessor.section_("soft")
            self.config.AlertProcessor.critical.level = 5
            self.config.AlertProcessor.soft.level = 0
            
            self.config.component_("AlertGenerator")
            self.config.AlertGenerator.componentDir = "/tmp"
            self.config.section_("Alert")
            self.config.Alert.address = "tcp://127.0.0.1:6557"
            self.config.Alert.controlAddr = "tcp://127.0.0.1:6559"
            
            self.config.AlertGenerator.section_("componentsCPUPoller")
        else:
            self.config = getConfig("/tmp")
        
        self.config.AlertGenerator.componentsCPUPoller.soft = 70
        self.config.AlertGenerator.componentsCPUPoller.critical = 80
        self.config.AlertGenerator.componentsCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsCPUPoller.period = 0.3
                
        # the generator has already been instantiated, but we need another one
        # with the configuration defined just above
        # mock generator instance to communicate some configuration values
        self.generator = utils.AlertGeneratorMock(self.config)
        
        handler, receiver = utils.setUpReceiver(self.generator.config.Alert.address,
                                                self.generator.config.Alert.controlAddr)

        numMeasurements = self.config.AlertGenerator.componentsCPUPoller.period / self.config.AlertGenerator.componentsCPUPoller.pollInterval
        poller = ComponentsCPUPoller(self.config.AlertGenerator.componentsCPUPoller, self.generator)
        # inject own input sample data provider
        thresholdToTest = self.config.AlertGenerator.componentsCPUPoller.soft
        # there is in fact an input argument in this case which needs to be ignored
        poller.sample = lambda proc_: random.randint(thresholdToTest - 10, thresholdToTest)
        
        poller.start()
        self.assertTrue(poller.is_alive())

        # no alert shall arrive
        time.sleep(5 * self.config.AlertGenerator.componentsCPUPoller.period)
        
        poller.terminate()
        receiver.shutdown()
        self.assertFalse(poller.is_alive())
Exemple #35
class AlertGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = ["WMCore.WMBS",'WMCore.Agent.Database',
                                                 "WMCore.ResourceControl"],
                                useDefault = False)
        self.testDir = self.testInit.generateWorkDir()
        # AlertGenerator instance
        self.generator = None
        self.config = getConfig(self.testDir)
        self.config.section_("CoreDatabase")
        self.config.CoreDatabase.socket = os.environ.get("DBSOCK")
        self.config.CoreDatabase.connectUrl = os.environ.get("DATABASE")
        self.testComponentDaemonXml = os.path.join(self.testDir, "Daemon.xml")


    def tearDown(self):
        self.testInit.clearDatabase()
        self.testInit.delWorkDir()
        self.generator = None
        # if the TestComponent directory and its Daemon.xml left over from the
        # ComponentsPoller test still exist, delete them
        d = os.path.dirname(self.testComponentDaemonXml)
        if os.path.exists(d):
            shutil.rmtree(d)


    def _startComponent(self):
        self.generator = AlertGenerator(self.config)
        try:
            # self.proc.startComponent() would block: in Harness.py the method
            # just calls prepareToStart() and waits forever
            # self.proc.startDaemon() is no good for this either ... it puts
            # everything in the background
            self.generator.prepareToStart() # method of Harness
        except Exception as ex:
            print(ex)
            self.fail(str(ex))
        logging.debug("AlertGenerator and its sub-components should be running now ...")


    def _stopComponent(self):
        logging.debug("Going to stop the AlertGenerator ...")
        # stop via component method
        try:
            self.generator.stopAlertGenerator()
        except Exception as ex:
            logging.error(ex)
            self.fail(str(ex))
        logging.debug("AlertGenerator should be stopped now.")


    def testAlertProcessorBasic(self):
        """
        Just tests starting and stopping the component machinery.
        Should start and stop all configured pollers.

        """
        # the generator will run full-fledged pollers that may get triggered
        # and send some alerts; consume them here so they do not clash with
        # further tests
        handler, receiver = utils.setUpReceiver(self.config.Alert.address,
                                                self.config.Alert.controlAddr)
        self._startComponent()
        # test that all poller processes are running
        for poller in self.generator._pollers:
            self.assertTrue(poller.is_alive())
        # just give the pollers some time to run
        time.sleep(5)
        self._stopComponent()
        receiver.shutdown()
        print("%s alerts captured along the way (test %s)." % (len(handler.queue),
                                                                inspect.stack()[0][3]))


    def testAllFinalClassPollerImplementations(self):
        """
        Any new final (leaf) poller implementation should be added here
        to test its basic flow.

        """
        config = getConfig("/tmp")
        # create a bogus config section; we just need a bunch of values defined
        config.AlertGenerator.section_("bogusPoller")
        # only the couch-related pollers require couchURL, but passing it to the
        # other ones as well does no harm; it's just because all pollers are
        # probed here in a single test ...
        config.AlertGenerator.bogusPoller.couchURL = os.getenv("COUCHURL", None)
        config.AlertGenerator.bogusPoller.soft = 5 # [percent]
        config.AlertGenerator.bogusPoller.critical = 50 # [percent]
        config.AlertGenerator.bogusPoller.pollInterval = 0.2  # [second]
        config.AlertGenerator.bogusPoller.period = 0.5
        # currently only CouchErrorsPoller uses this config value
        config.AlertGenerator.bogusPoller.observables = 4000

        # need to create a temp directory, a real process and its Daemon.xml
        # so that it looks like an agent component's process whose information
        # can be checked back; use this test's own PID
        pid = os.getpid()
        config.component_("TestComponent")
        d = os.path.dirname(self.testComponentDaemonXml)
        config.TestComponent.componentDir = d
        if not os.path.exists(d):
            os.mkdir(d)
        f = open(self.testComponentDaemonXml, 'w')
        f.write(utils.daemonXmlContent % dict(PID_TO_PUT = pid))
        f.close()

        generator = utils.AlertGeneratorMock(config)
        pollers = []
        for pollerClass in finalPollerClasses:
            p = pollerClass(config.AlertGenerator.bogusPoller, generator)
            # the poller may send something during check() below, so satisfy the sender method with a stub
            p.sender = lambda alert: 1 + 1
            pollers.append(p)

        for poller in pollers:
            poller.check()
            if hasattr(poller, "_measurements"):
                mes = poller._measurements
                self.assertEqual(len(mes), 1)
                self.assertTrue(isinstance(mes[0], float))
            if hasattr(poller, "_compMeasurements"):
                for measurements in poller._compMeasurements:
                    self.assertEqual(len(measurements), 1)
                    self.assertTrue(isinstance(measurements[0], float))

        shutil.rmtree(d)
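
The sender stub above (lambda alert: 1 + 1) simply swallows whatever a poller tries to send during check(). A hypothetical variant, sketched below under the assumption that the sender is called with a single alert argument (as the lambda's signature suggests), would record the alerts so a test could also inspect them; sentAlerts and recordingSender are illustrative names only.

sentAlerts = []

def recordingSender(alert):
    # collect alerts instead of discarding them, so assertions can look at them
    sentAlerts.append(alert)

# e.g. p.sender = recordingSender   # in place of the no-op lambda used above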
Exemple #36
0
class SetupCMSSWPsetTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testDir = self.testInit.generateWorkDir()
        sys.path.insert(0, os.path.join(WMCore.WMBase.getTestBase(),
                                        "WMCore_t/WMRuntime_t/Scripts_t"))

    def tearDown(self):
        sys.path.remove(os.path.join(WMCore.WMBase.getTestBase(), "WMCore_t/WMRuntime_t/Scripts_t"))
        if 'WMTaskSpace' in sys.modules:
            del sys.modules["WMTaskSpace"]
        self.testInit.delWorkDir()
        os.unsetenv("WMAGENT_SITE_CONFIG_OVERRIDE")

    def createTestStep(self):
        """
        _createTestStep_

        Create a test step that can be passed to the setup script.

        """
        newStep = WMStep("cmsRun1")
        newStepHelper = CMSSWStepHelper(newStep)
        newStepHelper.setStepType("CMSSW")
        newStepHelper.setGlobalTag("SomeGlobalTag")
        stepTemplate = StepFactory.getStepTemplate("CMSSW")
        stepTemplate(newStep)
        newStep.application.command.configuration = "PSet.py"
        return newStepHelper


    def createTestJob(self):
        """
        _createTestJob_

        Create a test job that has parents for each input file.

        """
        newJob = Job(name = "TestJob")
        newJob.addFile(File(lfn = "/some/file/one",
                            parents = set([File(lfn = "/some/parent/one")])))
        newJob.addFile(File(lfn = "/some/file/two",
                            parents = set([File(lfn = "/some/parent/two")])))
        return newJob

    def loadProcessFromPSet(self):
        """
        _loadProcessFromPSet_

        This requires changing the working directory,
        do so in a safe manner to encapsulate the change to this method only
        """

        currentPath = os.getcwd()
        loadedProcess = None
        try:
            if not os.path.isdir(self.testDir):
                raise RuntimeError("test directory %s does not exist" % self.testDir)
            os.chdir(self.testDir)
            testFile = "PSet.py"
            pset = imp.load_source('process', testFile)
            loadedProcess = pset.process
        except Exception as ex:
            self.fail("An exception was caught while trying to load the PSet, %s" % str(ex))
        finally:
            os.chdir(currentPath)

        return loadedProcess

    def testPSetFixup(self):
        """
        _testPSetFixup_

        Verify that all necessary parameters are set in the PSet.

        """
        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset

        if os.environ.get('CMSSW_VERSION', None):
            del os.environ['CMSSW_VERSION']

        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.stepSpace = ConfigSection(name = "stepSpace")
        setupScript.stepSpace.location = self.testDir
        setupScript.job = self.createTestJob()
        setupScript()

        fixedPSet = self.loadProcessFromPSet()

        self.assertEqual(len(fixedPSet.source.fileNames.value), 2,
                         "Error: Wrong number of files.")
        self.assertEqual(len(fixedPSet.source.secondaryFileNames.value), 2,
                         "Error: Wrong number of secondary files.")
        self.assertEqual(fixedPSet.source.fileNames.value[0], "/some/file/one",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.fileNames.value[1], "/some/file/two",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.secondaryFileNames.value[0], "/some/parent/one",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.secondaryFileNames.value[1], "/some/parent/two",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.maxEvents.input.value, -1,
                         "Error: Wrong maxEvents.")

    def testEventsPerLumi(self):
        """
        _testEventsPerLumi_
        Verify that you can put in events per lumi in the process.

        """
        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset

        os.environ['CMSSW_VERSION'] = "CMSSW_7_4_0"

        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.step.setEventsPerLumi(500)
        setupScript.stepSpace = ConfigSection(name = "stepSpace")
        setupScript.stepSpace.location = self.testDir
        setupScript.job = self.createTestJob()
        setupScript()

        fixedPSet = self.loadProcessFromPSet()

        self.assertEqual(len(fixedPSet.source.fileNames.value), 2,
                         "Error: Wrong number of files.")
        self.assertEqual(len(fixedPSet.source.secondaryFileNames.value), 2,
                         "Error: Wrong number of secondary files.")
        self.assertEqual(fixedPSet.source.fileNames.value[0], "/some/file/one",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.fileNames.value[1], "/some/file/two",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.secondaryFileNames.value[0], "/some/parent/one",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.secondaryFileNames.value[1], "/some/parent/two",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.numberEventsInLuminosityBlock.value,
                         500, "Error: Wrong number of events per luminosity block")
        self.assertEqual(fixedPSet.maxEvents.input.value, -1,
                         "Error: Wrong maxEvents.")

    def testChainedProcesing(self):
        """
        test for chained CMSSW processing - check the overridden TFC, its
        values, and that the input files are set correctly.

        """
        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset

        os.environ['CMSSW_VERSION'] = "CMSSW_7_6_0"

        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.stepSpace = ConfigSection(name = "stepSpace")
        setupScript.stepSpace.location = self.testDir
        setupScript.job = self.createTestJob()
        setupScript.step.setupChainedProcessing("my_first_step", "my_input_module")
        setupScript()

        # test if the overridden TFC is right
        self.assertTrue(hasattr(setupScript.step.data.application, "overrideCatalog"),
                        "Error: overridden TFC was not set")
        tfc = loadTFC(setupScript.step.data.application.overrideCatalog)
        inputFile = "../my_first_step/my_input_module.root"
        self.assertEqual(tfc.matchPFN("direct", inputFile), inputFile)
        self.assertEqual(tfc.matchLFN("direct", inputFile), inputFile)

        self.assertEqual(setupScript.process.source.fileNames.value, [inputFile])


    def testPileupSetup(self):
        """
        Test the pileup setting.

        reference (setupScript.process instance):
        in test/python/WMCore_t/WMRuntime_t/Scripts_t/WMTaskSpace/cmsRun1/PSet.py

        """
        try:
            from dbs.apis.dbsClient import DbsApi
        except ImportError as ex:
            raise nose.SkipTest

        # this is a modified and shortened version of
        # WMCore/test/python/WMCore_t/Misc_t/site-local-config.xml
        # since the dataset name in question (below) is only present at
        # storm-fe-cms.cr.cnaf.infn.it, we need to make the test think that is its local SE
        siteLocalConfigContent = \
        """
<site-local-config>
    <site name="-SOME-SITE-NAME-">
        <event-data>
            <catalog url="trivialcatalog_file:/uscmst1/prod/sw/cms/SITECONF/T1_US_FNAL/PhEDEx/storage.xml?protocol=dcap"/>
        </event-data>
        <local-stage-out>
            <!-- original cmssrm.fnal.gov -->
            <phedex-node value="T2_CH_CERN"/>
            <command value="test-copy"/>
            <catalog url="trivialcatalog_file:/uscmst1/prod/sw/cms/SITECONF/T1_US_FNAL/PhEDEx/storage.xml?protocol=dcap"/>
        </local-stage-out>
        <calib-data>
            <frontier-connect>
                <load balance="proxies"/>
                <proxy url="http://cmsfrontier1.fnal.gov:3128"/>
                <proxy url="http://cmsfrontier2.fnal.gov:3128"/>
            </frontier-connect>
        </calib-data>
    </site>
</site-local-config>
"""
        siteLocalConfig = os.path.join(self.testDir, "test-site-local-config.xml")
        f = open(siteLocalConfig, 'w')
        f.write(siteLocalConfigContent)
        f.close()

        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset
        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.stepSpace = ConfigSection(name = "stepSpace")
        setupScript.stepSpace.location = os.path.join(self.testDir, "cmsRun1")
        setupScript.job = self.createTestJob()
        # define the pileup configuration
        # although the implementation accepts any type of pileup, only the
        # "data" and "mc" types are eventually considered and lead to any
        # modifications of the job input files
        pileupConfig = {"data": ["/Mu/PenguinsPenguinsEverywhere-SingleMu-HorriblyJaundicedYellowEyedPenginsSearchingForCarrots-v31/RECO"],
                        "mc": ["/Mu/PenguinsPenguinsEverywhere-SingleMu-HorriblyJaundicedYellowEyedPenginsSearchingForCarrots-v31/RECO"]}
        dbsUrl = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
        setupScript.step.setupPileup(pileupConfig, dbsUrl)
        # SetupCMSSWPset pileup handling consults SiteLocalConfig to determine
        # the StorageElement (SE) name the job is running on.
        # SiteLocalConfig loads the site-local-config.xml file from the location
        # given by an environment variable; if the variable is not defined yet, set it.
        # Obviously, if "WMAGENT_SITE_CONFIG_OVERRIDE" is already set here, the
        # SE-name trick above is not effective
        if not os.getenv("WMAGENT_SITE_CONFIG_OVERRIDE", None):
            os.environ["WMAGENT_SITE_CONFIG_OVERRIDE"] = siteLocalConfig
        # find out local site name from the testing local site config,
        # will be needed later
        siteConfig = loadSiteLocalConfig()
        seLocalName = siteConfig.localStageOut["phedex-node"]
        print("Running on site '%s', local SE name: '%s'" % (siteConfig.siteName, seLocalName))

        # before calling the script, SetupCMSSWPset will try to load JSON
        # pileup configuration file, need to create it in self.testDir
        fetcher = PileupFetcher()
        fetcher.setWorkingDirectory(self.testDir)
        fetcher._createPileupConfigFile(setupScript.step, fakeSites=['T1_US_FNAL'])

        setupScript()

        # now test all modifications carried out in SetupCMSSWPset.__call__
        # which will also test that CMSSWStepHelper.setupPileup run correctly
        mixModules, dataMixModules = setupScript._getPileupMixingModules()

        # load in the pileup configuration in the form of dict which
        # PileupFetcher previously saved in a JSON file
        pileupDict = setupScript._getPileupConfigFromJson()

        # get the sub dict for particular pileup type
        # for pileupDict structure description - see PileupFetcher._queryDbsAndGetPileupConfig
        for pileupType, modules in zip(("data", "mc"), (dataMixModules, mixModules)):
            # a KeyError here means the pileupConfig above is not correct - it
            # needs to contain both of these pileup types
            d = pileupDict[pileupType]
            self._mixingModulesInputFilesTest(modules, d, seLocalName)


    def _mixingModulesInputFilesTest(self, modules, pileupSubDict, seLocalName):
        """
        pileupSubDict - contains only the dictionary for a particular pileup type

        """
        # consider only locally available files
        filesInConfigDict = []
        for v in pileupSubDict.values():
            if seLocalName in v["phedexNodeNames"]:
                filesInConfigDict.extend(v["FileList"])

        for module in modules:
            inputTypeAttrib = getattr(module, "input", None) or getattr(module, "secsource", None)
            fileNames = inputTypeAttrib.fileNames.value
            if fileNames is None:
                fileNames = []
            msg = ("Pileup configuration file list '%s' and mixing modules input "
                   "filelist '%s' are not identical." % (filesInConfigDict, fileNames))
            self.assertEqual(sorted(filesInConfigDict), sorted(fileNames), msg)
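
For orientation, a hedged sketch of the per-type pileup dictionary shape this helper relies on, inferred only from the keys it accesses (phedexNodeNames and FileList per block); the block name, node name and file name below are made up.

examplePileupSubDict = {
    "/Mu/ExampleDataset#block-1": {          # hypothetical block entry
        "phedexNodeNames": ["T2_CH_CERN"],
        "FileList": ["/store/data/example/file1.root"],
    },
}

# the same filtering the helper performs: keep only locally available files
seLocalName = "T2_CH_CERN"
filesInConfigDict = []
for v in examplePileupSubDict.values():
    if seLocalName in v["phedexNodeNames"]:
        filesInConfigDict.extend(v["FileList"])
assert filesInConfigDict == ["/store/data/example/file1.root"]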
Exemple #37
0
class CMSSW_t(unittest.TestCase):
    def setUp(self):
        """
        _setUp_

        Build a testing environment similar to a WN
        """
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()

        # Build a workload/task/step with the basic required information
        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("CMSSWExecutor")
        stepHelper = self.task.makeStep("ExecutorTest")
        self.step = stepHelper.data
        template = CMSSWTemplate()
        template(self.step)
        self.helper = template.helper(self.step)
        self.step.application.setup.scramCommand = "scramulator.py"
        self.step.application.command.executable = "cmsRun.py"
        self.step.application.setup.scramProject = "CMSSW"
        self.step.application.setup.scramArch = "slc5_ia32_gcc434"
        self.step.application.setup.cmsswVersion = "CMSSW_X_Y_Z"
        self.step.application.setup.softwareEnvironment = "echo \"Software Setup...\";"
        self.step.output.jobReport = "FrameworkJobReport.xml"
        self.helper.addOutputModule("outputRECORECO",
                                    primaryDataset="Bogus",
                                    processedDataset="Test-Era-v1",
                                    dataTier="DATA")
        self.helper.addOutputModule("outputALCARECORECO",
                                    primaryDataset="Bogus",
                                    processedDataset="Test-Era-v1",
                                    dataTier="DATA")
        self.helper.setGlobalTag("Bogus")
        taskMaker = TaskMaker(self.workload, self.testDir)
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        # Build the TaskSpace/StepSpace
        self.sandboxDir = os.path.join(self.testDir, "UnitTests")
        self.task.build(self.testDir)
        sys.path.append(self.testDir)
        sys.path.append(self.sandboxDir)

        # Copy the files that cmsRun would have generated in the step space
        open(os.path.join(self.step.builder.workingDir, "outputRECORECO.root"),
             "w").close()
        open(
            os.path.join(self.step.builder.workingDir,
                         "outputALCARECORECO.root"), "w").close()
        shutil.copy(
            os.path.join(getTestBase(),
                         "WMCore_t/FwkJobReport_t/CMSSWProcessingReport.xml"),
            os.path.join(self.step.builder.workingDir,
                         "FrameworkJobReport.xml"))

        # Create a job
        self.job = Job(name="/UnitTest/CMSSWExecutor/ExecutorTest-test-job")
        self.job["id"] = 1

        # Set the PATH
        binDir = inspect.getsourcefile(ModuleLocator)
        binDir = binDir.replace("__init__.py", "bin")

        if not binDir in os.environ['PATH']:
            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)

        self.oldCwd = os.getcwd()

    def tearDown(self):
        """
        _tearDown_

        Clean up
        """
        self.testInit.delWorkDir()
        sys.path.remove(self.testDir)
        sys.path.remove(self.sandboxDir)
        try:
            sys.modules.pop("WMTaskSpace")
            sys.modules.pop("WMTaskSpace.ExecutorTest")
            sys.modules.pop("WMTaskSpace.ExecutorTest.WMCore")
            sys.modules.pop("WMSandbox")
            sys.modules.pop("WMSandbox.CMSSWExecutor")
            sys.modules.pop("WMSandbox.CMSSWExecutor.ExecutorTest")
        except KeyError:
            return

        return

    def testA_Execute(self):
        """
        _Execute_

        Test the full path, including execute.
        """
        try:
            os.chdir(self.step.builder.workingDir)
            executor = CMSSWExecutor()
            executor.initialise(self.step, self.job)
            executor.pre()
            # Remove the scram pre-script as it requires an actual SCRAM environment
            executor.step.runtime.scramPreScripts.remove("SetupCMSSWPset")
            executor.execute()
            executor.post()
            # Check that there were no errors
            self.assertEqual(0, executor.report.getExitCode())
            # Check that we processed the XML
            self.assertEqual(2, len(executor.report.getAllFiles()))
            # Check that the executable was really executed
            self.assertTrue(
                os.path.isfile(
                    os.path.join(self.step.builder.workingDir,
                                 "BogusFile.txt")))
        except Exception as ex:
            self.fail("Failure encountered, %s" % str(ex))
        finally:
            os.chdir(self.oldCwd)
        return

    def testB_ExecuteNonZeroExit(self):
        """
        _ExecuteNonZeroExit_

        Test the execution of a script
        which exits with non-zero code.
        """
        self.step.application.command.executable = "brokenCmsRun.py"
        shutil.copy(
            os.path.join(getTestBase(),
                         "WMCore_t/FwkJobReport_t/CMSSWFailReport.xml"),
            os.path.join(self.step.builder.workingDir,
                         "FrameworkJobReport.xml"))
        try:
            os.chdir(self.step.builder.workingDir)
            executor = StepFactory.getStepExecutor("CMSSW")
            executor.initialise(self.step, self.job)
            executor.pre()
            executor.step.runtime.scramPreScripts.remove("SetupCMSSWPset")
            try:
                executor.execute()
                self.fail("An exception should have been raised")
            except WMExecutionFailure as ex:
                executor.diagnostic(ex.code, executor, ExceptionInstance=ex)
                self.assertEqual(8001, executor.report.getExitCode())
                report = Report()
                report.load("Report.pkl")
                self.assertEqual(8001, report.getExitCode())
        except Exception as ex:
            self.fail("Failure encountered, %s" % str(ex))
        finally:
            os.chdir(self.oldCwd)
        return

    def testC_ExecuteSegfault(self):
        """
        _ExecuteSegfault_

        Test the execution of a script
        which raises a ABRT signal which
        is the normal CMSSW response
        to a SEGFAULT.
        """
        self.step.application.command.executable = "test.sh"
        # CMSSW leaves an empty FWJR when a SEGFAULT is present
        open(
            os.path.join(self.step.builder.workingDir,
                         "FrameworkJobReport.xml"), "w").close()
        try:
            os.chdir(self.step.builder.workingDir)
            executor = StepFactory.getStepExecutor("CMSSW")
            executor.initialise(self.step, self.job)
            executor.pre()
            executor.step.runtime.scramPreScripts.remove("SetupCMSSWPset")
            try:
                executor.execute()
                self.fail("An exception should have been raised")
            except WMExecutionFailure as ex:
                executor.diagnostic(ex.code, executor, ExceptionInstance=ex)
                #self.assertEqual(50115, executor.report.getExitCode())
                self.assertEqual(134, executor.report.getExitCode())
                report = Report()
                report.load("Report.pkl")
                self.assertEqual(134, report.getExitCode())
        except Exception as ex:
            self.fail("Failure encountered, %s" % str(ex))
        finally:
            os.chdir(self.oldCwd)
        return

    def testD_ExecuteNoOutput(self):
        """
        _ExecuteNoOutput_

        Test what happens when no output is produced,
        the proper error should be included.
        """
        self.step.application.command.executable = "cmsRun.py"
        shutil.copy(
            os.path.join(getTestBase(),
                         "WMCore_t/FwkJobReport_t/CMSSWSkippedAll.xml"),
            os.path.join(self.step.builder.workingDir,
                         "FrameworkJobReport.xml"))
        try:
            os.chdir(self.step.builder.workingDir)
            executor = StepFactory.getStepExecutor("CMSSW")
            executor.initialise(self.step, self.job)
            executor.pre()
            executor.step.runtime.scramPreScripts.remove("SetupCMSSWPset")
            executor.execute()
            executor.post()
            self.assertEqual(60450, executor.report.getExitCode())
        except Exception as ex:
            self.fail("Failure encountered, %s" % str(ex))
        finally:
            os.chdir(self.oldCwd)
        return
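
A short aside on the exit code checked in testC_ExecuteSegfault: assuming the usual POSIX convention that a process killed by a signal reports status 128 plus the signal number, and that CMSSW aborts via SIGABRT (signal 6 on Linux), the expected value of 134 follows directly.

import signal

# 128 + SIGABRT (6 on Linux) == 134, the exit code asserted in testC_ExecuteSegfault
assert 128 + int(signal.SIGABRT) == 134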
Exemple #38
0
class otherStageOutTexst(unittest.TestCase):
    def setUp(self):
        # stolen from CMSSWExecutor_t. thanks, dave

        # first, delete all the sandboxen and taskspaces
        #    because of caching, this leaks from other tests in other files
        #    this sucks because the other tests are using sandboxen that
        #    are deleted after the test is over, which causes these tests
        #    to break
        modsToDelete = []
        # not sure what happens if you delete from
        # an array you're iterating over. doing it in
        # two steps
        for modname in sys.modules.keys():
            # need to blow away things in sys.modules, otherwise
            # they are cached and we look at old taskspaces
            if modname.startswith('WMTaskSpace'):
                modsToDelete.append(modname)
            if modname.startswith('WMSandbox'):
                modsToDelete.append(modname)
        for modname in modsToDelete:
            try:
                reload(sys.modules[modname])
            except Exception:
                pass
            del sys.modules[modname]

        self.oldpath = sys.path[:]
        self.testInit = TestInit(__file__)

        self.testDir = self.testInit.generateWorkDir()
        self.job = Job(name="/UnitTests/DeleterTask/DeleteTest-test-job")
        shutil.copyfile('/etc/hosts', os.path.join(self.testDir, 'testfile'))

        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("DeleterTask")

        cmsswHelper = self.task.makeStep("cmsRun1")
        cmsswHelper.setStepType('CMSSW')
        stepHelper = cmsswHelper.addStep("DeleteTest")
        stepHelper.setStepType('StageOut')

        self.cmsswstep = cmsswHelper.data
        self.cmsswHelper = cmsswHelper

        self.stepdata = stepHelper.data
        self.stephelp = StageOutTemplate.StageOutStepHelper(stepHelper.data)
        self.task.applyTemplates()

        self.executor = StepFactory.getStepExecutor(self.stephelp.stepType())
        taskMaker = TaskMaker(self.workload, os.path.join(self.testDir))
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        self.task.build(os.path.join(self.testDir, 'UnitTests'))

        sys.path.insert(0, self.testDir)
        sys.path.insert(0, os.path.join(self.testDir, 'UnitTests'))

        #        binDir = inspect.getsourcefile(ModuleLocator)
        #        binDir = binDir.replace("__init__.py", "bin")
        #
        #        if not binDir in os.environ['PATH']:
        #            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)
        open(os.path.join(self.testDir, 'UnitTests', '__init__.py'),
             'w').close()
        shutil.copyfile(
            os.path.join(os.path.dirname(__file__), 'MergeSuccess.pkl'),
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))

    def tearDown(self):
        sys.path = self.oldpath[:]
        self.testInit.delWorkDir()

        # making double sure WMTaskSpace and WMSandbox are gone
        modsToDelete = []
        # not sure what happens if you delete from
        # an array you're iterating over. doing it in
        # two steps
        for modname in sys.modules.keys():
            # need to blow away things in sys.modules, otherwise
            # they are cached and we look at old taskspaces
            if modname.startswith('WMTaskSpace'):
                modsToDelete.append(modname)
            if modname.startswith('WMSandbox'):
                modsToDelete.append(modname)
        for modname in modsToDelete:
            try:
                reload(sys.modules[modname])
            except Exception:
                pass
            del sys.modules[modname]
        myThread = threading.currentThread()
        if hasattr(myThread, "factory"):
            myThread.factory = {}

    @attr('integration')
    def testCPBackendStageOutAgainstReportNew(self):
        myReport = Report()
        myReport.unpersist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        myReport.data.cmsRun1.status = 0
        myReport.persist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        executor = StageOutExecutor.StageOut()
        executor.initialise(self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        self.stepdata.override.newStageOut = True
        executor.step = self.stepdata
        executor.execute()
        self.assertTrue(os.path.exists(os.path.join(self.testDir, 'hosts')))
        self.assertTrue(
            os.path.exists(os.path.join(self.testDir, 'test1', 'hosts')))

    @attr('integration')
    def testCPBackendStageOutAgainstReportFailedStepNew(self):
        myReport = Report()
        myReport.unpersist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        executor = StageOutExecutor.StageOut()
        executor.initialise(self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        self.stepdata.override.newStageOut = True
        executor.step = self.stepdata
        executor.execute()
        self.assertFalse(os.path.exists(os.path.join(self.testDir, 'hosts')))
        self.assertFalse(
            os.path.exists(os.path.join(self.testDir, 'test1', 'hosts')))
        return

    @attr('integration')
    def testCPBackendStageOutAgainstReportOld(self):

        myReport = Report()
        myReport.unpersist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        myReport.data.cmsRun1.status = 0
        myReport.persist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        executor = StageOutExecutor.StageOut()
        executor.initialise(self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute()
        self.assertTrue(os.path.exists(os.path.join(self.testDir, 'hosts')))
        self.assertTrue(
            os.path.exists(os.path.join(self.testDir, 'test1', 'hosts')))
        return

    @attr('integration')
    def testCPBackendStageOutAgainstReportFailedStepOld(self):
        myReport = Report()
        myReport.unpersist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))

        executor = StageOutExecutor.StageOut()
        executor.initialise(self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute()
        self.assertFalse(os.path.exists(os.path.join(self.testDir, 'hosts')))
        self.assertFalse(
            os.path.exists(os.path.join(self.testDir, 'test1', 'hosts')))
        return

    @attr('workerNodeTest')
    def testOnWorkerNodes(self):
        raise RuntimeError
        # Stage a file out, stage it back in, check it, delete it
        myReport = Report()
        myReport.unpersist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        del myReport.data.cmsRun1.output
        myReport.data.cmsRun1.section_('output')
        myReport.data.cmsRun1.output.section_('stagingTestOutput')
        myReport.data.cmsRun1.output.stagingTestOutput.section_('files')
        myReport.data.cmsRun1.output.stagingTestOutput.fileCount = 0
        targetFiles = [
            '/store/temp/WMAgent/storetest-%s' % time.time(),
            '/store/unmerged/WMAgent/storetest-%s' % time.time()
        ]

        for file in targetFiles:
            print("Adding file for StageOut %s" % file)
            self.addStageOutFile(myReport, file)

        myReport.persist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        executor = StageOutExecutor.StageOut()

        executor.initialise(self.stepdata, self.job)
        executor.step = self.stepdata
        print("beginning stageout")
        executor.execute()
        print("stageout done")

        # pull in the report with the stage out info
        myReport = Report()
        myReport.unpersist(
            os.path.join(self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1',
                         'Report.pkl'))
        print("Got the stage out data back")
        print(myReport.data)

        # now, transfer them back
        # TODO make a stagein step in the task - Melo
        import WMCore.Storage.FileManager as FileManagerModule
        fileManager = FileManagerModule.FileManager(numberOfRetries=10,
                                                    retryPauseTime=1)
        for file in targetFiles:
            print("Staging in %s" % file)

            fileManager.stageOut(fileToStage={
                'LFN': file,
                'PFN': '%s/%s' % (self.testDir, file)
            })
            self.assertTrue(os.path.exists('%s/%s' % (self.testDir, file)))
            # self.assertEqual(os.path.getsize('/etc/hosts', '%s/%s' % (self.testDir, file))) This makes no sense - EWV

        # now, should delete the files we made
        for file in targetFiles:
            print("deleting %s" % file)
            fileManager.deleteLFN(file)

        # try staging in again to make sure the files are gone
        for file in targetFiles:
            print("Staging in (should fail) %s" % file)
            self.assertRaises(StageOutError,
                              FileManagerModule.FileManager.stageOut,
                              fileManager,
                              fileToStage={
                                  'LFN': file,
                                  'PFN': '%s/%s' % (self.testDir, file)
                              },
                              stageOut=False)

        # need to make sure files didn't show up
        self.assertFalse(os.path.exists(os.path.join(self.testDir, 'hosts')))
        self.assertFalse(
            os.path.exists(os.path.join(self.testDir, 'test1', 'hosts')))

    def addStageOutFile(self, myReport, lfn):
        myId = myReport.data.cmsRun1.output.stagingTestOutput.fileCount
        mySection = myReport.data.cmsRun1.output.stagingTestOutput.section_(
            'file%s' % myId)
        mySection.section_('runs')
        setattr(mySection.runs, '114475', [33])
        mySection.section_('branches')
        mySection.lfn = lfn
        mySection.dataset = {
            'applicationName': 'cmsRun',
            'primaryDataset': 'Calo',
            'processedDataset': 'Commissioning09-PromptReco-v8',
            'dataTier': 'ALCARECO',
            'applicationVersion': 'CMSSW_3_2_7'
        }
        mySection.module_label = 'ALCARECOStreamCombined'
        mySection.parents = []
        mySection.location = 'srm-cms.cern.ch'
        mySection.checksums = {'adler32': 'bbcf2215', 'cksum': '2297542074'}
        mySection.pfn = '/etc/hosts'
        mySection.events = 20000
        mySection.merged = False
        mySection.size = 37556367
        myReport.data.cmsRun1.output.stagingTestOutput.fileCount = myId + 1

    def setLocalOverride(self, step):
        step.section_('override')
        step.override.command = 'cp'
        step.override.option = ''
        step.override.__setattr__('lfn-prefix', self.testDir + "/")
        step.override.__setattr__('phedex-node', 'DUMMYPNN')
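
The setLocalOverride helper above switches stage-out to a plain local copy. As a rough sketch only (an assumption about what the 'cp' override amounts to, not the real StageOutExecutor code), the destination PFN is essentially the lfn-prefix joined with the LFN, which is why the tests then look for files directly under self.testDir.

import shutil

def localCopyStageOut(sourcePfn, lfn, lfnPrefix):
    # hypothetical helper: with the 'cp' override, staging out an LFN is
    # effectively a local copy to lfn-prefix + LFN
    targetPfn = lfnPrefix + lfn.lstrip("/")
    shutil.copy(sourcePfn, targetPfn)
    return targetPfn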
Exemple #39
0
class otherLogArchiveTexst:#(unittest.TestCase):

    def setUp(self):
        # stolen from CMSSWExecutor_t. thanks, dave

        # first, delete all the sandboxen and taskspaces
        #    because of caching, this leaks from other tests in other files
        #    this sucks because the other tests are using sandboxen that
        #    are deleted after the test is over, which causes these tests
        #    to break
        modsToDelete = []
        # not sure what happens if you delete from
        # an array you're iterating over. doing it in
        # two steps
        for modname in sys.modules.keys():
            # need to blow away things in sys.modules, otherwise
            # they are cached and we look at old taskspaces
            if modname.startswith('WMTaskSpace'):
                modsToDelete.append(modname)
            if modname.startswith('WMSandbox'):
                modsToDelete.append(modname)
        for modname in modsToDelete:
            try:
                reload(sys.modules[modname])
            except Exception:
                pass
            del sys.modules[modname]

        self.oldpath = sys.path[:]
        self.testInit = TestInit(__file__)


        self.testDir = self.testInit.generateWorkDir()
        self.job = Job(name = "/UnitTests/DeleterTask/DeleteTest-test-job")
        shutil.copyfile('/etc/hosts', os.path.join(self.testDir, 'testfile'))

        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("DeleterTask")

        cmsswHelper = self.task.makeStep("cmsRun1")
        cmsswHelper.setStepType('CMSSW')
        stepHelper = cmsswHelper.addStep("DeleteTest")
        stepHelper.setStepType('LogArchive')

        self.cmsswstep = cmsswHelper.data
        self.cmsswHelper = cmsswHelper


        self.stepdata = stepHelper.data
        self.stephelp = LogArchiveTemplate.LogArchiveStepHelper(stepHelper.data)
        self.task.applyTemplates()

        self.executor = StepFactory.getStepExecutor(self.stephelp.stepType())
        taskMaker = TaskMaker(self.workload, os.path.join(self.testDir))
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()


        self.task.build(os.path.join(self.testDir, 'UnitTests'))

        sys.path.insert(0, self.testDir)
        sys.path.insert(0, os.path.join(self.testDir, 'UnitTests'))


#        binDir = inspect.getsourcefile(ModuleLocator)
#        binDir = binDir.replace("__init__.py", "bin")
#
#        if not binDir in os.environ['PATH']:
#            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)
        open( os.path.join( self.testDir, 'UnitTests', '__init__.py'),'w').close()
        shutil.copyfile( os.path.join( os.path.dirname( __file__ ), 'MergeSuccess.pkl'),
                         os.path.join( self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))

    def tearDown(self):
        sys.path = self.oldpath[:]
        self.testInit.delWorkDir()


        # making double sure WMTaskSpace and WMSandbox are gone
        modsToDelete = []
        # not sure what happens if you delete from
        # an array you're iterating over. doing it in
        # two steps
        for modname in sys.modules.keys():
            # need to blow away things in sys.modules, otherwise
            # they are cached and we look at old taskspaces
            if modname.startswith('WMTaskSpace'):
                modsToDelete.append(modname)
            if modname.startswith('WMSandbox'):
                modsToDelete.append(modname)
        for modname in modsToDelete:
            try:
                reload(sys.modules[modname])
            except Exception:
                pass
            del sys.modules[modname]
        myThread = threading.currentThread()
        if hasattr(myThread, "factory"):
            myThread.factory = {}

    @attr('integration')
    def testCPBackendLogArchiveAgainstReportNew(self):
        myReport = Report()
        myReport.unpersist(os.path.join( self.testDir, 'UnitTests','WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        myReport.data.cmsRun1.status = 0
        myReport.persist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        executor = LogArchiveExecutor.LogArchive()
        executor.initialise( self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        self.stepdata.override.newLogArchive = True
        executor.step = self.stepdata
        executor.execute( )
        self.assertTrue( os.path.exists( os.path.join( self.testDir, 'hosts' )))
        self.assertTrue( os.path.exists( os.path.join( self.testDir, 'test1', 'hosts')))

    @attr('integration')
    def testCPBackendLogArchiveAgainstReportFailedStepNew(self):
        myReport = Report()
        myReport.unpersist(os.path.join( self.testDir, 'UnitTests','WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        executor = LogArchiveExecutor.LogArchive()
        executor.initialise( self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        self.stepdata.override.newLogArchive = True
        executor.step = self.stepdata
        executor.execute( )
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'hosts' )))
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'test1', 'hosts')))
        return

    @attr('integration')
    def testCPBackendLogArchiveAgainstReportOld(self):

        myReport = Report()
        myReport.unpersist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        myReport.data.cmsRun1.status = 0
        myReport.persist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        executor = LogArchiveExecutor.LogArchive()
        executor.initialise( self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute( )
        self.assertTrue( os.path.exists( os.path.join( self.testDir, 'hosts' )))
        self.assertTrue( os.path.exists( os.path.join( self.testDir, 'test1', 'hosts')))
        return

    @attr('integration')
    def testCPBackendLogArchiveAgainstReportFailedStepOld(self):
        myReport = Report()
        myReport.unpersist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(os.path.join( self.testDir, 'UnitTests','WMTaskSpace', 'cmsRun1' , 'Report.pkl'))

        executor = LogArchiveExecutor.LogArchive()
        executor.initialise( self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute( )
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'hosts' )))
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'test1', 'hosts')))
        return

    @attr('workerNodeTest')
    def testOnWorkerNodes(self):
        raise RuntimeError
        # Stage a file out, stage it back in, check it, delete it
        myReport = Report()
        myReport.unpersist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        del myReport.data.cmsRun1.output
        myReport.data.cmsRun1.section_('output')
        myReport.data.cmsRun1.output.section_('stagingTestOutput')
        myReport.data.cmsRun1.output.stagingTestOutput.section_('files')
        myReport.data.cmsRun1.output.stagingTestOutput.fileCount = 0
        targetFiles = [ '/store/temp/WMAgent/storetest-%s' % time.time(),
                        '/store/unmerged/WMAgent/storetest-%s' % time.time()]

        for file in targetFiles:
            print("Adding file for LogArchive %s" % file)
            self.addLogArchiveFile(myReport, file )

        myReport.persist(os.path.join( self.testDir, 'UnitTests','WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        executor = LogArchiveExecutor.LogArchive()

        executor.initialise( self.stepdata, self.job)
        executor.step = self.stepdata
        print("beginning stageout")
        executor.execute( )
        print("stageout done")

        # pull in the report with the stage out info
        myReport = Report()
        myReport.unpersist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        print("Got the stage out data back")
        print(myReport.data)


        # now, transfer them back
        # TODO make a stagein step in the task - Melo
        import WMCore.Storage.FileManager as FileManagerModule
        fileManager = FileManagerModule.FileManager( numberOfRetries = 10, retryPauseTime = 1)
        for file in targetFiles:
            print("Staging in %s" % file)

            fileManager.stageOut( fileToStage = { 'LFN' : file,
                                    'PFN' : '%s/%s' % (self.testDir, file) },
                                    stageOut = False)
            self.assertTrue( os.path.exists( '%s/%s' % (self.testDir, file)))
            self.assertEqual(os.path.getsize('/etc/hosts'), os.path.getsize('%s/%s' % (self.testDir, file)))

        # now, should delete the files we made
        for file in targetFiles:
            print("deleting %s" % file)
            fileManager.deleteLFN(file)

        # try staging in again to make sure the files are gone
        for file in targetFiles:
            print("Staging in (should fail) %s" % file)
            self.assertRaises( LogArchiveError, \
                               FileManagerModule.FileManager.stageOut, \
                               fileManager,fileToStage = { 'LFN' : file,
                                    'PFN' : '%s/%s' % (self.testDir, file) },
                                    stageOut = False )



        # need to make sure files didn't show up
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'hosts' )))
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'test1', 'hosts')))

    def addLogArchiveFile(self, myReport, lfn):
        myId = myReport.data.cmsRun1.output.stagingTestOutput.fileCount
        mySection = myReport.data.cmsRun1.output.stagingTestOutput.section_('file%s' % myId)
        mySection.section_('runs')
        setattr(mySection.runs,'114475', [33])
        mySection.section_('branches')
        mySection.lfn = lfn
        mySection.dataset = {'applicationName': 'cmsRun', 'primaryDataset': 'Calo', 'processedDataset': 'Commissioning09-PromptReco-v8', 'dataTier': 'ALCARECO', 'applicationVersion': 'CMSSW_3_2_7'}
        mySection.module_label = 'ALCARECOStreamCombined'
        mySection.parents = []
        mySection.location = 'srm-cms.cern.ch'
        mySection.checksums = {'adler32': 'bbcf2215', 'cksum': '2297542074'}
        mySection.pfn = '/etc/hosts'
        mySection.events = 20000
        mySection.merged = False
        mySection.size = 37556367
        myReport.data.cmsRun1.output.stagingTestOutput.fileCount = myId + 1





    def setLocalOverride(self, step):
        step.section_('override')
        step.override.command    = 'cp'
        step.override.option     = ''
        step.override.__setattr__('lfn-prefix', self.testDir +"/")
        step.override.__setattr__('se-name','DUMMYSE')
        step.override.__setattr__('phedex-node','DUMMYPNN')
Exemple #40
0
class SetupCMSSWPsetTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testDir = self.testInit.generateWorkDir()
        sys.path.insert(
            0,
            os.path.join(WMCore.WMBase.getTestBase(),
                         "WMCore_t/WMRuntime_t/Scripts_t"))

    def tearDown(self):
        sys.path.remove(
            os.path.join(WMCore.WMBase.getTestBase(),
                         "WMCore_t/WMRuntime_t/Scripts_t"))
        if 'WMTaskSpace' in sys.modules:
            del sys.modules["WMTaskSpace"]
        self.testInit.delWorkDir()
        os.unsetenv("WMAGENT_SITE_CONFIG_OVERRIDE")

    def createTestStep(self):
        """
        _createTestStep_

        Create a test step that can be passed to the setup script.

        """
        newStep = WMStep("cmsRun1")
        stepTemplate = StepFactory.getStepTemplate("CMSSW")
        stepTemplate.install(newStep)
        newStepHelper = stepTemplate.helper(newStep)

        newStepHelper.setStepType("CMSSW")
        newStepHelper.setGlobalTag("SomeGlobalTag")
        newStepHelper.data.application.section_("setup")
        newStepHelper.cmsswSetup("CMSSW_11_0_2",
                                 scramArch=['slc7_amd64_gcc820'])

        return newStepHelper

    def createTestJob(self):
        """
        _createTestJob_

        Create a test job that has parents for each input file.

        """
        newJob = Job(name="TestJob")
        newJob.addFile(
            File(lfn="/some/file/one",
                 parents=set([File(lfn="/some/parent/one")])))
        newJob.addFile(
            File(lfn="/some/file/two",
                 parents=set([File(lfn="/some/parent/two")])))
        return newJob

    def loadProcessFromPSet(self, psetPath=None):
        """
        _loadProcessFromPSet_

        This requires changing the working directory,
        do so in a safe manner to encapsulate the change to this method only
        """
        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import Unpickler
        currentPath = os.getcwd()
        loadedProcess = None

        if psetPath is None:
            psetPath = self.testDir
        with open(os.path.join(psetPath, "PSet.pkl"), 'rb') as f:
            pset = Unpickler(f).load()

        os.chdir(currentPath)

        return pset

    def testPSetFixup(self):
        """
        _testPSetFixup_

        Verify that all necessary parameters are set in the PSet.

        """
        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset

        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.stepSpace = ConfigSection(name="stepSpace")
        setupScript.stepSpace.location = self.testDir
        shutil.copyfile(
            os.path.join(os.path.dirname(__file__), "WMTaskSpace", "cmsRun1",
                         "PSet.py"),
            os.path.join(setupScript.stepSpace.location, "PSet.py"))
        setupScript.job = self.createTestJob()
        setupScript()

        fixedPSet = self.loadProcessFromPSet(setupScript.stepSpace.location)

        self.assertTrue(hasattr(fixedPSet.source, 'fileNames'))
        self.assertTrue(hasattr(fixedPSet.source, 'secondaryFileNames'))
        self.assertEqual(fixedPSet.maxEvents.input._value, -1,
                         "Error: Wrong maxEvents.")

    def testEventsPerLumi(self):
        """
        _testEventsPerLumi_
        Verify that you can put in events per lumi in the process.

        """
        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset

        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.step.setEventsPerLumi(500)
        setupScript.stepSpace = ConfigSection(name="stepSpace")
        setupScript.stepSpace.location = self.testDir
        shutil.copyfile(
            os.path.join(os.path.dirname(__file__), "WMTaskSpace", "cmsRun1",
                         "PSet.py"),
            os.path.join(setupScript.stepSpace.location, "PSet.py"))
        setupScript.job = self.createTestJob()
        setupScript()

        fixedPSet = self.loadProcessFromPSet(setupScript.stepSpace.location)

        self.assertTrue(hasattr(fixedPSet.source, 'fileNames'))
        self.assertTrue(hasattr(fixedPSet.source, 'secondaryFileNames'))
        self.assertEqual(fixedPSet.source.numberEventsInLuminosityBlock._value,
                         500,
                         "Error: Wrong number of events per luminosity block")
        self.assertEqual(fixedPSet.maxEvents.input._value, -1,
                         "Error: Wrong maxEvents.")

    def testChainedProcesing(self):
        """
        test for chained CMSSW processing - check the overridden TFC, its
        values, and that the input files are set correctly.

        """
        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset

        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.stepSpace = ConfigSection(name="stepSpace")
        setupScript.stepSpace.location = self.testDir
        shutil.copyfile(
            os.path.join(os.path.dirname(__file__), "WMTaskSpace", "cmsRun1",
                         "PSet.py"),
            os.path.join(setupScript.stepSpace.location, "PSet.py"))
        setupScript.job = self.createTestJob()
        setupScript.step.setupChainedProcessing("my_first_step",
                                                "my_input_module")
        setupScript()
        fixedPSet = self.loadProcessFromPSet(setupScript.stepSpace.location)

        # test if the overridden TFC is right
        print("DEBUG override: {0}".format(
            setupScript.step.data.application.overrideCatalog))
        self.assertTrue(
            hasattr(setupScript.step.data.application, "overrideCatalog"),
            "Error: overridden TFC was not set")
        tfc = loadTFC(setupScript.step.data.application.overrideCatalog)
        inputFile = "../my_first_step/my_input_module.root"
        self.assertEqual(tfc.matchPFN("direct", inputFile), inputFile)
        self.assertEqual(tfc.matchLFN("direct", inputFile), inputFile)
        self.assertTrue(hasattr(fixedPSet.source, 'fileNames'))

    def testPileupSetup(self):
        """
        Test the pileup setting.

        reference (setupScript.process instance):
        in test/python/WMCore_t/WMRuntime_t/Scripts_t/WMTaskSpace/cmsRun1/PSet.py

        """
        try:
            from dbs.apis.dbsClient import DbsApi
        except ImportError as ex:
            raise unittest.SkipTest

        # this is a modified and shortened version of
        # WMCore/test/python/WMCore_t/Misc_t/site-local-config.xml
        # since the dataset name in question (below) is only present at
        # storm-fe-cms.cr.cnaf.infn.it, we need to make the test think that is its local SE
        siteLocalConfigContent = \
        """
<site-local-config>
    <site name="-SOME-SITE-NAME-">
        <event-data>
            <catalog url="trivialcatalog_file:/uscmst1/prod/sw/cms/SITECONF/T1_US_FNAL/PhEDEx/storage.xml?protocol=dcap"/>
        </event-data>
        <local-stage-out>
            <!-- original cmssrm.fnal.gov -->
            <phedex-node value="T2_CH_CERN"/>
            <command value="test-copy"/>
            <catalog url="trivialcatalog_file:/uscmst1/prod/sw/cms/SITECONF/T1_US_FNAL/PhEDEx/storage.xml?protocol=dcap"/>
        </local-stage-out>
        <calib-data>
            <frontier-connect>
                <load balance="proxies"/>
                <proxy url="http://cmsfrontier1.fnal.gov:3128"/>
                <proxy url="http://cmsfrontier2.fnal.gov:3128"/>
            </frontier-connect>
        </calib-data>
    </site>
</site-local-config>
"""
        siteLocalConfig = os.path.join(self.testDir,
                                       "test-site-local-config.xml")
        f = open(siteLocalConfig, 'w')
        f.write(siteLocalConfigContent)
        f.close()

        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset
        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.stepSpace = ConfigSection(name="stepSpace")
        setupScript.stepSpace.location = os.path.join(self.testDir, "cmsRun1")
        setupScript.job = self.createTestJob()
        # define the pileup configuration
        # although the implementation accepts any pileup type, only the "data"
        # and "mc" types are eventually considered and lead to modifications
        # of the job input files
        pileupConfig = {
            "data": [
                "/Mu/PenguinsPenguinsEverywhere-SingleMu-HorriblyJaundicedYellowEyedPenginsSearchingForCarrots-v31/RECO"
            ],
            "mc": [
                "/Mu/PenguinsPenguinsEverywhere-SingleMu-HorriblyJaundicedYellowEyedPenginsSearchingForCarrots-v31/RECO"
            ]
        }
        dbsUrl = "https://cmsweb-prod.cern.ch/dbs/prod/global/DBSReader"
        setupScript.step.setupPileup(pileupConfig, dbsUrl)
        # SetupCMSSWPset pileup handling consults SiteLocalConfig to determine
        # the storage (SE / PhEDEx node) name the job is running on.
        # SiteLocalConfig loads the site-local-config.xml file from the location
        # given by an environment variable; if that variable is not defined yet, set it.
        # Obviously, if "WMAGENT_SITE_CONFIG_OVERRIDE" is already set at this point,
        # the above trick with the SE name is not effective.
        if not os.getenv("WMAGENT_SITE_CONFIG_OVERRIDE", None):
            os.environ["WMAGENT_SITE_CONFIG_OVERRIDE"] = siteLocalConfig
        # find out local site name from the testing local site config,
        # will be needed later
        siteConfig = loadSiteLocalConfig()
        seLocalName = siteConfig.localStageOut["phedex-node"]
        print("Running on site '%s', local SE name: '%s'" %
              (siteConfig.siteName, seLocalName))

        # before calling the script, SetupCMSSWPset will try to load the JSON
        # pileup configuration file, so it needs to be created in self.testDir
        fetcher = PileupFetcher()
        fetcher.setWorkingDirectory(self.testDir)
        fetcher.createPileupConfigFile(setupScript.step)

        setupScript()

        # now test all modifications carried out in SetupCMSSWPset.__call__
        # which will also test that CMSSWStepHelper.setupPileup run correctly
        mixModules, dataMixModules = setupScript._getPileupMixingModules()

        # load in the pileup configuration in the form of dict which
        # PileupFetcher previously saved in a JSON file
        pileupDict = setupScript._getPileupConfigFromJson()

        # get the sub-dict for the particular pileup type
        # for the pileupDict structure description, see PileupFetcher._queryDbsAndGetPileupConfig
        for pileupType, modules in zip(("data", "mc"),
                                       (dataMixModules, mixModules)):
            # a KeyError here means the pileupConfig above is not correct - it
            # needs to contain both of these pileup types
            d = pileupDict[pileupType]
            self._mixingModulesInputFilesTest(modules, d, seLocalName)

    def _mixingModulesInputFilesTest(self, modules, pileupSubDict,
                                     seLocalName):
        """
        pileupSubDict - contains only the dictionary for a particular pileup type

        """
        # consider only locally available files
        filesInConfigDict = []
        for v in viewvalues(pileupSubDict):
            if seLocalName in v["phedexNodeNames"]:
                filesInConfigDict.extend(v["FileList"])

        for mod in modules:
            inputTypeAttrib = getattr(mod, "input", None) or getattr(
                mod, "secsource", None)
            fileNames = inputTypeAttrib.fileNames.value
            if fileNames is None:
                fileNames = []
            msg = (
                "Pileup configuration file list '%s' and mixing modules input "
                "filelist '%s' are not identical." %
                (filesInConfigDict, fileNames))
            self.assertEqual(sorted(filesInConfigDict), sorted(fileNames), msg)
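
# Hedged sketch (not part of the original tests): the shape of the per-dataset
# pileup sub-dictionary assumed by _mixingModulesInputFilesTest() above, inferred
# only from the keys it actually reads ("FileList", "phedexNodeNames"); the
# dataset, file and node names below are purely illustrative.
examplePileupSubDict = {
    "/Some/Pileup-Dataset/RECO": {
        "FileList": ["/store/pileup/file1.root", "/store/pileup/file2.root"],
        "phedexNodeNames": ["T2_CH_CERN"],
    },
}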
Exemple #41
0
class LogArchiveTest(unittest.TestCase):

    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()

        # shut up SiteLocalConfig
        os.environ['CMS_PATH'] = os.getcwd()
        workload = copy.deepcopy(testWorkloads.workload)
        task = workload.getTask("Production")
        step = task.getStep("stageOut1")
        # want to get the cmsstep so I can make the Report
        cmsstep = task.getStep('cmsRun1')
        self.cmsstepdir = os.path.join( self.testDir, 'cmsRun1')
        os.mkdir( self.cmsstepdir )
        open( os.path.join( self.cmsstepdir, '__init__.py'),'w').close()
        open( os.path.join( self.cmsstepdir, 'Report.pkl'),'w').close()

        cmsbuilder = CMSSWBuilder.CMSSW()
        cmsbuilder( cmsstep.data, 'Production', self.cmsstepdir )
        realstep = LogArchiveTemplate.LogArchiveStepHelper(step.data)
        realstep.disableRetries()
        self.realstep = realstep
        self.stepDir = os.path.join( self.testDir, 'stepdir')
        os.mkdir( self.stepDir )
        builder = LogArchiveBuilder.LogArchive()
        builder( step.data, 'Production', self.stepDir)
        
        # stolen from CMSSWExecutor_t. thanks, dave

        # first, delete all the sandboxen and taskspaces
        #    because of caching, this leaks from other tests in other files
        #    this sucks because the other tests are using sandboxen that
        #    are deleted after the test is over, which causes these tests
        #    to break
        modsToDelete = []
        # not sure what happens if you delete from
        # an array you're iterating over. doing it in
        # two steps
        for modname in sys.modules.keys():
            # need to blow away things in sys.modules, otherwise
            # they are cached and we look at old taskspaces
            if modname.startswith('WMTaskSpace'):
                modsToDelete.append(modname)
            if modname.startswith('WMSandbox'):
                modsToDelete.append(modname)
        for modname in modsToDelete:
            try:
                reload(sys.modules[modname])
            except Exception:
                pass
            del sys.modules[modname]

        self.oldpath = sys.path[:]
        self.testInit = TestInit(__file__)


        self.testDir = self.testInit.generateWorkDir()
        self.job = Job(name = "/UnitTests/DeleterTask/DeleteTest-test-job")
        shutil.copyfile('/etc/hosts', os.path.join(self.testDir, 'testfile'))

        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("DeleterTask")

        cmsswHelper = self.task.makeStep("cmsRun1")
        cmsswHelper.setStepType('CMSSW')
        stepHelper = cmsswHelper.addStep("DeleteTest")
        stepHelper.setStepType('LogArchive')

        self.cmsswstep = cmsswHelper.data
        self.cmsswHelper = cmsswHelper


        self.stepdata = stepHelper.data
        self.stephelp = LogArchiveTemplate.LogArchiveStepHelper(stepHelper.data)
        self.task.applyTemplates()

        self.executor = StepFactory.getStepExecutor(self.stephelp.stepType())
        taskMaker = TaskMaker(self.workload, os.path.join(self.testDir))
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()


        self.task.build(os.path.join(self.testDir, 'UnitTests'))

        sys.path.insert(0, self.testDir)
        sys.path.insert(0, os.path.join(self.testDir, 'UnitTests'))


#        binDir = inspect.getsourcefile(ModuleLocator)
#        binDir = binDir.replace("__init__.py", "bin")
#
#        if not binDir in os.environ['PATH']:
#            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)
        open( os.path.join( self.testDir, 'UnitTests', '__init__.py'),'w').close()
        shutil.copyfile( os.path.join( os.path.dirname( __file__ ), 'MergeSuccess.pkl'),
                         os.path.join( self.testDir, 'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))

    def tearDown(self):
        sys.path = self.oldpath[:]
        self.testInit.delWorkDir()


        # making double sure WMTaskSpace and WMSandbox are gone
        modsToDelete = []
        # not sure what happens if you delete from
        # an array you're iterating over. doing it in
        # two steps
        for modname in sys.modules.keys():
            # need to blow away things in sys.modules, otherwise
            # they are cached and we look at old taskspaces
            if modname.startswith('WMTaskSpace'):
                modsToDelete.append(modname)
            if modname.startswith('WMSandbox'):
                modsToDelete.append(modname)
        for modname in modsToDelete:
            try:
                reload(sys.modules[modname])
            except Exception:
                pass
            del sys.modules[modname]
        myThread = threading.currentThread()
        if hasattr(myThread, "factory"):
            myThread.factory = {}
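
    # Hedged sketch (not in the original test): the WMTaskSpace / WMSandbox
    # purging loop is duplicated in setUp() and tearDown() above; a hypothetical
    # helper like this could factor it out (it mirrors the original logic,
    # including the bare reload() call).
    def _purgeTaskSpaceModules(self):
        """Reload and drop any cached WMTaskSpace / WMSandbox modules."""
        for modname in list(sys.modules.keys()):
            if modname.startswith(('WMTaskSpace', 'WMSandbox')):
                try:
                    reload(sys.modules[modname])
                except Exception:
                    pass
                del sys.modules[modname]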

    def makeReport(self, fileName):
        myReport = Report('oneitem')
        myReport.addStep('stageOut1')
        myReport.addOutputModule('module1')
        myReport.addOutputModule('module2')
        myReport.addOutputFile('module1', {'lfn': 'FILE1', 'size': 1, 'events': 1})
        myReport.addOutputFile('module2', {'lfn': 'FILE2', 'size': 1, 'events': 1})
        myReport.addOutputFile('module2', {'lfn': 'FILE3', 'size': 1, 'events': 1})
        myReport.persist(fileName)

    def testExecutorDoesntDetonate(self):
        myReport = Report()
        myReport.unpersist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(os.path.join( self.testDir, 'UnitTests','WMTaskSpace', 'cmsRun1' , 'Report.pkl'))

        executor = LogArchiveExecutor.LogArchive()
        
        executor.initialise( self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute( )
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'hosts' )))
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'test1', 'hosts')))
        return
    
    
    def testUnitTestBackend(self):
        myReport = Report()
        myReport.unpersist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(os.path.join( self.testDir, 'UnitTests','WMTaskSpace', 'cmsRun1' , 'Report.pkl'))

        executor = LogArchiveExecutor.LogArchive()
        helper = LogArchiveTemplate.LogArchiveStepHelper(self.stepdata)
        helper.addOverride(override = 'command', overrideValue='test-win')
        helper.addOverride(override = 'option', overrideValue='')
        helper.addOverride(override = 'se-name', overrideValue='charlie.sheen.biz')
        helper.addOverride(override = 'lfn-prefix', overrideValue='test-win')
        
        executor.initialise( self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute( )
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'hosts' )))
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'test1', 'hosts')))

    def testUnitTestBackendNew(self):
        myReport = Report()
        myReport.unpersist(os.path.join( self.testDir,'UnitTests', 'WMTaskSpace', 'cmsRun1' , 'Report.pkl'))
        myReport.data.cmsRun1.status = 1
        myReport.persist(os.path.join( self.testDir, 'UnitTests','WMTaskSpace', 'cmsRun1' , 'Report.pkl'))

        executor = LogArchiveExecutor.LogArchive()
        helper = LogArchiveTemplate.LogArchiveStepHelper(self.stepdata)
        helper.addOverride(override = 'command', overrideValue='test-win')
        helper.addOverride(override = 'option', overrideValue='')
        helper.addOverride(override = 'se-name', overrideValue='charlie.sheen.biz')
        helper.addOverride(override = 'lfn-prefix', overrideValue='test-win')
        helper.setNewStageoutOverride( True )
        
        executor.initialise( self.stepdata, self.job)
        self.setLocalOverride(self.stepdata)
        executor.step = self.stepdata
        executor.execute( )
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'hosts' )))
        self.assertFalse( os.path.exists( os.path.join( self.testDir, 'test1', 'hosts')))
        
    def setLocalOverride(self, step):
        step.section_('override')
        step.override.command    = 'cp'
        step.override.option     = ''
        step.override.__setattr__('lfn-prefix', self.testDir +"/")
        step.override.__setattr__('se-name','DUMMYSE')
        step.override.__setattr__('phedex-node','DUMMYPNN')
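
# Hedged sketch (not from the original source): setLocalOverride() above stores
# keys containing hyphens via __setattr__, so they cannot be read back as plain
# attributes. 'readLocalOverride' is a hypothetical helper showing how such
# values would be retrieved with getattr.
def readLocalOverride(step):
    """Return the override values written by setLocalOverride() as a dict."""
    return {
        'command': step.override.command,
        'option': step.override.option,
        'lfn-prefix': getattr(step.override, 'lfn-prefix'),
        'se-name': getattr(step.override, 'se-name'),
        'phedex-node': getattr(step.override, 'phedex-node'),
    }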
Exemple #42
0
class CouchTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = getConfig(self.testDir)
        # mock generator instance to communicate some configuration values
        self.generator = utils.AlertGeneratorMock(self.config)
        self.testName = self.id().split('.')[-1]


    def tearDown(self):
        self.testInit.delWorkDir()
        self.generator = None


    def testAlertGeneratorCouchDbSizePollerBasic(self):
        config = getConfig("/tmp")
        try:
            poller = CouchDbSizePoller(config.AlertGenerator.couchDbSizePoller, self.generator)
        except Exception as ex:
            self.fail("%s: exception: %s" % (self.testName, ex))
        poller.check() # -> on real system dir may result in permission denied
        poller._dbDirectory = "/dev"
        poller.check() # -> OK

        # test failing during set up
        poller = CouchDbSizePoller(config.AlertGenerator.couchDbSizePoller, self.generator)
        poller._query = "nonsense query"
        # this will fail on the above query
        self.assertRaises(Exception, poller._getDbDir)
        poller.check()


    def testAlertGeneratorCouchDbSizePollerSoftThreshold(self):
        self.config.AlertGenerator.couchDbSizePoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchDbSizePoller.soft = 5
        self.config.AlertGenerator.couchDbSizePoller.critical = 10
        self.config.AlertGenerator.couchDbSizePoller.pollInterval = 0.2
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchDbSizePoller
        ti.config = self.config.AlertGenerator.couchDbSizePoller
        ti.thresholdToTest = self.config.AlertGenerator.couchDbSizePoller.soft
        ti.level = self.config.AlertProcessor.soft.level
        ti.expected = 1
        ti.thresholdDiff = 4
        ti.testCase = self
        utils.doGenericValueBasedPolling(ti)


    def testAlertGeneratorCouchDbSizePollerCriticalThreshold(self):
        self.config.AlertGenerator.couchDbSizePoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchDbSizePoller.soft = 5
        self.config.AlertGenerator.couchDbSizePoller.critical = 10
        self.config.AlertGenerator.couchDbSizePoller.pollInterval = 0.2
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchDbSizePoller
        ti.config = self.config.AlertGenerator.couchDbSizePoller
        ti.thresholdToTest = self.config.AlertGenerator.couchDbSizePoller.critical
        ti.level = self.config.AlertProcessor.critical.level
        ti.expected = 1
        ti.thresholdDiff = 1
        ti.testCase = self
        utils.doGenericValueBasedPolling(ti)


    def testAlertGeneratorCouchDbSizePollerNoAlert(self):
        self.config.AlertGenerator.couchDbSizePoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchDbSizePoller.soft = 5
        self.config.AlertGenerator.couchDbSizePoller.critical = 10
        self.config.AlertGenerator.couchDbSizePoller.pollInterval = 0.2
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchDbSizePoller
        ti.config = self.config.AlertGenerator.couchDbSizePoller
        ti.thresholdToTest = self.config.AlertGenerator.couchDbSizePoller.soft - 3
        ti.expected = 0
        ti.thresholdDiff = 2
        ti.testCase = self
        utils.doGenericValueBasedPolling(ti)


    def testAlertGeneratorCouchPollerBasic(self):
        self.config.AlertGenerator.section_("bogusCouchPoller")
        self.config.AlertGenerator.bogusCouchPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.bogusCouchPoller.soft = 1000
        self.config.AlertGenerator.bogusCouchPoller.critical = 2000
        self.config.AlertGenerator.bogusCouchPoller.pollInterval = 0.2
        self.config.AlertGenerator.bogusCouchPoller.period = 1
        try:
            poller = CouchPoller(self.config.AlertGenerator.bogusCouchPoller,
                                 self.generator)
        except Exception as ex:
            self.fail("%s: exception: %s" % (self.testName, ex))
        # this class would not have defined polling sample function, give it one
        poller.sample = lambda proc: float(12)
        poller.check()
        # assuming CouchDb server is running, check that 1 sensible measurement value was collected
        self.assertEqual(len(poller._measurements), 1)
        self.assertTrue(isinstance(poller._measurements[0], float))
        # test handling of a non-existing process
        CouchPoller._getProcessPID = lambda inst: 1212121212
        self.assertRaises(Exception, CouchPoller,
                          self.config.AlertGenerator.bogusCouchPoller, self.generator)


    def testAlertGeneratorCouchCPUPollerSoftThreshold(self):
        self.config.AlertGenerator.couchCPUPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchCPUPoller.soft = 70
        self.config.AlertGenerator.couchCPUPoller.critical = 80
        self.config.AlertGenerator.couchCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.couchCPUPoller.period = 1
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchCPUPoller
        ti.config = self.config.AlertGenerator.couchCPUPoller
        ti.thresholdToTest = self.config.AlertGenerator.couchCPUPoller.soft
        ti.level = self.config.AlertProcessor.soft.level
        ti.expected = 1
        ti.thresholdDiff = 10
        ti.testCase = self
        utils.doGenericPeriodAndProcessPolling(ti)


    def testAlertGeneratorCouchCPUPollerCriticalThreshold(self):
        self.config.AlertGenerator.couchCPUPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchCPUPoller.soft = 70
        self.config.AlertGenerator.couchCPUPoller.critical = 80
        self.config.AlertGenerator.couchCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.couchCPUPoller.period = 1
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchCPUPoller
        ti.config = self.config.AlertGenerator.couchCPUPoller
        ti.thresholdToTest = self.config.AlertGenerator.couchCPUPoller.critical
        ti.level = self.config.AlertProcessor.critical.level
        ti.expected = 1
        ti.thresholdDiff = 10
        ti.testCase = self
        utils.doGenericPeriodAndProcessPolling(ti)


    def testAlertGeneratorCouchCPUPollerNoAlert(self):
        self.config.AlertGenerator.couchCPUPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchCPUPoller.soft = 70
        self.config.AlertGenerator.couchCPUPoller.critical = 80
        self.config.AlertGenerator.couchCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.couchCPUPoller.period = 1
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchCPUPoller
        ti.config = self.config.AlertGenerator.couchCPUPoller
        # lower the threshold so that the alert is never generated
        ti.thresholdToTest = self.config.AlertGenerator.couchCPUPoller.soft - 20
        ti.level = 0
        ti.expected = 0
        ti.thresholdDiff = 10
        ti.testCase = self
        utils.doGenericPeriodAndProcessPolling(ti)


    def testAlertGeneratorCouchMemoryPollerSoftThreshold(self):
        self.config.AlertGenerator.couchMemPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchMemPoller.soft = 70
        self.config.AlertGenerator.couchMemPoller.critical = 80
        self.config.AlertGenerator.couchMemPoller.pollInterval = 0.2
        self.config.AlertGenerator.couchMemPoller.period = 1
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchMemoryPoller
        ti.config = self.config.AlertGenerator.couchMemPoller
        ti.thresholdToTest = self.config.AlertGenerator.couchMemPoller.soft
        ti.level = self.config.AlertProcessor.soft.level
        ti.expected = 1
        ti.thresholdDiff = 10
        ti.testCase = self
        utils.doGenericPeriodAndProcessPolling(ti)


    def testAlertGeneratorCouchMemoryPollerCriticalThreshold(self):
        self.config.AlertGenerator.couchMemPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchMemPoller.soft = 70
        self.config.AlertGenerator.couchMemPoller.critical = 80
        self.config.AlertGenerator.couchMemPoller.pollInterval = 0.2
        self.config.AlertGenerator.couchMemPoller.period = 1
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchMemoryPoller
        ti.config = self.config.AlertGenerator.couchMemPoller
        ti.thresholdToTest = self.config.AlertGenerator.couchMemPoller.critical
        ti.level = self.config.AlertProcessor.critical.level
        ti.expected = 1
        ti.thresholdDiff = 10
        ti.testCase = self
        utils.doGenericPeriodAndProcessPolling(ti)


    def testAlertGeneratorCouchMemoryPollerNoAlert(self):
        self.config.AlertGenerator.couchMemPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchMemPoller.soft = 70
        self.config.AlertGenerator.couchMemPoller.critical = 80
        self.config.AlertGenerator.couchMemPoller.pollInterval = 0.2
        self.config.AlertGenerator.couchMemPoller.period = 1
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchMemoryPoller
        ti.config = self.config.AlertGenerator.couchMemPoller
        # lower the threshold so that the alert is never generated
        ti.thresholdToTest = self.config.AlertGenerator.couchMemPoller.soft - 20
        ti.level = 0
        ti.expected = 0
        ti.thresholdDiff = 10
        ti.testCase = self
        utils.doGenericPeriodAndProcessPolling(ti)


    def testAlertGeneratorCouchErrorsPollerBasic(self):
        try:
            poller = CouchErrorsPoller(self.config.AlertGenerator.couchErrorsPoller, self.generator)
        except Exception as ex:
            self.fail("Exception, reason: %s" % ex)

        # even a single value to observe shall be turned into an iterable
        obs = self.config.AlertGenerator.couchErrorsPoller.observables
        self.config.AlertGenerator.couchErrorsPoller.observables = 400
        try:
            poller = CouchErrorsPoller(self.config.AlertGenerator.couchErrorsPoller, self.generator)
        except Exception as ex:
            self.fail("Exception, reason: %s" % ex)
        #self.assertTrue(isinstance(obs, (list, tuple)))
        self.assertTrue(isinstance(self.config.AlertGenerator.couchErrorsPoller.observables,
                                   (list, tuple)))

        # test the return value on a nonsensical HTTP status code
        res = poller.sample("40000")
        self.assertFalse(res)
        # test definitely existing value
        res = poller.sample("200")
        # on a freshly started couch, this status code may not exist in the
        # stats table, so despite a correct and meaningful HTTP status code,
        # the query may still return None; hence don't assume any particular response
        #self.assertTrue(isinstance(res, types.IntType))
        poller.check()


    def testAlertGeneratorCouchErrorsPollerSoftThreshold(self):
        self.config.AlertGenerator.couchErrorsPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchErrorsPoller.soft = 100
        self.config.AlertGenerator.couchErrorsPoller.critical = 200
        # expect a corresponding number of generated alerts, one for each observable value
        self.config.AlertGenerator.couchErrorsPoller.observables = (5, 6, 7)
        self.config.AlertGenerator.couchErrorsPoller.pollInterval = 0.2
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchErrorsPoller
        ti.config = self.config.AlertGenerator.couchErrorsPoller
        ti.thresholdToTest = self.config.AlertGenerator.couchErrorsPoller.soft
        ti.level = self.config.AlertProcessor.soft.level
        ti.expected = len(self.config.AlertGenerator.couchErrorsPoller.observables)
        ti.thresholdDiff = 10
        ti.testCase = self
        utils.doGenericValueBasedPolling(ti)


    def testAlertGeneratorCouchErrorsPollerCriticalThreshold(self):
        self.config.AlertGenerator.couchErrorsPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchErrorsPoller.soft = 100
        self.config.AlertGenerator.couchErrorsPoller.critical = 200
        # expect a corresponding number of generated alerts, one for each observable value
        self.config.AlertGenerator.couchErrorsPoller.observables = (5, 6, 7)
        self.config.AlertGenerator.couchErrorsPoller.pollInterval = 0.2
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchErrorsPoller
        ti.config = self.config.AlertGenerator.couchErrorsPoller
        ti.thresholdToTest = self.config.AlertGenerator.couchErrorsPoller.critical
        ti.level = self.config.AlertProcessor.critical.level
        ti.expected = len(self.config.AlertGenerator.couchErrorsPoller.observables)
        ti.thresholdDiff = 50
        ti.testCase = self
        utils.doGenericValueBasedPolling(ti)


    def testAlertGeneratorCouchErrorsPollerNoAlert(self):
        self.config.AlertGenerator.couchErrorsPoller.couchURL = os.getenv("COUCHURL", None)
        self.config.AlertGenerator.couchErrorsPoller.soft = 100
        self.config.AlertGenerator.couchErrorsPoller.critical = 200
        # expect a corresponding number of generated alerts, one for each observable value
        self.config.AlertGenerator.couchErrorsPoller.observables = (5, 6, 7)
        self.config.AlertGenerator.couchErrorsPoller.pollInterval = 0.2
        ti = utils.TestInput() # see attributes comments at the class
        ti.pollerClass = CouchErrorsPoller
        ti.config = self.config.AlertGenerator.couchErrorsPoller
        ti.thresholdToTest = self.config.AlertGenerator.couchErrorsPoller.soft - 30
        ti.expected = 0
        ti.thresholdDiff = 20
        ti.testCase = self
        utils.doGenericValueBasedPolling(ti)
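
# Hedged sketch (not from the original source): the threshold tests above repeat
# the same utils.TestInput() wiring; a hypothetical helper like this could build
# it once. The attribute names follow exactly what the tests above assign.
def makeCouchTestInput(testCase, pollerClass, pollerConfig, threshold,
                       level=None, expected=1, thresholdDiff=10):
    ti = utils.TestInput()
    ti.pollerClass = pollerClass
    ti.config = pollerConfig
    ti.thresholdToTest = threshold
    if level is not None:
        ti.level = level
    ti.expected = expected
    ti.thresholdDiff = thresholdDiff
    ti.testCase = testCase
    return ti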
Exemple #43
0
class SetupCMSSWPsetTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testDir = self.testInit.generateWorkDir()
        sys.path.insert(0, os.path.join(WMCore.WMBase.getTestBase(),
                                        "WMCore_t/WMRuntime_t/Scripts_t"))

    def tearDown(self):
        sys.path.remove(os.path.join(WMCore.WMBase.getTestBase(),
                                     "WMCore_t/WMRuntime_t/Scripts_t"))
        del sys.modules["WMTaskSpace"]
        self.testInit.delWorkDir()
        os.unsetenv("WMAGENT_SITE_CONFIG_OVERRIDE")

    def createTestStep(self):
        """
        _createTestStep_

        Create a test step that can be passed to the setup script.
        
        """
        newStep = WMStep("cmsRun1")
        newStepHelper = CMSSWStepHelper(newStep)
        newStepHelper.setStepType("CMSSW")
        newStepHelper.setGlobalTag("SomeGlobalTag")
        stepTemplate = StepFactory.getStepTemplate("CMSSW")
        stepTemplate(newStep)
        newStep.application.command.configuration = "PSet.pkl"
        return newStepHelper
    

    def createTestJob(self):
        """
        _createTestJob_

        Create a test job that has parents for each input file.
        
        """
        newJob = Job(name = "TestJob")
        newJob.addFile(File(lfn = "/some/file/one",
                            parents = set([File(lfn = "/some/parent/one")])))
        newJob.addFile(File(lfn = "/some/file/two",
                            parents = set([File(lfn = "/some/parent/two")])))
        return newJob
    
    
    def testPSetFixup(self):
        """
        _testPSetFixup_

        Verify that all necessary parameters are set in the PSet.
        
        """
        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset        
        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.stepSpace = ConfigSection(name = "stepSpace")
        setupScript.stepSpace.location = self.testDir
        setupScript.job = self.createTestJob()
        setupScript()

        with open(os.path.join(self.testDir, "PSet.pkl"), 'rb') as fixedPSetHandle:
            fixedPSet = pickle.load(fixedPSetHandle)

        self.assertEqual(fixedPSet.GlobalTag.globaltag.value, "SomeGlobalTag",
                         "Error: Wrong global tag.")

        self.assertEqual(len(fixedPSet.source.fileNames.value), 2,
                         "Error: Wrong number of files.")
        self.assertEqual(len(fixedPSet.source.secondaryFileNames.value), 2,
                         "Error: Wrong number of secondary files.")
        self.assertEqual(fixedPSet.source.fileNames.value[0], "/some/file/one",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.fileNames.value[1], "/some/file/two",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.secondaryFileNames.value[0], "/some/parent/one",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.source.secondaryFileNames.value[1], "/some/parent/two",
                         "Error: Wrong input file.")
        self.assertEqual(fixedPSet.maxEvents.input.value, -1,
                         "Error: Wrong maxEvents.")

    
    def testChainedProcesing(self):
        """
        test for chained CMSSW processing - check the overridden TFC, its values
        and that the input files are set correctly.
        
        """
        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset        
        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.stepSpace = ConfigSection(name = "stepSpace")
        setupScript.stepSpace.location = self.testDir
        setupScript.job = self.createTestJob()
        setupScript.step.setupChainedProcessing("my_first_step", "my_input_module")
        setupScript()

        # test that the overridden TFC is right
        self.assertTrue(hasattr(setupScript.step.data.application, "overrideCatalog"),
                        "Error: overridden TFC was not set")
        tfc = loadTFC(setupScript.step.data.application.overrideCatalog)
        inputFile = "../my_first_step/my_input_module.root"
        self.assertEqual(tfc.matchPFN("direct", inputFile), inputFile)
        self.assertEqual(tfc.matchLFN("direct", inputFile), inputFile)

        self.assertEqual(setupScript.process.source.fileNames.value, [inputFile])
        
        
    def testPileupSetup(self):
        """
        Test the pileup setting.

        reference (setupScript.process instance):
        in test/python/WMCore_t/WMRuntime_t/Scripts_t/WMTaskSpace/cmsRun1/PSet.py
        
        """
        try:
            from DBSAPI.dbsApi import DbsApi
        except ImportError:
            raise nose.SkipTest

        # this is a modified and shortened version of
        # WMCore/test/python/WMCore_t/Misc_t/site-local-config.xml
        # since the dataset name in question (below) is only present at
        # storm-fe-cms.cr.cnaf.infn.it, need to make the test think it's its local SE
        siteLocalConfigContent = \
        """
<site-local-config>
    <site name="-SOME-SITE-NAME-">
        <event-data>
            <catalog url="trivialcatalog_file:/uscmst1/prod/sw/cms/SITECONF/T1_US_FNAL/PhEDEx/storage.xml?protocol=dcap"/>
        </event-data>
        <local-stage-out>
            <!-- original cmssrm.fnal.gov -->
            <se-name value="storm-fe-cms.cr.cnaf.infn.it"/>
            <command value="test-copy"/>
            <catalog url="trivialcatalog_file:/uscmst1/prod/sw/cms/SITECONF/T1_US_FNAL/PhEDEx/storage.xml?protocol=dcap"/>
        </local-stage-out>
        <calib-data>
            <frontier-connect>
                <load balance="proxies"/>
                <proxy url="http://cmsfrontier1.fnal.gov:3128"/>
                <proxy url="http://cmsfrontier2.fnal.gov:3128"/>
            </frontier-connect>
        </calib-data>
    </site>
</site-local-config>
"""
        siteLocalConfig = os.path.join(self.testDir, "test-site-local-config.xml")
        f = open(siteLocalConfig, 'w')
        f.write(siteLocalConfigContent)
        f.close()

        from WMCore.WMRuntime.Scripts.SetupCMSSWPset import SetupCMSSWPset        
        setupScript = SetupCMSSWPset()
        setupScript.step = self.createTestStep()
        setupScript.stepSpace = ConfigSection(name = "stepSpace")
        setupScript.stepSpace.location = os.path.join(self.testDir, "cmsRun1")
        setupScript.job = self.createTestJob()
        # define the pileup configuration
        # although the implementation accepts any pileup type, only the "data"
        # and "mc" types are eventually considered and lead to modifications
        # of the job input files
        pileupConfig = {"data": ["/Mu/PenguinsPenguinsEverywhere-SingleMu-HorriblyJaundicedYellowEyedPenginsSearchingForCarrots-v31/RECO"],
                        "mc": ["/Mu/PenguinsPenguinsEverywhere-SingleMu-HorriblyJaundicedYellowEyedPenginsSearchingForCarrots-v31/RECO"]}
        dbsUrl = "http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
        setupScript.step.setupPileup(pileupConfig, dbsUrl)
        # SetupCMSSWPset pileup handling consults SiteLocalConfig to determine
        # the storage (SE) name the job is running on.
        # SiteLocalConfig loads the site-local-config.xml file from the location
        # given by an environment variable; if that variable is not defined yet, set it.
        # Obviously, if "WMAGENT_SITE_CONFIG_OVERRIDE" is already set at this point,
        # the above trick with the SE name is not effective.
        if not os.getenv("WMAGENT_SITE_CONFIG_OVERRIDE", None):
            os.environ["WMAGENT_SITE_CONFIG_OVERRIDE"] = siteLocalConfig
        # find out local site name from the testing local site config,
        # will be needed later
        siteConfig = loadSiteLocalConfig()
        seLocalName = siteConfig.localStageOut["se-name"]
        print "Running on site '%s', local SE name: '%s'" % (siteConfig.siteName, seLocalName)
        
        # before calling the script, SetupCMSSWPset will try to load the JSON
        # pileup configuration file, so it needs to be created in self.testDir
        fetcher = PileupFetcher()
        fetcher.setWorkingDirectory(self.testDir)
        fetcher._createPileupConfigFile(setupScript.step)
                
        setupScript()
        
        # now test all modifications carried out in SetupCMSSWPset.__call__
        # which will also test that CMSSWStepHelper.setupPileup run correctly
        mixModules, dataMixModules = setupScript._getPileupMixingModules()

        # load in the pileup configuration in the form of dict which
        # PileupFetcher previously saved in a JSON file
        pileupDict = setupScript._getPileupConfigFromJson()
        
        # get the sub-dict for the particular pileup type
        # for the pileupDict structure description, see PileupFetcher._queryDbsAndGetPileupConfig
        for pileupType, modules in zip(("data", "mc"), (dataMixModules, mixModules)):
            # a KeyError here means the pileupConfig above is not correct - it
            # needs to contain both of these pileup types
            d = pileupDict[pileupType]
            self._mixingModulesInputFilesTest(modules, d, seLocalName)        
Exemple #44
0
class Scram_t(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testDir = self.testInit.generateWorkDir()
        self.oldCwd = os.getcwd()
        if PY3:
            self.assertItemsEqual = self.assertCountEqual

    def tearDown(self):
        self.testInit.delWorkDir()

    def testA(self):
        """
        instantiate a Scram instance in test mode.
        """
        try:
            Scram(initialise="/bin/date",
                  architecture="slc5_amd64_gcc454",
                  version="CMSSW_X_Y_Z",
                  test=True)
        except Exception as ex:
            msg = "Failed to instantiate Scram in test mode:\n %s " % str(ex)
            self.fail(msg)

    def testB(self):
        """
        instantiate a Scram instance in non-test mode;
        limited in what we can test here since we don't have scram etc. in the unittest env
        """
        try:
            Scram(initialise="/bin/date",
                  architecture="slc5_amd64_gcc454",
                  version="CMSSW_X_Y_Z")
        except Exception as ex:
            msg = "Failed to instantiate Scram:\n %s " % str(ex)
            self.fail(msg)

    def testC(self):
        """
        test all method calls in test mode

        """
        s = Scram(initialise="/bin/date",
                  architecture="slc5_amd64_gcc454",
                  version="CMSSW_X_Y_Z",
                  directory=self.testDir,
                  test=True)

        try:
            status = s.project()
        except Exception as ex:
            msg = "Error running Scram.project:\n %s" % str(ex)
            self.fail(msg)

        self.assertEqual(status, 0)
        self.assertTrue(os.path.exists(s.projectArea))
        self.assertTrue("project" in s.lastExecuted)
        self.assertTrue("CMSSW_X_Y_Z" in s.lastExecuted)

        try:
            status = s.runtime()
        except Exception as ex:
            msg = "Error running Scram.runtime:\n %s" % str(ex)
            self.fail(msg)

        self.assertEqual(status, 0)
        self.assertTrue("ru -sh" in s.lastExecuted)
        self.assertTrue("TEST_MODE" in s.runtimeEnv)

        comm = "echo \"Hello World\""
        try:
            status = s(comm)
        except Exception as ex:
            msg = "Failed to call Scram object:\n %s" % str(ex)
            self.fail(msg)

        self.assertEqual(status, 0)
        self.assertEqual(s.lastExecuted, comm)

    def testArchMap(self):
        self.assertItemsEqual(OS_TO_ARCH['rhel6'], ['slc5', 'slc6'])
        self.assertItemsEqual(OS_TO_ARCH['rhel7'], ['slc7'])
        self.assertItemsEqual(ARCH_TO_OS['slc6'], ['rhel6'])
        self.assertItemsEqual(ARCH_TO_OS['slc7'], ['rhel7'])
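
    # Hedged sketch (not the WMCore definitions): mappings consistent with the
    # assertions in testArchMap() above; the real OS_TO_ARCH / ARCH_TO_OS tables
    # may contain more entries than shown here.
    OS_TO_ARCH_EXAMPLE = {'rhel6': ['slc5', 'slc6'], 'rhel7': ['slc7']}
    ARCH_TO_OS_EXAMPLE = {'slc6': ['rhel6'], 'slc7': ['rhel7']}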

    def testScramArchParsing(self):
        """
        Test the various modes of parsing for the scram arch
        """
        try:
            os.chdir(self.testDir)
            with tempfile.NamedTemporaryFile() as tf:
                tf.write(b'GLIDEIN_REQUIRED_OS = "rhel6" \n')
                tf.write(b'Memory = 2048\n')
                tf.flush()
                with tmpEnv(_CONDOR_MACHINE_AD=tf.name):
                    self.assertEqual(getSingleScramArch('slc6_blah_blah'),
                                     'slc6_blah_blah')
                    self.assertEqual(getSingleScramArch('slc5_blah_blah'),
                                     'slc5_blah_blah')
                    self.assertEqual(
                        getSingleScramArch(
                            ['slc6_blah_blah', 'slc7_blah_blah']),
                        'slc6_blah_blah')
                    self.assertEqual(
                        getSingleScramArch(
                            ['slc6_blah_blah', 'slc5_blah_blah']),
                        'slc6_blah_blah')
                    self.assertEqual(
                        getSingleScramArch(
                            ['slc7_blah_blah', 'slc8_blah_blah']), None)
            with tempfile.NamedTemporaryFile() as tf:
                tf.write(b'GLIDEIN_REQUIRED_OS = "rhel7" \n')
                tf.write(b'Memory = 2048\n')
                tf.flush()
                with tmpEnv(_CONDOR_MACHINE_AD=tf.name):
                    self.assertEqual(getSingleScramArch('slc6_blah_blah'),
                                     'slc6_blah_blah')
                    self.assertEqual(getSingleScramArch('slc7_blah_blah'),
                                     'slc7_blah_blah')
                    self.assertEqual(
                        getSingleScramArch(
                            ['slc6_blah_blah', 'slc7_blah_blah']),
                        'slc7_blah_blah')
                    self.assertEqual(
                        getSingleScramArch(
                            ['slc6_blah_blah', 'slc5_blah_blah']), None)
                    self.assertEqual(
                        getSingleScramArch(
                            ['slc7_blah_blah', 'slc8_blah_blah']),
                        'slc7_blah_blah')
        except Exception:
            raise
        finally:
            os.chdir(self.oldCwd)
        return
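
    # Hedged sketch (not the real test utility): tmpEnv, as used in
    # testScramArchParsing() above, behaves like a context manager that
    # temporarily sets environment variables and restores them on exit;
    # a minimal stand-in could look like this.
    import contextlib  # local import just for this sketch; normally module-level

    @staticmethod
    @contextlib.contextmanager
    def _tmpEnvSketch(**environ):
        # remember the original values (None means "was not set")
        original = {key: os.environ.get(key) for key in environ}
        os.environ.update(environ)
        try:
            yield
        finally:
            # restore or remove the variables we touched
            for key, value in original.items():
                if value is None:
                    os.environ.pop(key, None)
                else:
                    os.environ[key] = value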

    def testCMSSWSupported(self):
        """
        Test the functionality of isCMSSWSupported function
        """
        self.assertFalse(isCMSSWSupported('CMSSW_1_2_3', ''))
        self.assertFalse(isCMSSWSupported(None, 'a'))
        self.assertFalse(isCMSSWSupported('CMSSW_1_2_3', 'CMSSW_2_2_3'))
        self.assertFalse(isCMSSWSupported('CMSSW_1_2_3', 'CMSSW_1_3_3'))
        self.assertFalse(isCMSSWSupported('CMSSW_1_2_3', 'CMSSW_1_2_4'))
        self.assertFalse(isCMSSWSupported('CMSSW_1_2_3_pre1', 'CMSSW_1_2_3'))
        self.assertFalse(isCMSSWSupported('CMSSW_1_2_3', 'CMSSW_1_2_3_pre1'))
        self.assertFalse(
            isCMSSWSupported('CMSSW_1_2_3_pre1', 'CMSSW_1_2_3_pre2'))
        self.assertFalse(
            isCMSSWSupported('CMSSW_1_2_3_pre2', 'CMSSW_1_2_3_pre1'))
        self.assertFalse(isCMSSWSupported('CMSSW_7_1_25_patch2',
                                          'CMSSW_7_6_0'))
        self.assertFalse(isCMSSWSupported('CMSSW_7_3_2', 'CMSSW_10_4_0'))

        self.assertTrue(
            isCMSSWSupported('CMSSW_1_2_3_pre1', 'CMSSW_1_2_3_pre1'))
        self.assertTrue(isCMSSWSupported('CMSSW_1_2_3', 'CMSSW_1_2_3'))
        self.assertTrue(isCMSSWSupported('CMSSW_2_2_3', 'CMSSW_1_2_3'))
        self.assertTrue(isCMSSWSupported('CMSSW_1_3_3', 'CMSSW_1_2_3'))
        self.assertTrue(isCMSSWSupported('CMSSW_1_2_4', 'CMSSW_1_2_3'))
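
    # Hedged sketch (not the WMCore implementation): a version comparison that
    # is consistent with the assertions in testCMSSWSupported() above. Empty or
    # None inputs are unsupported, "pre" releases are only supported when the
    # two strings are identical, otherwise the first three numeric fields are
    # compared.
    @staticmethod
    def _isCMSSWSupportedSketch(release, allowed):
        if not release or not allowed:
            return False
        if release == allowed:
            return True
        if 'pre' in release or 'pre' in allowed:
            return False

        def numbers(version):
            # "CMSSW_X_Y_Z[_suffix]" -> (X, Y, Z)
            return tuple(int(part) for part in version.split('_')[1:4])

        return numbers(release) >= numbers(allowed)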

    def testisEnforceGUIDInFileNameSupported(self):
        """
        Test functionality of the `isEnforceGUIDInFileNameSupported` function
        """
        ### invalid input
        self.assertFalse(isEnforceGUIDInFileNameSupported(None))
        self.assertFalse(isEnforceGUIDInFileNameSupported(''))

        ### forever supported
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_11_0_0'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_11_0_2'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_11_1_0_pre1'))
        self.assertTrue(
            isEnforceGUIDInFileNameSupported('CMSSW_11_1_0_patch1'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_11_1_1'))

        ### specific releases
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_10_2_20_UL'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_9_4_16_UL'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_8_0_34_UL'))
        self.assertTrue(
            isEnforceGUIDInFileNameSupported('CMSSW_7_1_45_patch3'))

        ### minor supported releases
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_10_6_8'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_10_6_9'))
        self.assertTrue(
            isEnforceGUIDInFileNameSupported('CMSSW_10_6_8_patch1'))
        self.assertTrue(
            isEnforceGUIDInFileNameSupported('CMSSW_10_6_9_patch1'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_10_2_20'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_9_4_16'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_9_3_17'))
        self.assertTrue(isEnforceGUIDInFileNameSupported('CMSSW_8_0_34'))

        ### releases not supported
        self.assertFalse(isEnforceGUIDInFileNameSupported('CMSSW_10_6_7'))
        self.assertFalse(isEnforceGUIDInFileNameSupported('CMSSW_10_7_0'))
        self.assertFalse(isEnforceGUIDInFileNameSupported('CMSSW_10_2_19'))
        self.assertFalse(isEnforceGUIDInFileNameSupported('CMSSW_10_3_10'))
        self.assertFalse(isEnforceGUIDInFileNameSupported('CMSSW_5_3_10'))
Exemple #45
0
class AgentTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel=logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = getConfig(self.testDir)
        # mock generator instance to communicate some configuration values
        self.generator = utils.AlertGeneratorMock(self.config)
        self.testComponentDaemonXml = os.path.join(self.testDir, "Daemon.xml")

    def tearDown(self):
        self.testInit.delWorkDir()
        self.generator = None

    def testComponentsPollerBasic(self):
        """
        Test ComponentsPoller class.
        Beware of different process context in real running.

        """
        config = getConfig("/tmp")
        config.component_("AlertProcessor")
        config.AlertProcessor.section_("critical")
        config.AlertProcessor.section_("soft")
        config.AlertProcessor.critical.level = 5
        config.AlertProcessor.soft.level = 0
        config.component_("AlertGenerator")
        config.AlertGenerator.section_("bogusPoller")
        config.AlertGenerator.bogusPoller.soft = 5  # [percent]
        config.AlertGenerator.bogusPoller.critical = 90  # [percent]
        config.AlertGenerator.bogusPoller.pollInterval = 2  # [second]
        # period during which measurements are collected before evaluating for possible alert triggering
        config.AlertGenerator.bogusPoller.period = 10

        # need to create a temp directory, a real process and its
        # Daemon.xml so that it looks like an agent component process,
        # and then check back the information
        pid = os.getpid()
        config.component_("TestComponent")
        d = os.path.dirname(self.testComponentDaemonXml)
        config.TestComponent.componentDir = d
        if not os.path.exists(d):
            os.mkdir(d)
        f = open(self.testComponentDaemonXml, 'w')
        f.write(utils.daemonXmlContent % dict(PID_TO_PUT=pid))
        f.close()

        generator = utils.AlertGeneratorMock(config)
        poller = ComponentsPoller(config.AlertGenerator.bogusPoller, generator)

        # only 1 component should have valid workDir with proper Daemon.xml content
        # other components present in the configuration (AlertProcessor, AlertGenerator)
        # should have been ignored
        self.assertEqual(len(poller._components), 1)
        pd = poller._components[0]
        self.assertEqual(pd.pid, pid)
        self.assertEqual(pd.name, "TestComponent")
        self.assertEqual(len(poller._compMeasurements), 1)
        mes = poller._compMeasurements[0]
        numMeasurements = round(
            config.AlertGenerator.bogusPoller.period /
            config.AlertGenerator.bogusPoller.pollInterval, 0)
        self.assertEqual(mes._numOfMeasurements, numMeasurements)

        shutil.rmtree(d)

    def _doComponentsPoller(self,
                            thresholdToTest,
                            level,
                            config,
                            pollerClass,
                            expected=0):
        """
        Components pollers have array of Measurements and ProcessDetails
        which make it more difficult to factory with test methods from the
        utils module.

        """
        handler, receiver = utils.setUpReceiver(
            self.generator.config.Alert.address,
            self.generator.config.Alert.controlAddr)

        # need some real process to poll, use this process itself
        pid = os.getpid()
        # the input configuration doesn't have the component work directories set
        # right, rectify that here: the configuration (see with what
        # _doComponentsPoller is called) has two components defined:
        # AlertGenerator and AlertProcessor
        configInstance = Configuration.getInstance()
        for comp in Configuration.getInstance().listComponents_():
            compDir = getattr(configInstance, comp).componentDir
            compDir = os.path.join(compDir, comp)
            setattr(getattr(configInstance, comp), "componentDir", compDir)
            os.makedirs(compDir)
            f = open(os.path.join(compDir, "Daemon.xml"), 'w')
            f.write(utils.daemonXmlContent % dict(PID_TO_PUT=pid))
            f.close()

        numMeasurements = config.period / config.pollInterval
        poller = pollerClass(config, self.generator)
        # inject own input sample data provider
        # there is in fact an input argument in this case which needs to be ignored
        poller.sample = lambda proc_: random.randint(thresholdToTest,
                                                     thresholdToTest + 10)

        # the poller will run over the components (as defined in the configuration)
        # and poll them; the PID, etc. will be read from each componentDir
        poller.start()
        self.assertTrue(poller.is_alive())

        if expected != 0:
            # watch so that the test can't take forever; fail after 2 minutes
            timeLimitExceeded = False
            startTime = datetime.datetime.now()
            limitTime = 2 * 60  # seconds
            while len(handler.queue) == 0:
                time.sleep(config.pollInterval / 5)
                if (datetime.datetime.now() - startTime).seconds > limitTime:
                    timeLimitExceeded = True
                    break
        else:
            time.sleep(config.period * 2)

        poller.terminate()
        receiver.shutdown()
        self.assertFalse(poller.is_alive())

        if expected != 0:
            if timeLimitExceeded:
                self.fail("No alert received in %s seconds." % limitTime)
            # exactly the expected number of alerts should have been received;
            # the poller should not have had the chance to send any more
            self.assertEqual(len(handler.queue), expected)
            a = handler.queue[0]
            # the alert should carry the expected level (soft or critical)
            self.assertEqual(a["Level"], level)
            self.assertEqual(a["Component"], self.generator.__class__.__name__)
            self.assertEqual(a["Source"], poller.__class__.__name__)

    def testComponentsCPUPollerSoftThreshold(self):
        self.config.AlertGenerator.componentsCPUPoller.soft = 70
        self.config.AlertGenerator.componentsCPUPoller.critical = 80
        self.config.AlertGenerator.componentsCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsCPUPoller.period = 1
        level = self.config.AlertProcessor.soft.level
        thresholdToTest = self.config.AlertGenerator.componentsCPUPoller.soft
        # expected 2: 2 components are defined by the configuration, an alert
        # will be sent for each of them
        self._doComponentsPoller(
            thresholdToTest,
            level,
            self.config.AlertGenerator.componentsCPUPoller,
            ComponentsCPUPoller,
            expected=2)

    def testComponentsCPUPollerCriticalThreshold(self):
        self.config.AlertGenerator.componentsCPUPoller.soft = 70
        self.config.AlertGenerator.componentsCPUPoller.critical = 80
        self.config.AlertGenerator.componentsCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsCPUPoller.period = 1
        level = self.config.AlertProcessor.critical.level
        thresholdToTest = self.config.AlertGenerator.componentsCPUPoller.critical
        # expected 2: 2 components are defined by the configuration, an alert
        # will be sent for each of them
        self._doComponentsPoller(
            thresholdToTest,
            level,
            self.config.AlertGenerator.componentsCPUPoller,
            ComponentsCPUPoller,
            expected=2)

    def testComponentsCPUPollerNoAlert(self):
        self.config.AlertGenerator.componentsCPUPoller.soft = 70
        self.config.AlertGenerator.componentsCPUPoller.critical = 80
        self.config.AlertGenerator.componentsCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsCPUPoller.period = 1
        level = 0
        # lower the threshold so that the alert never happens
        thresholdToTest = self.config.AlertGenerator.componentsCPUPoller.soft - 10
        self._doComponentsPoller(
            thresholdToTest,
            level,
            self.config.AlertGenerator.componentsCPUPoller,
            ComponentsCPUPoller,
            expected=0)

    def testComponentsMemoryPollerSoftThreshold(self):
        self.config.AlertGenerator.componentsMemPoller.soft = 70
        self.config.AlertGenerator.componentsMemPoller.critical = 80
        self.config.AlertGenerator.componentsMemPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsMemPoller.period = 1
        level = self.config.AlertProcessor.soft.level
        thresholdToTest = self.config.AlertGenerator.componentsMemPoller.soft
        # expected 2: 2 components are defined by the configuration, an alert
        # will be sent for each of them
        self._doComponentsPoller(
            thresholdToTest,
            level,
            self.config.AlertGenerator.componentsMemPoller,
            ComponentsMemoryPoller,
            expected=2)

    def testComponentsMemoryPollerCriticalThreshold(self):
        self.config.AlertGenerator.componentsMemPoller.soft = 70
        self.config.AlertGenerator.componentsMemPoller.critical = 80
        self.config.AlertGenerator.componentsMemPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsMemPoller.period = 1
        level = self.config.AlertProcessor.critical.level
        thresholdToTest = self.config.AlertGenerator.componentsMemPoller.critical
        # expected 2: 2 components are defined by the configuration, an alert
        # will be sent for each of them
        self._doComponentsPoller(
            thresholdToTest,
            level,
            self.config.AlertGenerator.componentsMemPoller,
            ComponentsMemoryPoller,
            expected=2)

    def testComponentsMemoryPollerNoAlert(self):
        self.config.AlertGenerator.componentsMemPoller.soft = 70
        self.config.AlertGenerator.componentsMemPoller.critical = 80
        self.config.AlertGenerator.componentsMemPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsMemPoller.period = 1
        level = 0
        # lower the threshold so that the alert never happens
        thresholdToTest = self.config.AlertGenerator.componentsCPUPoller.soft - 10
        self._doComponentsPoller(
            thresholdToTest,
            level,
            self.config.AlertGenerator.componentsMemPoller,
            ComponentsMemoryPoller,
            expected=0)

    def testComponentsCPUPollerPossiblyOnLiveAgent(self):
        """
        If there is an agent currently running with the WMAGENT_CONFIG
        configuration, then the test will pick up its live processes
        and poll them.

        """
        # check if the live agent configuration was loaded (above this class)
        if "config" in globals():
            self.config = config
            # AlertProcessor values - values for Level soft, resp. critical
            # are also needed by this AlertGenerator test
            self.config.component_("AlertProcessor")
            self.config.AlertProcessor.componentDir = "/tmp"
            self.config.AlertProcessor.section_("critical")
            self.config.AlertProcessor.section_("soft")
            self.config.AlertProcessor.critical.level = 5
            self.config.AlertProcessor.soft.level = 0

            self.config.component_("AlertGenerator")
            self.config.AlertGenerator.componentDir = "/tmp"
            self.config.section_("Alert")
            self.config.Alert.address = "tcp://127.0.0.1:6557"
            self.config.Alert.controlAddr = "tcp://127.0.0.1:6559"

            self.config.AlertGenerator.section_("componentsCPUPoller")
        else:
            self.config = getConfig("/tmp")

        self.config.AlertGenerator.componentsCPUPoller.soft = 70
        self.config.AlertGenerator.componentsCPUPoller.critical = 80
        self.config.AlertGenerator.componentsCPUPoller.pollInterval = 0.2
        self.config.AlertGenerator.componentsCPUPoller.period = 0.3

        # a generator was already instantiated in setUp, but we need another one
        # built from the configuration defined just above
        # mock generator instance to communicate some configuration values
        self.generator = utils.AlertGeneratorMock(self.config)

        handler, receiver = utils.setUpReceiver(
            self.generator.config.Alert.address,
            self.generator.config.Alert.controlAddr)

        numMeasurements = (self.config.AlertGenerator.componentsCPUPoller.period /
                           self.config.AlertGenerator.componentsCPUPoller.pollInterval)
        poller = ComponentsCPUPoller(
            self.config.AlertGenerator.componentsCPUPoller, self.generator)
        # inject own input sample data provider
        thresholdToTest = self.config.AlertGenerator.componentsCPUPoller.soft
        # the sample method receives a process argument in this case, which this stub ignores
        poller.sample = lambda proc_: random.randint(thresholdToTest - 10,
                                                     thresholdToTest)

        poller.start()
        self.assertTrue(poller.is_alive())

        # no alert shall arrive
        time.sleep(5 * self.config.AlertGenerator.componentsCPUPoller.period)

        poller.terminate()
        receiver.shutdown()
        self.assertFalse(poller.is_alive())
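
# Illustrative sketch (not the WMCore implementation): the poller tests above
# assume that measurements collected every 'pollInterval' are averaged over
# 'period' and the average is then compared against the configured 'soft' and
# 'critical' thresholds to decide which alert level, if any, to send.
# All names below are hypothetical; only the threshold semantics mirror the
# configuration used in the tests (soft=70, critical=80, levels 0 and 5).
def classify_measurement(average, soft=70, critical=80, softLevel=0, criticalLevel=5):
    """Return the alert level for an averaged measurement, or None for no alert."""
    if average >= critical:
        return criticalLevel
    if average >= soft:
        return softLevel
    return None

# classify_measurement(85) -> 5    (critical alert, as in the CriticalThreshold tests)
# classify_measurement(75) -> 0    (soft alert, as in the SoftThreshold tests)
# classify_measurement(60) -> None (no alert, as in the NoAlert tests)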
Exemple #46
0
class RuntimeTest(unittest.TestCase):
    """
    _RuntimeTest_

    A unittest to test the WMRuntime/WMSpec/Storage/etc tree
    """


    # This is an integration test
    __integration__ = "Any old bollocks"


    def setUp(self):
        """
        Basic setUp

        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()


        self.testDir = self.testInit.generateWorkDir()

        # Random variables
        self.workloadDir = None
        self.unpackDir   = None
        self.initialDir  = os.getcwd()
        self.origPath    = sys.path


        # Create some dirs
        os.makedirs(os.path.join(self.testDir, 'packages'))

        return


    def tearDown(self):
        """
        _tearDown_
        
        Remove any references you put directly into the modules
        """

        self.testInit.delWorkDir()

        # Clean up imports
        if 'WMSandbox' in sys.modules.keys():
            del sys.modules['WMSandbox']
        if 'WMSandbox.JobIndex' in sys.modules.keys():
            del sys.modules['WMSandbox.JobIndex']
        

        return


    def createTestWorkload(self, workloadName = 'Test', emulator = True):
        """
        _createTestWorkload_

        Creates a test workload for us to run on, holding the basic necessities.
        """

        workloadDir = os.path.join(self.testDir, workloadName)

        #arguments = getTestArguments()

        #workload = rerecoWorkload("Tier1ReReco", arguments)
        #rereco = workload.getTask("ReReco")

        workload = testWorkload(emulation = emulator)
        rereco = workload.getTask("ReReco")

        # Set environment and site-local-config
        siteConfigPath = os.path.join(workloadDir, 'SITECONF/local/JobConfig/')
        if not os.path.exists(siteConfigPath):
            os.makedirs(siteConfigPath)
        shutil.copy('site-local-config.xml', siteConfigPath)
        environment = rereco.data.section_('environment')
        environment.CMS_PATH = workloadDir

        taskMaker = TaskMaker(workload, workloadDir)
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        workload.save(workloadName)

        return workload



    def unpackComponents(self, workload):
        """
        Run the unpacker to build the directories.
        IMPORTANT NOTE:
          This is not how things are done on the worker node: there we do not
          run multiple tasks. Here we create multiple tasks in different
          directories to mimic running on multiple systems.

        """

        listOfTasks = getListOfTasks(workload = workload)

        self.unpackDir = os.path.join(self.testDir, 'unpack')

        if not os.path.exists(self.unpackDir):
            os.makedirs(self.unpackDir)

        os.chdir(self.unpackDir)

        sandbox  = workload.data.sandbox

        for task in listOfTasks:
            # We have to create a directory, unpack in it, and then get out
            taskName = task.name()
            taskDir = os.path.join(self.unpackDir, taskName)
            if not os.path.exists(taskDir):
                # Well then we have to make it
                os.makedirs(taskDir)
            os.chdir(taskDir)
            # Now that we're here, run the unpacker

            package  = os.path.join(self.testDir, 'packages', '%sJobPackage.pkl' % (taskName))
            jobIndex = 1

            RunUnpacker(sandbox = sandbox, package = package,
                        jobIndex = jobIndex, jobname = taskName)

            # And go back to where we started
            os.chdir(self.unpackDir)


        os.chdir(self.initialDir)

        return


    def createWMBSComponents(self, workload):
        """
        Create the WMBS Components for this job

        """

        listOfTasks = []
        listOfSubs  = []

        rerecoTask  = None
        
        for primeTask in workload.taskIterator():
            # There should only be one prime task, and it should be the rerecoTask
            rerecoTask = primeTask
            for task in primeTask.taskIterator():
                listOfTasks.append(task)

        for task in listOfTasks:
            fileset = self.getFileset()
            sub = self.createSubscriptions(task = task,
                                           fileset = fileset)
            #listOfSubs.append(sub)

        

        return



    def createSubscriptions(self, task, fileset):
        """
        Create a subscription based on a task


        """
        type = task.taskType()
        work = task.makeWorkflow()
        
        sub = Subscription(fileset = fileset,
                           workflow = work,
                           split_algo = "FileBased",
                           type = type)

        package = self.createWMBSJobs(subscription = sub,
                                      task = task)        

        packName = os.path.join(self.testDir, 'packages',
                                '%sJobPackage.pkl' %(task.name()))
        package.save(packName)

        return sub




    def createWMBSJobs(self, subscription, task):
        """
        Create the jobs for WMBS Components
        Send a subscription/task, get back a package.

        """

        splitter = SplitterFactory()
        jobfactory = splitter(subscription = subscription,
                              package = "WMCore.DataStructs",
                              generators = makeGenerators(task))
        params = task.jobSplittingParameters()
        jobGroups = jobfactory(**params)

        jobID = 1
        package = JobPackage()
        for group in jobGroups:
            for job in group.jobs:
                job['id'] = jobID
                jobID += 1
                package[job['id']] = job

        return package


    

    def getFileset(self):
        """
        Get a fileset based on the task

        """

        fileset = Fileset(name = 'Merge%s' %(type))


        for i in range(0, random.randint(15,25)):
            # Use the testDir to generate a random lfn
            inpFile = File(lfn = "%s/%s.root" %(self.testDir, makeUUID()),
                           size = random.randint(200000, 1000000),
                           events = random.randint(1000,2000) )
            inpFile.setLocation('Megiddo')
            fileset.addFile(inpFile)


        return fileset



    def runJobs(self, workload):
        """
        This might actually run the job.  Who knows?


        """
        listOfTasks = []

        for primeTask in workload.taskIterator():
            listOfTasks.append(primeTask)
            # Only run primeTasks for now


        for task in listOfTasks:
            jobName = task.name()
            taskDir = os.path.join(self.unpackDir, jobName, 'job')
            os.chdir(taskDir)
            sys.path.append(taskDir)

            # Scream, run around in panic, blow up machine
            print "About to run jobs"
            print taskDir
            miniStartup(dir = taskDir)
            

            # When exiting, go back to where you started
            os.chdir(self.initialDir)
            sys.path.remove(taskDir)
            
        return





    def testA_CreateWorkload(self):
        """
        _CreateWorkload_
        
        Create a workload
        Unpack the workload
        Check for consistency
        """

        workloadName = 'basicWorkload'
        workload     = self.createTestWorkload(workloadName = workloadName)

        self.createWMBSComponents(workload = workload)

        taskNames = []
        for task in getListOfTasks(workload = workload):
            taskNames.append(task.name())

        workloadPath  = os.path.join(self.testDir, workloadName, "TestWorkload")
        siteConfigDir = os.path.join(self.testDir, workloadName, 'SITECONF/local/JobConfig/')


        # Pre-run checks

        # Does it have the right directories?
        dirList = os.listdir(workloadPath)
        self.assertItemsEqual(dirList, ['WMSandbox', 'TestWorkload-Sandbox.tar.bz2'])
        dirList = os.listdir(os.path.join(workloadPath, 'WMSandbox'))
        for taskName in taskNames:
            self.assertTrue(taskName in dirList)

        # Do we have job packages
        for task in taskNames:
            self.assertTrue('%sJobPackage.pkl' % (task) in os.listdir(os.path.join(self.testDir, 'packages')))


        # Does it have the SITECONF?
        self.assertTrue('site-local-config.xml' in os.listdir(siteConfigDir))



        # Now actually see if you can unpack it.
        self.unpackComponents(workload = workload)


        # Check for proper unpacking
        # Check that the task has the right directories,
        # and that the PSetTweaks and WMSandbox directories
        # have the right contents
        taskContents = ['WMSandbox', 'WMCore', 'PSetTweaks']
        PSetContents = ['PSetTweak.pyc', 'CVS', 'PSetTweak.py',
                        '__init__.pyc', 'WMTweak.py', '__init__.py']
        taskSandbox  = ['JobPackage.pcl', 'JobIndex.py', '__init__.py', 'WMWorkload.pkl']
        taskSandbox.extend(taskNames)  # Should have a directory for each task

        for task in taskNames:
            self.assertTrue(task in os.listdir(os.path.join(self.testDir, 'unpack')))
            taskDir = os.path.join(self.testDir, 'unpack', task, 'job')
            self.assertTrue(os.path.isdir(taskDir))
            self.assertEqual(sorted(os.listdir(taskDir)), sorted(taskContents))
            self.assertEqual(sorted(os.listdir(os.path.join(taskDir, 'WMSandbox'))),
                             sorted(taskSandbox))
            self.assertEqual(sorted(os.listdir(os.path.join(taskDir, 'PSetTweaks'))),
                             sorted(PSetContents))


        # And we're done.
        # Assume if we got this far everything is good
        

        # At the end, copy the directory
        #if os.path.exists('tmpDir'):
        #    shutil.rmtree('tmpDir')
        #shutil.copytree(self.testDir, 'tmpDir')

        return


    def testB_EmulatorTest(self):
        """
        _EmulatorTest_
        
        This is where things get scary.  We need to not only unpack the job,
        but also ascertain whether it can run locally in emulator mode.

        This requires...uh...emulator emulation.
        """


        # Assume all this works, because we tested it in testA
        workloadName = 'basicWorkload'
        workload     = self.createTestWorkload(workloadName = workloadName)

        self.createWMBSComponents(workload = workload)

        self.unpackComponents(workload = workload)


        self.runJobs(workload = workload)

        # Check the report
        taskDir = os.path.join(self.testDir, 'unpack/ReReco/job/WMTaskSpace')
        report = Report()
        report.load(os.path.join(taskDir, 'Report.0.pkl'))
        cmsReport = report.data.cmsRun1



        # Now validate the report
        self.assertEqual(report.data.ceName, socket.gethostname())
        self.assertEqual(report.data.seName, 'cmssrm.fnal.gov')
        self.assertEqual(report.data.siteName, 'T1_US_FNAL')
        self.assertEqual(report.data.hostName, socket.gethostname())
        self.assertTrue(report.data.completed)

        # Should have status 0 (emulator job)
        self.assertEqual(cmsReport.status, 0)

        # Should have one output module
        self.assertEqual(cmsReport.outputModules, ['TestOutputModule'])

        # It should have one file for input and output
        self.assertEqual(cmsReport.input.PoolSource.files.fileCount, 1)
        self.assertEqual(cmsReport.output.TestOutputModule.files.fileCount, 1)

        # So, um, I guess we're done


        # At the end, copy the directory
        #if os.path.exists('tmpDir'):
        #    shutil.rmtree('tmpDir')
        #shutil.copytree(self.testDir, 'tmpDir')

        return
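
# Side note (illustrative, stand-alone helper): directory-content checks like
# the ones in testA_CreateWorkload are easy to get wrong, because list.sort()
# sorts in place and returns None, so "a.sort() == b.sort()" always compares
# None with None. Comparing sorted() copies keeps such an assertion meaningful.
import os

def sameContents(path, expected):
    """Return True when a directory contains exactly the expected entries."""
    return sorted(os.listdir(path)) == sorted(expected)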
Exemple #47
0
class Scram_t(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testDir = self.testInit.generateWorkDir()
        self.oldCwd = os.getcwd()

    def tearDown(self):
        self.testInit.delWorkDir()

    def testA(self):
        """
        instantiate a Scram instance in test mode.
        """
        try:
            Scram(
                initialise="/bin/date",
                architecture="slc5_amd64_gcc454",
                version="CMSSW_X_Y_Z",
                test=True
            )
        except Exception as ex:
            msg = "Failed to instantiate Scram in test mode:\n %s " % str(ex)
            self.fail(msg)

    def testB(self):
        """
        instantiate a Scram instance in non-test mode.
        Limited in what we can test here since we don't have scram etc. in the unittest env.
        """
        try:
            Scram(
                initialise="/bin/date",
                architecture="slc5_amd64_gcc454",
                version="CMSSW_X_Y_Z"
            )
        except Exception as ex:
            msg = "Failed to instantiate Scram:\n %s " % str(ex)
            self.fail(msg)

    def testC(self):
        """
        test all method calls in test mode

        """
        s = Scram(
            initialise="/bin/date",
            architecture="slc5_amd64_gcc454",
            version="CMSSW_X_Y_Z",
            directory=self.testDir,
            test=True

        )

        try:
            status = s.project()
        except Exception as ex:
            msg = "Error running Scram.project:\n %s" % str(ex)
            self.fail(msg)

        self.assertEqual(status, 0)
        self.assertTrue(os.path.exists(s.projectArea))
        self.assertTrue("project" in s.lastExecuted)
        self.assertTrue("CMSSW_X_Y_Z" in s.lastExecuted)

        try:
            status = s.runtime()
        except Exception as ex:
            msg = "Error running Scram.runtime:\n %s" % str(ex)
            self.fail(msg)

        self.assertEqual(status, 0)
        self.assertTrue("ru -sh" in s.lastExecuted)
        self.assertTrue("TEST_MODE" in s.runtimeEnv)

        comm = "echo \"Hello World\""
        try:
            status = s(comm)
        except Exception as ex:
            msg = "Failed to call Scram object:\n %s" % str(ex)
            self.fail(msg)
        self.assertEqual(status, 0)
        self.assertEqual(s.lastExecuted, comm)

    def testArchMap(self):
        self.assertItemsEqual(OS_TO_ARCH['rhel6'], ['slc5', 'slc6'])
        self.assertItemsEqual(OS_TO_ARCH['rhel7'], ['slc7'])
        self.assertItemsEqual(ARCH_TO_OS['slc6'], ['rhel6'])
        self.assertItemsEqual(ARCH_TO_OS['slc7'], ['rhel7'])

    def testScramArchParsing(self):
        """
        Test the various modes of parsing for the scram arch
        """
        try:
            os.chdir(self.testDir)
            with tempfile.NamedTemporaryFile() as tf:
                tf.write('GLIDEIN_REQUIRED_OS = "rhel6" \n')
                tf.write('Memory = 2048\n')
                tf.flush()
                with tmpEnv(_CONDOR_MACHINE_AD=tf.name):
                    self.assertEqual(getSingleScramArch('slc6_blah_blah'), 'slc6_blah_blah')
                    self.assertEqual(getSingleScramArch('slc5_blah_blah'), 'slc5_blah_blah')
                    self.assertEqual(getSingleScramArch(['slc6_blah_blah', 'slc7_blah_blah']),
                                      'slc6_blah_blah')
                    self.assertEqual(getSingleScramArch(['slc6_blah_blah', 'slc5_blah_blah']),
                                      'slc6_blah_blah')
                    self.assertEqual(getSingleScramArch(['slc7_blah_blah', 'slc8_blah_blah']), None)
            with tempfile.NamedTemporaryFile() as tf:
                tf.write('GLIDEIN_REQUIRED_OS = "rhel7" \n')
                tf.write('Memory = 2048\n')
                tf.flush()
                with tmpEnv(_CONDOR_MACHINE_AD=tf.name):
                    self.assertEqual(getSingleScramArch('slc6_blah_blah'), 'slc6_blah_blah')
                    self.assertEqual(getSingleScramArch('slc7_blah_blah'), 'slc7_blah_blah')
                    self.assertEqual(getSingleScramArch(['slc6_blah_blah', 'slc7_blah_blah']),
                                      'slc7_blah_blah')
                    self.assertEqual(getSingleScramArch(['slc6_blah_blah', 'slc5_blah_blah']), None)
                    self.assertEqual(getSingleScramArch(['slc7_blah_blah', 'slc8_blah_blah']),
                                      'slc7_blah_blah')
        except Exception:
            raise
        finally:
            os.chdir(self.oldCwd)
        return
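
# Illustrative sketch only: testScramArchParsing above exercises picking a scram
# arch compatible with the OS advertised in the glidein machine ad. The helper
# below is a hypothetical, self-contained version of that selection; the mapping
# mirrors the shape asserted in testArchMap, everything else is an assumption.
HYPOTHETICAL_OS_TO_ARCH = {'rhel6': ['slc5', 'slc6'], 'rhel7': ['slc7']}

def pickScramArch(candidates, requiredOS, osToArch=HYPOTHETICAL_OS_TO_ARCH):
    """Return the first candidate compatible with requiredOS, or None."""
    if isinstance(candidates, str):
        # a single arch passes through unchanged, mirroring the assertions above
        return candidates
    allowed = osToArch.get(requiredOS, [])
    for arch in candidates:
        if arch.split('_')[0] in allowed:
            return arch
    return None

# pickScramArch(['slc6_blah_blah', 'slc7_blah_blah'], 'rhel6') -> 'slc6_blah_blah'
# pickScramArch(['slc7_blah_blah', 'slc8_blah_blah'], 'rhel6') -> None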
Exemple #48
0
class DBSUploadTest(unittest.TestCase):
    """
    TestCase for DBSUpload module

    """
    def setUp(self):
        """
        _setUp_

        setUp function for unittest

        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = ["WMComponent.DBS3Buffer"],
                                useDefault = False)
        self.testDir = self.testInit.generateWorkDir(deleteOnDestruction = False)
        self.configFile = EmulatorSetup.setupWMAgentConfig()

        myThread = threading.currentThread()
        self.bufferFactory = DAOFactory(package = "WMComponent.DBS3Buffer",
                                         logger = myThread.logger,
                                         dbinterface = myThread.dbi)

        locationAction = self.bufferFactory(classname = "DBSBufferFiles.AddLocation")
        locationAction.execute(siteName = "se1.cern.ch")
        locationAction.execute(siteName = "se1.fnal.gov")
        locationAction.execute(siteName = "malpaquet")
        self.dbsUrl = "https://*****:*****@attr("integration")
    def testBasicUpload(self):
        """
        _testBasicUpload_

        Verify that we can successfully upload to DBS3.  Also verify that the
        uploader correctly handles files parentage when uploading.
        """
        self.dbsApi = DbsApi(url = self.dbsUrl)
        config = self.getConfig()
        dbsUploader = DBSUploadPoller(config = config)

        # First test verifies that uploader will poll and then not do anything
        # as the database is empty.
        dbsUploader.algorithm()

        acqEra = "Summer%s" % (int(time.time()))
        parentFiles = self.createParentFiles(acqEra)

        # The algorithm needs to be run twice.  On the first iteration it will
        # create all the blocks and upload one.  On the second iteration it will
        # timeout and upload the second block.
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        # Verify the files made it into DBS3.
        self.verifyData(parentFiles[0]["datasetPath"], parentFiles)

        # Inject some more parent files and some child files into DBSBuffer.
        # Run the uploader twice, only the parent files should be added to DBS3.
        (moreParentFiles, childFiles) = \
                          self.createFilesWithChildren(parentFiles, acqEra)
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"],
                        parentFiles + moreParentFiles)

        # Run the uploader another two times to upload the child files.  Verify
        # that the child files were uploaded.
        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(childFiles[0]["datasetPath"], childFiles)
        return

    @attr("integration")
    def testDualUpload(self):
        """
        _testDualUpload_

        Verify that the dual upload mode works correctly.
        """
        self.dbsApi = DbsApi(url = self.dbsUrl)
        config = self.getConfig()
        dbsUploader = DBSUploadPoller(config = config)
        dbsUtil = DBSBufferUtil()

        # First test verifies that uploader will poll and then not do anything
        # as the database is empty.
        dbsUploader.algorithm()

        acqEra = "Summer%s" % (int(time.time()))
        parentFiles = self.createParentFiles(acqEra)
        (moreParentFiles, childFiles) = \
                          self.createFilesWithChildren(parentFiles, acqEra)

        allFiles = parentFiles + moreParentFiles
        allBlocks = []
        for i in range(4):
            DBSBufferDataset(parentFiles[0]["datasetPath"]).create()
            blockName = parentFiles[0]["datasetPath"] + "#" + makeUUID()
            dbsBlock = DBSBufferBlock(blockName,
                                      location = "malpaquet",
                                      datasetpath =  None)
            dbsBlock.status = "Open"
            dbsBlock.setDataset(parentFiles[0]["datasetPath"], 'data', 'VALID')
            dbsUtil.createBlocks([dbsBlock])
            for file in allFiles[i * 5 : (i * 5) + 5]:
                dbsBlock.addFile(file, 'data', 'VALID')
                dbsUtil.setBlockFiles({"block": blockName, "filelfn": file["lfn"]})
                if i < 2:
                    dbsBlock.status = "InDBS"
                dbsUtil.updateBlocks([dbsBlock])
            dbsUtil.updateFileStatus([dbsBlock], "InDBS")
            allBlocks.append(dbsBlock)

        DBSBufferDataset(childFiles[0]["datasetPath"]).create()
        blockName = childFiles[0]["datasetPath"] + "#" + makeUUID()
        dbsBlock = DBSBufferBlock(blockName,
                                  location = "malpaquet",
                                  datasetpath =  None)
        dbsBlock.status = "InDBS"
        dbsBlock.setDataset(childFiles[0]["datasetPath"], 'data', 'VALID')
        dbsUtil.createBlocks([dbsBlock])
        for file in childFiles:
            dbsBlock.addFile(file, 'data', 'VALID')
            dbsUtil.setBlockFiles({"block": blockName, "filelfn": file["lfn"]})

        dbsUtil.updateFileStatus([dbsBlock], "InDBS")

        dbsUploader.algorithm()
        time.sleep(5)
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"], parentFiles)

        # Change the status of the rest of the parent blocks so we can upload
        # them and the children.
        for dbsBlock in allBlocks:
            dbsBlock.status = "InDBS"
            dbsUtil.updateBlocks([dbsBlock])

        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(parentFiles[0]["datasetPath"], parentFiles + moreParentFiles)

        # Run the uploader one more time to upload the children.
        dbsUploader.algorithm()
        time.sleep(5)

        self.verifyData(childFiles[0]["datasetPath"], childFiles)
        return

    def testCloseSettingsPerWorkflow(self):
        """
        _testCloseSettingsPerWorkflow_

        Test the block closing mechanics in the DBS3 uploader,
        this uses a fake dbs api to avoid reliance on external services.
        """
        # Signal trapExit that we are a friend
        os.environ["DONT_TRAP_EXIT"] = "True"
        try:
            # Monkey patch the imports of DbsApi
            from WMComponent.DBS3Buffer import DBSUploadPoller as MockDBSUploadPoller
            MockDBSUploadPoller.DbsApi = MockDbsApi

            # Set the poller and the dbsUtil for verification
            myThread = threading.currentThread()
            (_, dbsFilePath) = mkstemp(dir = self.testDir)
            self.dbsUrl = dbsFilePath
            config = self.getConfig()
            dbsUploader = MockDBSUploadPoller.DBSUploadPoller(config = config)
            dbsUtil = DBSBufferUtil()

            # First test is event based limits and timeout with no new files.
            # Set the files and workflow
            acqEra = "TropicalSeason%s" % (int(time.time()))
            workflowName = 'TestWorkload%s' % (int(time.time()))
            taskPath = '/%s/TestProcessing' % workflowName
            self.injectWorkflow(workflowName, taskPath,
                                MaxWaitTime = 2, MaxFiles = 100,
                                MaxEvents = 150)
            self.createParentFiles(acqEra, nFiles = 20,
                                   workflowName = workflowName,
                                   taskPath = taskPath)

            # The algorithm needs to be run twice.  On the first iteration it will
            # create all the blocks and upload one with less than 150 events.
            # On the second iteration the second block is uploaded.
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            openBlocks = dbsUtil.findOpenBlocks()
            self.assertEqual(len(openBlocks), 1)
            globalFiles = myThread.dbi.processData("SELECT id FROM dbsbuffer_file WHERE status = 'InDBS'")[0].fetchall()
            notUploadedFiles = myThread.dbi.processData("SELECT id FROM dbsbuffer_file WHERE status = 'NOTUPLOADED'")[0].fetchall()
            self.assertEqual(len(globalFiles), 14)
            self.assertEqual(len(notUploadedFiles), 6)
            # Check the fake DBS for data
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 2)
            for block in fakeDBSInfo:
                self.assertTrue('block_events' not in block['block'])
                self.assertEqual(block['block']['file_count'], 7)
                self.assertEqual(block['block']['open_for_writing'], 0)
                self.assertTrue('close_settings' not in block)
            time.sleep(3)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            openBlocks = dbsUtil.findOpenBlocks()
            self.assertEqual(len(openBlocks), 0)
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 3)
            for block in fakeDBSInfo:
                if block['block']['file_count'] != 6:
                    self.assertEqual(block['block']['file_count'], 7)
                self.assertTrue('block_events' not in block['block'])
                self.assertEqual(block['block']['open_for_writing'], 0)
                self.assertTrue('close_settings' not in block)

            # Now check the limit by number of files and the timeout with new files
            acqEra = "TropicalSeason%s" % (int(time.time()))
            workflowName = 'TestWorkload%s' % (int(time.time()))
            taskPath = '/%s/TestProcessing' % workflowName
            self.injectWorkflow(workflowName, taskPath,
                                MaxWaitTime = 2, MaxFiles = 5,
                                MaxEvents = 200000000)
            self.createParentFiles(acqEra, nFiles = 16,
                                   workflowName = workflowName,
                                   taskPath = taskPath)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            openBlocks = dbsUtil.findOpenBlocks()
            self.assertEqual(len(openBlocks), 1)
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 6)
            for block in fakeDBSInfo:
                if acqEra in block['block']['block_name']:
                    self.assertEqual(block['block']['file_count'], 5)
                self.assertTrue('block_events' not in block['block'])
                self.assertTrue('close_settings' not in block)
                self.assertEqual(block['block']['open_for_writing'], 0)

            # Put more files, they will go into the same block and then it will be closed
            # after timeout
            time.sleep(3)
            self.createParentFiles(acqEra, nFiles = 3,
                                   workflowName = workflowName,
                                   taskPath = taskPath)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            openBlocks = dbsUtil.findOpenBlocks()
            self.assertEqual(len(openBlocks), 0)
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 7)
            for block in fakeDBSInfo:
                if acqEra in block['block']['block_name']:
                    if block['block']['file_count'] < 5:
                        self.assertEqual(block['block']['file_count'], 4)
                    else:
                        self.assertEqual(block['block']['file_count'], 5)
                self.assertTrue('block_events' not in block['block'])
                self.assertEqual(block['block']['open_for_writing'], 0)
                self.assertTrue('close_settings' not in block)

            # Finally test size limits
            acqEra = "TropicalSeason%s" % (int(time.time()))
            workflowName = 'TestWorkload%s' % (int(time.time()))
            taskPath = '/%s/TestProcessing' % workflowName
            self.injectWorkflow(workflowName, taskPath,
                                MaxWaitTime = 1, MaxFiles = 500,
                                MaxEvents = 200000000, MaxSize = 2048)
            self.createParentFiles(acqEra, nFiles = 7,
                                   workflowName = workflowName,
                                   taskPath = taskPath)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()
            time.sleep(2)
            dbsUploader.algorithm()
            dbsUploader.checkBlocks()

            openBlocks = dbsUtil.findOpenBlocks()
            self.assertEqual(len(openBlocks), 0)
            fakeDBS = open(self.dbsUrl, 'r')
            fakeDBSInfo = json.load(fakeDBS)
            fakeDBS.close()
            self.assertEqual(len(fakeDBSInfo), 11)
            for block in fakeDBSInfo:
                if acqEra in block['block']['block_name']:
                    if block['block']['file_count'] != 1:
                        self.assertEqual(block['block']['block_size'], 2048)
                        self.assertEqual(block['block']['file_count'], 2)
                self.assertTrue('block_events' not in block['block'])
                self.assertEqual(block['block']['open_for_writing'], 0)
                self.assertTrue('close_settings' not in block)
        except Exception as ex:
            self.fail("We failed at some point in the test: %s" % str(ex))
        finally:
            # We don't trust anyone else with _exit
            del os.environ["DONT_TRAP_EXIT"]
        return
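
# Illustrative sketch (not the DBSUploadPoller code): testCloseSettingsPerWorkflow
# drives block closing through per-workflow settings (MaxWaitTime, MaxFiles,
# MaxEvents, MaxSize). A stand-alone decision helper with that behaviour could
# look like the following; all names and the dict layout are hypothetical.
import time

def shouldCloseBlock(block, settings, now=None):
    """
    Return True when an open block hits any of the configured close conditions.
    'block' carries file_count, events, size and a creation timestamp;
    'settings' carries MaxFiles, MaxEvents, MaxSize and MaxWaitTime.
    """
    now = now if now is not None else time.time()
    return (block['file_count'] >= settings['MaxFiles'] or
            block['events'] >= settings['MaxEvents'] or
            block['size'] >= settings['MaxSize'] or
            now - block['created'] >= settings['MaxWaitTime'])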
Exemple #49
0
class ForwardSinkTest(unittest.TestCase):
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel = logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()

        self.address1 = "tcp://127.0.0.1:5557"
        self.controlAddr1 = "tcp://127.0.0.1:5559"

        self.address2 = "tcp://127.0.0.1:15557"
        self.controlAddr2 = "tcp://127.0.0.1:15559"

        self.outputfileCritical = os.path.join(self.testDir, "ForwardSinkTestCritical.json")
        self.outputfileSoft = os.path.join(self.testDir, "ForwardSinkTestSoft.json")


    def tearDown(self):
        self.testInit.delWorkDir()


    def testForwardSinkBasic(self):
        config = ConfigSection("forward")
        # address of the Processor, resp. Receiver to forward Alerts to
        config.address = self.address1
        config.controlAddr = self.controlAddr1
        config.label = "ForwardSinkTest"
        forwarder = ForwardSink(config)


    def testForwardSinkEntireChain(self):
        """
        The test chain looks as follows:
        worker -> Receiver1 (its Processor configured with a ForwardSink) -> Receiver2,
            whose address the ForwardSink uses as its destination -> Receiver2 runs a
            FileSink so that it's possible to verify the chain.

        """
        # configuration for the Receiver+Processor+ForwardSink 1 (group)
        config1 = Configuration()
        config1.component_("AlertProcessor")
        config1.AlertProcessor.section_("critical")
        config1.AlertProcessor.section_("soft")

        config1.AlertProcessor.critical.level = 5
        config1.AlertProcessor.soft.level = 0
        config1.AlertProcessor.soft.bufferSize = 0

        config1.AlertProcessor.critical.section_("sinks")
        config1.AlertProcessor.soft.section_("sinks")

        config1.AlertProcessor.critical.sinks.section_("forward")
        config1.AlertProcessor.soft.sinks.section_("forward")
        # address of the Receiver2
        config1.AlertProcessor.critical.sinks.forward.address = self.address2
        config1.AlertProcessor.critical.sinks.forward.controlAddr = self.controlAddr2
        config1.AlertProcessor.critical.sinks.forward.label = "ForwardSinkTest"
        config1.AlertProcessor.soft.sinks.forward.address = self.address2
        config1.AlertProcessor.soft.sinks.forward.controlAddr = self.controlAddr2
        config1.AlertProcessor.soft.sinks.forward.label = "ForwardSinkTest"

        # 1) first item of the chain is source of Alerts: worker()

        # 2) second item is Receiver1 + its Processor + its ForwardSink
        processor1 = Processor(config1.AlertProcessor)
        # ForwardSink will be created automatically by the Processor
        receiver1 = Receiver(self.address1, processor1, self.controlAddr1)
        receiver1.startReceiver() # non blocking call

        # 3) third group is Receiver2 with its Processor and final FileSink
        config2 = Configuration()
        config2.component_("AlertProcessor")
        config2.AlertProcessor.section_("critical")
        config2.AlertProcessor.section_("soft")

        config2.AlertProcessor.critical.level = 5
        config2.AlertProcessor.soft.level = 0
        config2.AlertProcessor.soft.bufferSize = 0

        config2.AlertProcessor.critical.section_("sinks")
        config2.AlertProcessor.soft.section_("sinks")

        config2.AlertProcessor.critical.sinks.section_("file")
        config2.AlertProcessor.soft.sinks.section_("file")
        # configuration of the final sink
        config2.AlertProcessor.critical.sinks.file.outputfile = self.outputfileCritical
        config2.AlertProcessor.soft.sinks.file.outputfile = self.outputfileSoft

        processor2 = Processor(config2.AlertProcessor)
        # final FileSink will be automatically created by the Processor
        receiver2 = Receiver(self.address2, processor2, self.controlAddr2)
        receiver2.startReceiver() # non blocking call

        # now send the Alert messages via worker() and eventually shut the receiver1
        worker(self.address1, self.controlAddr1, 10)
        # wait until receiver1 shuts
        while receiver1.isReady():
            time.sleep(0.4)
            print "%s waiting for Receiver1 to shut ..." % inspect.stack()[0][3]

        # shut down receiver2 - need to sendShutdown() to it
        s = Sender(self.address2, self.controlAddr2, "some_id")
        s.sendShutdown()
        # wait until receiver2 shuts
        while receiver2.isReady():
            time.sleep(0.4)
            print "%s waiting for Receiver2 to shut ..." % inspect.stack()[0][3]

        # check the result in the files
        # the bufferSize for soft-level Alerts was set to 0, so all
        # Alerts should also be present in the soft-level file.
        # the initial 10 Alerts (Level 0 .. 9) get distributed through a cascade
        # of two Receivers: alerts with level 0 .. 4 are considered soft,
        # so Receiver1 forwards Alerts 0 .. 4 through its ForwardSink as soft and
        # level 5 .. 9 Alerts as 'critical'. Order is not guaranteed.
        # critical Alerts
        fileConfig = ConfigSection("file")
        fileConfig.outputfile = self.outputfileCritical
        sink = FileSink(fileConfig)
        expectedLevels = range(5, 10) # that is 5 .. 9
        loadAlerts = sink.load()
        self.assertEqual(len(loadAlerts), len(expectedLevels))
        d = dict(very = "interesting")
        for a in loadAlerts:
            self.assertEqual(a["Details"], d)

        # soft Alerts
        fileConfig = ConfigSection("file")
        fileConfig.outputfile = self.outputfileSoft
        sink = FileSink(fileConfig)
        expectedLevels = range(0, 5) # that is 0 .. 4
        loadAlerts = sink.load()
        self.assertEqual(len(loadAlerts), len(expectedLevels))
        for a in loadAlerts:
            self.assertEqual(a["Details"], d)
Exemple #50
0
class StoreResultsTest(unittest.TestCase):
    def setUp(self):
        """
        _setUp_

        Initialize the database.
        """
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=["WMCore.WMBS"],
                                useDefault=False)
        self.testDir = self.testInit.generateWorkDir()
        return

    def tearDown(self):
        """
        _tearDown_

        Clear out the database.
        """
        self.testInit.clearDatabase()
        self.testInit.delWorkDir()
        return

    def testStoreResults(self):
        """
        _testStoreResults_

        Create a StoreResults workflow and verify it installs into WMBS
        correctly.
        """
        arguments = StoreResultsWorkloadFactory.getTestArguments()
        arguments.update({'CmsPath': "/uscmst1/prod/sw/cms"})

        factory = StoreResultsWorkloadFactory()
        testWorkload = factory.factoryWorkloadConstruction(
            "TestWorkload", arguments)

        testWMBSHelper = WMBSHelper(testWorkload,
                                    "StoreResults",
                                    "SomeBlock",
                                    cachepath=self.testDir)
        testWMBSHelper.createTopLevelFileset()
        testWMBSHelper._createSubscriptionsInWMBS(
            testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)

        testWorkflow = Workflow(name="TestWorkload",
                                task="/TestWorkload/StoreResults")
        testWorkflow.load()

        self.assertEqual(len(testWorkflow.outputMap.keys()), 2,
                         "Error: Wrong number of WF outputs.")

        goldenOutputMods = ["Merged"]
        for goldenOutputMod in goldenOutputMods:
            mergedOutput = testWorkflow.outputMap[goldenOutputMod][0][
                "merged_output_fileset"]
            unmergedOutput = testWorkflow.outputMap[goldenOutputMod][0][
                "output_fileset"]

            mergedOutput.loadData()
            unmergedOutput.loadData()

            self.assertEqual(
                mergedOutput.name,
                "/TestWorkload/StoreResults/merged-%s" % goldenOutputMod,
                "Error: Merged output fileset is wrong: %s" %
                mergedOutput.name)
            self.assertEqual(
                unmergedOutput.name,
                "/TestWorkload/StoreResults/merged-%s" % goldenOutputMod,
                "Error: Unmerged output fileset is wrong: %s." %
                unmergedOutput.name)

        logArchOutput = testWorkflow.outputMap["logArchive"][0][
            "merged_output_fileset"]
        unmergedLogArchOutput = testWorkflow.outputMap["logArchive"][0][
            "output_fileset"]
        logArchOutput.loadData()
        unmergedLogArchOutput.loadData()

        self.assertEqual(logArchOutput.name,
                         "/TestWorkload/StoreResults/merged-logArchive",
                         "Error: LogArchive output fileset is wrong.")
        self.assertEqual(unmergedLogArchOutput.name,
                         "/TestWorkload/StoreResults/merged-logArchive",
                         "Error: LogArchive output fileset is wrong.")

        topLevelFileset = Fileset(name="TestWorkload-StoreResults-SomeBlock")
        topLevelFileset.loadData()

        procSubscription = Subscription(fileset=topLevelFileset,
                                        workflow=testWorkflow)
        procSubscription.loadData()

        self.assertEqual(procSubscription["type"], "Merge",
                         "Error: Wrong subscription type.")
        self.assertEqual(procSubscription["split_algo"],
                         "ParentlessMergeBySize", "Error: Wrong split algo.")

        return
Exemple #51
0
class StoreResultsTest(unittest.TestCase):
    def setUp(self):
        """
        _setUp_

        Initialize the database.
        """
        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=["WMCore.WMBS"],
                                useDefault=False)
        self.testDir = self.testInit.generateWorkDir()

        myThread = threading.currentThread()
        self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                     logger=myThread.logger,
                                     dbinterface=myThread.dbi)
        self.listTasksByWorkflow = self.daoFactory(
            classname="Workflow.LoadFromName")
        self.listFilesets = self.daoFactory(classname="Fileset.List")
        self.listSubsMapping = self.daoFactory(
            classname="Subscriptions.ListSubsAndFilesetsFromWorkflow")

        return

    def tearDown(self):
        """
        _tearDown_

        Clear out the database.
        """
        self.testInit.clearDatabase()
        self.testInit.delWorkDir()
        return

    def testStoreResults(self):
        """
        _testStoreResults_

        Create a StoreResults workflow and verify it installs into WMBS
        correctly.
        """
        arguments = StoreResultsWorkloadFactory.getTestArguments()

        factory = StoreResultsWorkloadFactory()
        testWorkload = factory.factoryWorkloadConstruction(
            "TestWorkload", arguments)

        testWMBSHelper = WMBSHelper(testWorkload,
                                    "StoreResults",
                                    "SomeBlock",
                                    cachepath=self.testDir)
        testWMBSHelper.createTopLevelFileset()
        testWMBSHelper._createSubscriptionsInWMBS(
            testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)

        testWorkflow = Workflow(name="TestWorkload",
                                task="/TestWorkload/StoreResults")
        testWorkflow.load()

        self.assertEqual(len(testWorkflow.outputMap.keys()), 2,
                         "Error: Wrong number of WF outputs.")
        goldenOutputMods = {"Merged": "USER"}
        for goldenOutputMod, tier in goldenOutputMods.items():
            fset = goldenOutputMod + tier
            mergedOutput = testWorkflow.outputMap[fset][0][
                "merged_output_fileset"]
            unmergedOutput = testWorkflow.outputMap[fset][0]["output_fileset"]

            mergedOutput.loadData()
            unmergedOutput.loadData()

            self.assertEqual(
                mergedOutput.name,
                "/TestWorkload/StoreResults/merged-%s" % fset,
                "Error: Merged output fileset is wrong: %s" %
                mergedOutput.name)
            self.assertEqual(
                unmergedOutput.name,
                "/TestWorkload/StoreResults/merged-%s" % fset,
                "Error: Unmerged output fileset is wrong: %s." %
                unmergedOutput.name)

        logArchOutput = testWorkflow.outputMap["logArchive"][0][
            "merged_output_fileset"]
        unmergedLogArchOutput = testWorkflow.outputMap["logArchive"][0][
            "output_fileset"]
        logArchOutput.loadData()
        unmergedLogArchOutput.loadData()

        self.assertEqual(logArchOutput.name,
                         "/TestWorkload/StoreResults/merged-logArchive",
                         "Error: LogArchive output fileset is wrong.")
        self.assertEqual(unmergedLogArchOutput.name,
                         "/TestWorkload/StoreResults/merged-logArchive",
                         "Error: LogArchive output fileset is wrong.")

        topLevelFileset = Fileset(name="TestWorkload-StoreResults-SomeBlock")
        topLevelFileset.loadData()

        procSubscription = Subscription(fileset=topLevelFileset,
                                        workflow=testWorkflow)
        procSubscription.loadData()

        self.assertEqual(procSubscription["type"], "Merge",
                         "Error: Wrong subscription type.")
        self.assertEqual(procSubscription["split_algo"],
                         "ParentlessMergeBySize", "Error: Wrong split algo.")

        return

    def testFilesets(self):
        """
        Test workflow tasks, filesets and subscriptions creation
        """
        # expected tasks, filesets, subscriptions, etc
        expOutTasks = ['/TestWorkload/StoreResults']
        expWfTasks = [
            '/TestWorkload/StoreResults',
            '/TestWorkload/StoreResults/StoreResultsLogCollect'
        ]
        expFsets = [
            'TestWorkload-StoreResults-/MinimumBias/ComissioningHI-v1/RAW',
            '/TestWorkload/StoreResults/merged-MergedUSER',
            '/TestWorkload/StoreResults/merged-logArchive'
        ]
        subMaps = [
            (2, '/TestWorkload/StoreResults/merged-logArchive',
             '/TestWorkload/StoreResults/StoreResultsLogCollect',
             'MinFileBased', 'LogCollect'),
            (1, 'TestWorkload-StoreResults-/MinimumBias/ComissioningHI-v1/RAW',
             '/TestWorkload/StoreResults', 'ParentlessMergeBySize', 'Merge')
        ]

        testArguments = StoreResultsWorkloadFactory.getTestArguments()

        factory = StoreResultsWorkloadFactory()
        testWorkload = factory.factoryWorkloadConstruction(
            "TestWorkload", testArguments)

        testWMBSHelper = WMBSHelper(testWorkload,
                                    "StoreResults",
                                    blockName=testArguments['InputDataset'],
                                    cachepath=self.testInit.testDir)
        testWMBSHelper.createTopLevelFileset()
        testWMBSHelper._createSubscriptionsInWMBS(
            testWMBSHelper.topLevelTask, testWMBSHelper.topLevelFileset)

        self.assertItemsEqual(testWorkload.listOutputProducingTasks(),
                              expOutTasks)

        workflows = self.listTasksByWorkflow.execute(workflow="TestWorkload")
        self.assertItemsEqual([item['task'] for item in workflows], expWfTasks)

        # returns a tuple of id, name, open and last_update
        filesets = self.listFilesets.execute()
        self.assertItemsEqual([item[1] for item in filesets], expFsets)

        subscriptions = self.listSubsMapping.execute(workflow="TestWorkload",
                                                     returnTuple=True)
        self.assertItemsEqual(subscriptions, subMaps)
Exemple #52
0
class DashboardInterfaceTest(unittest.TestCase):
    """
    Test for the dashboard interface and its monitoring interaction

    Well, once I've written them it will be
    """


    def setUp(self):
        """
        Basically, do nothing

        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()


        self.testDir = self.testInit.generateWorkDir()

        return


    def tearDown(self):
        """
        Clean up the test directory

        """

        self.testInit.delWorkDir()

        return


    def createWorkload(self):
        """
        Create a workload in order to test things

        """
        generator = WMSpecGenerator()
        workload = generator.createReRecoSpec("Tier1ReReco")
        return workload

    def createTestJob(self):
        """
        Create a test job to pass to the DashboardInterface

        """

        job = Job(name = "ThisIsASillyName")

        testFileA = File(lfn = "/this/is/a/lfnA", size = 1024, events = 10)
        testFileA.addRun(Run(1, *[45]))
        testFileB = File(lfn = "/this/is/a/lfnB", size = 1024, events = 10)
        testFileB.addRun(Run(1, *[46]))

        job.addFile(testFileA)
        job.addFile(testFileB)

        job['id'] = 1

        return job


    def createReport(self, outcome = 0):
        """
        Create a test report

        """

        jobReport = Report()
        jobReport.addStep('cmsRun1')
        jobReport.setStepStartTime(stepName = 'cmsRun1')
        jobReport.setStepStopTime(stepName = 'cmsRun1')
        if outcome:
            jobReport.addError('cmsRun1', 200, 'FakeError', 'FakeError')

        return jobReport


    def setupJobEnvironment(self, name = 'test'):
        """
        _setupJobEnvironment_

        Make some sort of environment in which to run tests
        """

        os.environ['WMAGENT_SITE_CONFIG_OVERRIDE'] = os.path.join(getTestBase(),
                                            "WMCore_t/Storage_t",
                                            "T1_US_FNAL_SiteLocalConfig.xml")
        return

    def testASuccessfulJobMonitoring(self):
        """
        _testASuccessfulJobMonitoring_

        Check that the data packets make sense when a job completes successfully
        """

        # Get the necessary objects
        name     = 'testA'
        job      = self.createTestJob()
        workload = self.createWorkload()
        task     = workload.getTask(taskName = "DataProcessing")
        report   = self.createReport()

        # Fill the job environment
        self.setupJobEnvironment(name = name)

        # Instantiate DBInfo
        dbInfo   = DashboardInfo(job = job, task = task)
        dbInfo.addDestination('127.0.0.1', 8884)

        # Check jobStart information
        data = dbInfo.jobStart()
        self.assertEqual(data['MessageType'], 'JobStatus')
        self.assertEqual(data['StatusValue'], 'running')
        self.assertEqual(data['StatusDestination'], "T1_US_FNAL")
        self.assertEqual(data['taskId'], 'wmagent_Tier1ReReco')

        # Do the first step
        step = task.getStep(stepName = "cmsRun1")

        # Do the step start
        data = dbInfo.stepStart(step = step.data)
        self.assertNotEqual(data['jobStart'], None)
        self.assertEqual(data['jobStart']['ExeStart'], step.name())
        self.assertEqual(data['jobStart']['WNHostName'], socket.gethostname())
        self.assertEqual(data['1_ExeStart'], step.name())

        #Do the step end
        data = dbInfo.stepEnd(step = step.data, stepReport = report)
        self.assertEqual(data['1_ExeEnd'], step.name())
        self.assertEqual(data['1_ExeExitCode'], 0)
        self.assertTrue(data['1_ExeWCTime'] >= 0)
        self.assertEqual(report.retrieveStep("cmsRun1").counter, 1)

        #Do a second step
        step = task.getStep(stepName = "cmsRun1")

        #Do the step start (It's not the first step)
        data = dbInfo.stepStart(step = step.data)
        self.assertEqual(data['jobStart'], None)
        self.assertEqual(data['2_ExeStart'], step.name())

        #Do the step end
        data = dbInfo.stepEnd(step = step.data, stepReport = report)
        self.assertEqual(data['2_ExeEnd'], step.name())
        self.assertEqual(data['2_ExeExitCode'], 0)
        self.assertTrue(data['2_ExeWCTime'] >= 0)
        self.assertEqual(report.retrieveStep("cmsRun1").counter, 2)

        # End the job!
        data = dbInfo.jobEnd()
        self.assertEqual(data['ExeEnd'], "cmsRun1")
        self.assertEqual(data['JobExitCode'], 0)
        self.assertEqual(data['WrapperCPUTime'], 0)
        self.assertTrue(data['WrapperWCTime'] >= 0)
        self.assertNotEqual(data['JobExitReason'], "")

        return


    def testAFailedJobMonitoring(self):
        """
        _TestAFailedJobMonitoring_

        Simulate a job that completes but fails, check that the data sent is
        correct
        """

        # Get the necessary objects
        name     = 'testB'
        job      = self.createTestJob()
        workload = self.createWorkload()
        task     = workload.getTask(taskName = "DataProcessing")
        report   = self.createReport(outcome = 1)

        # Fill the job environment
        self.setupJobEnvironment(name = name)

        # Instantiate DBInfo
        dbInfo   = DashboardInfo(job = job, task = task)
        dbInfo.addDestination('127.0.0.1', 8884)

        # Check jobStart information
        data = dbInfo.jobStart()
        self.assertEqual(data['MessageType'], 'JobStatus')
        self.assertEqual(data['StatusValue'], 'running')
        self.assertEqual(data['StatusDestination'], "T1_US_FNAL")
        self.assertEqual(data['taskId'], 'wmagent_Tier1ReReco')

        # Do the first step
        step = task.getStep(stepName = "cmsRun1")

        # Do the step start
        data = dbInfo.stepStart(step = step.data)
        self.assertNotEqual(data['jobStart'], None)
        self.assertEqual(data['jobStart']['ExeStart'], step.name())
        self.assertEqual(data['jobStart']['WNHostName'], socket.gethostname())
        self.assertEqual(data['1_ExeStart'], step.name())

        #Do the step end
        data = dbInfo.stepEnd(step = step.data, stepReport = report)
        self.assertEqual(data['1_ExeEnd'], step.name())
        self.assertNotEqual(data['1_ExeExitCode'], 0)
        self.assertTrue(data['1_ExeWCTime'] >= 0)
        self.assertEqual(report.retrieveStep("cmsRun1").counter, 1)

        # End the job!
        data = dbInfo.jobEnd()
        self.assertEqual(data['ExeEnd'], "cmsRun1")
        self.assertNotEqual(data['JobExitCode'], 0)
        self.assertEqual(data['WrapperCPUTime'], 0)
        self.assertTrue(data['WrapperWCTime'] >= 0)
        self.assertNotEqual(data['JobExitReason'].find('cmsRun1'), -1)

        return

    def testAKilledJobMonitoring(self):
        """
        _TestAKilledJobMonitoring_

        Simulate a job that is killed check that the data sent is
        correct
        """

        # Get the necessary objects
        name     = 'testC'
        job      = self.createTestJob()
        workload = self.createWorkload()
        task     = workload.getTask(taskName = "DataProcessing")
        report   = self.createReport(outcome = 1)

        # Fill the job environment
        self.setupJobEnvironment(name = name)

        # Instantiate DBInfo
        dbInfo   = DashboardInfo(job = job, task = task)
        dbInfo.addDestination('127.0.0.1', 8884)

        # Check jobStart information
        data = dbInfo.jobStart()
        self.assertEqual(data['MessageType'], 'JobStatus')
        self.assertEqual(data['StatusValue'], 'running')
        self.assertEqual(data['StatusDestination'], "T1_US_FNAL")
        self.assertEqual(data['taskId'], 'wmagent_Tier1ReReco')

        # Do the first step
        step = task.getStep(stepName = "cmsRun1")

        # Do the step start
        data = dbInfo.stepStart(step = step.data)
        self.assertNotEqual(data['jobStart'], None)
        self.assertEqual(data['jobStart']['ExeStart'], step.name())
        self.assertEqual(data['jobStart']['WNHostName'], socket.gethostname())
        self.assertEqual(data['1_ExeStart'], step.name())

        #Do the step end
        data = dbInfo.stepEnd(step = step.data, stepReport = report)
        self.assertEqual(data['1_ExeEnd'], step.name())
        self.assertNotEqual(data['1_ExeExitCode'], 0)
        self.assertTrue(data['1_ExeWCTime'] >= 0)

        # Kill the job!
        data = dbInfo.jobKilled()
        self.assertEqual(data['ExeEnd'], "cmsRun1")
        self.assertNotEqual(data['JobExitCode'], 0)
        self.assertEqual(data['WrapperCPUTime'], 0)
        self.assertTrue(data['WrapperWCTime'] >= 0)
        self.assertNotEqual(data['JobExitReason'].find('killed'), -1)

        return

    @attr('integration')
    def testGetDN(self):
        """
        _testGetDN_

        Checks that we can get a DN
        """
        dn = getUserProxyDN()
        if 'X509_USER_PROXY' in os.environ:
            self.assertNotEqual(dn, None, 'Error: This should get a DN, if you have set one')
        else:
            self.assertEqual(dn, None, 'Error: There is no proxy in the environment, it should not get one')
Example #53
class deleteFileTest(unittest.TestCase):
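    """
    _deleteFileTest_

    Unit tests for the Delete step executor: setUp builds a DeleteTest step
    from DeleteTemplate and copies a scratch file into the work directory,
    and each test expects the executor to remove that file.
    """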


    def setUp(self):
        # stolen from CMSSWExecutor_t. thanks, dave
        self.testInit = TestInit(__file__)
        self.testDir = self.testInit.generateWorkDir()
        shutil.copyfile('/etc/hosts', os.path.join(self.testDir, 'testfile'))

        self.workload = newWorkload("UnitTests")
        self.task = self.workload.newTask("DeleterTask")
        stepHelper = self.task.makeStep("DeleteTest")
        self.step = stepHelper.data
        self.actualStep = stepHelper
        template = DeleteTemplate()
        template(self.step)
        self.helper = template.helper(self.step)
        self.executor = StepFactory.getStepExecutor(self.actualStep.stepType())

        taskMaker = TaskMaker(self.workload, self.testDir)
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        self.sandboxDir = "%s/UnitTests" % self.testDir

        self.task.build(self.testDir)
        sys.path.insert(0, self.testDir)
        sys.path.insert(0, self.sandboxDir)


        self.job = Job(name = "/UnitTest/DeleterTask/DeleteTest-test-job")

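        # put the package's bin/ directory (resolved from ModuleLocator's
        # location) on PATH, presumably so the executor can find its scripts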
        binDir = inspect.getsourcefile(ModuleLocator)
        binDir = binDir.replace("__init__.py", "bin")

        if binDir not in os.environ['PATH']:
            os.environ['PATH'] = "%s:%s" % (os.environ['PATH'], binDir)

    def tearDown(self):
        self.testInit.delWorkDir()
        sys.path.remove(self.testDir)
        sys.path.remove(self.sandboxDir)

    def setLocalOverride(self, step):
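        """
        Fill the step's 'override' section with dummy local stage-out
        settings: a test-copy command, an empty lfn-prefix and a placeholder
        phedex-node.
        """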
        step.section_('override')
        step.override.command    = 'test-copy'
        step.override.option     = ''
        step.override.__setattr__('lfn-prefix', '')
        step.override.__setattr__('phedex-node','DUMMYPNN')


    @attr('integration')
    def testManualDeleteOld(self):
        self.assertTrue(os.path.exists( os.path.join(self.testDir, 'testfile')))
        self.step.section_('filesToDelete')
        self.step.filesToDelete.file1 = os.path.join(self.testDir, 'testfile')
        self.setLocalOverride(self.step)
        self.executor.initialise(self.step, self.job)
        self.executor.execute()
        self.assertFalse(os.path.exists( os.path.join(self.testDir, 'testfile')))
        return

    @attr('integration')
    def testManualDeleteNew(self):
        self.assertTrue(os.path.exists( os.path.join(self.testDir, 'testfile')))
        self.step.section_('filesToDelete')
        self.step.filesToDelete.file1 = os.path.join(self.testDir, 'testfile')
        self.setLocalOverride(self.step)
        self.step.override.newStageOut = True
        self.executor.initialise(self.step, self.job)
        self.executor.execute()
        self.assertFalse(os.path.exists( os.path.join(self.testDir, 'testfile')))
        return

    @attr('integration')
    def testJobDeleteOld(self):
        self.assertTrue(os.path.exists( os.path.join(self.testDir, 'testfile')))
        self.setLocalOverride(self.step)
        self.job['input_files'] = [ {'lfn': os.path.join(self.testDir, 'testfile') } ]
        self.executor.initialise(self.step, self.job)
        self.executor.execute()
        self.assertFalse(os.path.exists( os.path.join(self.testDir, 'testfile')))
        return

    @attr('integration')
    def testJobDeleteNew(self):
        self.assertTrue(os.path.exists( os.path.join(self.testDir, 'testfile')))
        self.setLocalOverride(self.step)
        self.step.override.newStageOut = True
        self.job['input_files'] = [ {'lfn': os.path.join(self.testDir, 'testfile') } ]
        self.executor.initialise(self.step, self.job)
        self.executor.execute()
        self.assertFalse(os.path.exists( os.path.join(self.testDir, 'testfile')))
        return
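
# A minimal sketch, not part of the original example: if this module were run
# directly, the standard unittest runner could drive the suite above
# (the nose-style @attr markers would simply be ignored).
if __name__ == '__main__':
    unittest.main()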
Example #54
class WMAgentTest(unittest.TestCase):
    """
    _WMAgentTest_

    Global unittest for all WMAgent components
    """

    # This is an integration test
    __integration__ = "Any old bollocks"

    sites = ['T2_US_Florida', 'T2_US_UCSD', 'T2_TW_Taiwan', 'T1_CH_CERN']
    components = ['JobCreator', 'JobSubmitter', 'JobTracker',
                  'JobAccountant', 'JobArchiver', 'TaskArchiver',
                  'RetryManager', 'ErrorHandler']

    def setUp(self):
        """
        _setUp_

        Set up vital components
        """

        self.testInit = TestInit(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules = ["WMCore.WMBS",'WMCore.MsgService',
                                                 'WMCore.ResourceControl', 'WMCore.ThreadPool',
                                                 'WMCore.Agent.Database'],
                                useDefault = False)

        myThread = threading.currentThread()
        self.daoFactory = DAOFactory(package = "WMCore.WMBS",
                                     logger = myThread.logger,
                                     dbinterface = myThread.dbi)



        locationAction = self.daoFactory(classname = "Locations.New")
        pendingSlots  = self.daoFactory(classname = "Locations.SetPendingSlots")


        for site in self.sites:
            locationAction.execute(siteName = site, seName = 'se.%s' % (site), ceName = site)
            pendingSlots.execute(siteName = site, pendingSlots = 1000)


        #Create sites in resourceControl
        resourceControl = ResourceControl()
        for site in self.sites:
            resourceControl.insertSite(siteName = site, seName = 'se.%s' % (site), ceName = site)
            resourceControl.insertThreshold(siteName = site, taskType = 'Processing', \
                                            maxSlots = 10000, pendingSlots = 10000)


        self.testDir = self.testInit.generateWorkDir()


        # Set heartbeat
        for component in self.components:
            heartbeatAPI = HeartbeatAPI(component)
            heartbeatAPI.registerComponent()




        return


    def tearDown(self):
        """
        _tearDown_

        Tear down everything and go home.
        """

        self.testInit.clearDatabase()

        self.testInit.delWorkDir()

        return




    def createTestWorkload(self, workloadName = 'Test', emulator = True):
        """
        _createTestWorkload_

        Creates a test workload for us to run on, hold the basic necessities.
        """


        workload = testWorkload("TestWorkload")
        rereco = workload.getTask("ReReco")


        taskMaker = TaskMaker(workload, os.path.join(self.testDir, 'workloadTest'))
        taskMaker.skipSubscription = True
        taskMaker.processWorkload()

        workload.save(workloadName)

        return workload




    def getConfig(self):
        """
        _getConfig_

        This is the global test configuration object
        """



        config = Configuration()

        config.component_("Agent")
        config.Agent.WMSpecDirectory = self.testDir
        config.Agent.agentName       = 'testAgent'
        config.Agent.componentName   = 'test'


        # First the general stuff
        config.section_("General")
        config.General.workDir = os.getenv("TESTDIR", self.testDir)

        # Now the CoreDatabase information
        # This should be the dialect, dburl, etc

        config.section_("CoreDatabase")
        config.CoreDatabase.connectUrl = os.getenv("DATABASE")
        config.CoreDatabase.socket     = os.getenv("DBSOCK")



        # JobCreator
        config.component_("JobCreator")
        config.JobCreator.namespace = 'WMComponent.JobCreator.JobCreator'
        config.JobCreator.logLevel  = 'DEBUG'
        config.JobCreator.maxThreads                = 1
        config.JobCreator.UpdateFromResourceControl = True
        config.JobCreator.pollInterval              = 10
        config.JobCreator.jobCacheDir               = self.testDir
        config.JobCreator.defaultJobType            = 'processing' #Type of jobs that we run, used for resource control
        config.JobCreator.workerThreads             = 2
        config.JobCreator.componentDir              = os.path.join(os.getcwd(), 'Components')



        # JobSubmitter
        config.component_("JobSubmitter")
        config.JobSubmitter.namespace     = 'WMComponent.JobSubmitter.JobSubmitter'
        config.JobSubmitter.logLevel      = 'INFO'
        config.JobSubmitter.maxThreads    = 1
        config.JobSubmitter.pollInterval  = 10
        config.JobSubmitter.pluginName    = 'CondorGlobusPlugin'
        config.JobSubmitter.pluginDir     = 'JobSubmitter.Plugins'
        config.JobSubmitter.submitDir     = os.path.join(self.testDir, 'submit')
        config.JobSubmitter.submitNode    = os.getenv("HOSTNAME", 'badtest.fnal.gov')
        config.JobSubmitter.submitScript  = os.path.join(getWMBASE(),
                                                         'test/python/WMComponent_t/JobSubmitter_t',
                                                         'submit.sh')
        config.JobSubmitter.componentDir  = os.path.join(os.getcwd(), 'Components')
        config.JobSubmitter.workerThreads = 2
        config.JobSubmitter.jobsPerWorker = 200




        # JobTracker
        config.component_("JobTracker")
        config.JobTracker.logLevel      = 'DEBUG'
        config.JobTracker.pollInterval  = 10
        config.JobTracker.trackerName   = 'CondorTracker'
        config.JobTracker.pluginDir     = 'WMComponent.JobTracker.Plugins'
        config.JobTracker.componentDir  = os.path.join(os.getcwd(), 'Components')
        config.JobTracker.runTimeLimit  = 7776000 #Jobs expire after 90 days
        config.JobTracker.idleTimeLimit = 7776000
        config.JobTracker.heldTimeLimit = 7776000
        config.JobTracker.unknTimeLimit = 7776000



        # JobAccountant
        config.component_("JobAccountant")
        config.JobAccountant.pollInterval = 60
        config.JobAccountant.componentDir = os.path.join(os.getcwd(), 'Components')
        config.JobAccountant.logLevel     = 'INFO'



        # JobArchiver
        config.component_("JobArchiver")
        config.JobArchiver.pollInterval          = 60
        config.JobArchiver.logLevel              = 'INFO'
        config.JobArchiver.logDir                = os.path.join(self.testDir, 'logs')
        config.JobArchiver.componentDir          = os.path.join(os.getcwd(), 'Components')
        config.JobArchiver.numberOfJobsToCluster = 1000



        # Task Archiver
        config.component_("TaskArchiver")
        config.TaskArchiver.componentDir    = self.testInit.generateWorkDir()
        config.TaskArchiver.WorkQueueParams = {}
        config.TaskArchiver.pollInterval    = 60
        config.TaskArchiver.logLevel        = 'INFO'
        config.TaskArchiver.timeOut         = 0



        # JobStateMachine
        config.component_('JobStateMachine')
        config.JobStateMachine.couchurl        = os.getenv('COUCHURL',
                                                           'mnorman:[email protected]:5984')
        config.JobStateMachine.couchDBName     = "mnorman_test"


        # Needed, because this is a test
        os.makedirs(config.JobSubmitter.submitDir)


        return config



    def createFileCollection(self, name, nSubs, nFiles, workflowURL = 'test', site = None):
        """
        _createFileCollection_

        Create a collection of files for splitting into jobs
        """

        myThread = threading.currentThread()

        testWorkflow = Workflow(spec = workflowURL, owner = "mnorman",
                                name = name, task="/TestWorkload/ReReco")
        testWorkflow.create()

        for sub in range(nSubs):

            nameStr = '%s-%i' % (name, sub)

            testFileset = Fileset(name = nameStr)
            testFileset.create()

            for f in range(nFiles):
                # pick a random site unless one was specified
                if not site:
                    tmpSite = 'se.%s' % (random.choice(self.sites))
                else:
                    tmpSite = 'se.%s' % (site)
                testFile = File(lfn = "/lfn/%s/%i" % (nameStr, f), size = 1024, events = 10)
                testFile.setLocation(tmpSite)
                testFile.create()
                testFileset.addFile(testFile)

            testFileset.commit()
            testFileset.markOpen(isOpen = 0)
            testSubscription = Subscription(fileset = testFileset,
                                            workflow = testWorkflow,
                                            type = "Processing",
                                            split_algo = "FileBased")
            testSubscription.create()


        return



    def createReports(self, jobs, retryCount = 0):
        """
        _createReports_

        Create some dummy job reports for each job
        """


        report = Report()
        report.addStep('testStep', 0)

        for job in jobs:
            #reportPath = os.path.join(job['cache_dir'], 'Report.%i.pkl' % (retryCount))
            reportPath = job['fwjr_path']
            if os.path.exists(reportPath):
                os.remove(reportPath)
            report.save(reportPath)


        return



    def testA_StraightThrough(self):
        """
        _StraightThrough_

        Just run everything straight through without any variations
        """
        # Do pre-submit job check
        nRunning = getCondorRunningJobs()
        self.assertEqual(nRunning, 0, "User currently has %i running jobs.  Test will not continue" % (nRunning))

        myThread = threading.currentThread()
        workload = self.createTestWorkload()
        config   = self.getConfig()


        name         = 'WMAgent_Test1'
        site         = self.sites[0]
        nSubs        = 5
        nFiles       = 10
        workloadPath = os.path.join(self.testDir, 'workloadTest',
                                    'TestWorkload', 'WMSandbox',
                                    'WMWorkload.pkl')

        # Create a collection of files
        self.createFileCollection(name = name, nSubs = nSubs,
                                  nFiles = nFiles,
                                  workflowURL = workloadPath,
                                  site = site)



        ############################################################
        # Test the JobCreator


        config.Agent.componentName = 'JobCreator'
        testJobCreator = JobCreatorPoller(config = config)

        testJobCreator.algorithm()
        time.sleep(5)


        # Did all jobs get created?
        getJobsAction = self.daoFactory(classname = "Jobs.GetAllJobs")
        result = getJobsAction.execute(state = 'Created', jobType = "Processing")
        self.assertEqual(len(result), nSubs*nFiles)


        # Count database objects
        result = myThread.dbi.processData('SELECT * FROM wmbs_sub_files_acquired')[0].fetchall()
        self.assertEqual(len(result), nSubs * nFiles)


        # Find the test directory
        testDirectory = os.path.join(self.testDir, 'TestWorkload', 'ReReco')
        self.assertTrue('JobCollection_1_0' in os.listdir(testDirectory))
        self.assertTrue(len(os.listdir(testDirectory)) <= 20)

        groupDirectory = os.path.join(testDirectory, 'JobCollection_1_0')

        # First job should be in here
        self.assertTrue('job_1' in os.listdir(groupDirectory))
        jobFile = os.path.join(groupDirectory, 'job_1', 'job.pkl')
        self.assertTrue(os.path.isfile(jobFile))
        f = open(jobFile, 'r')
        job = cPickle.load(f)
        f.close()


        self.assertEqual(job['workflow'], name)
        self.assertEqual(len(job['input_files']), 1)
        self.assertEqual(os.path.basename(job['sandbox']), 'TestWorkload-Sandbox.tar.bz2')


        ###############################################################
        # Now test the JobSubmitter

        config.Agent.componentName = 'JobSubmitter'
        testJobSubmitter = JobSubmitterPoller(config = config)


        testJobSubmitter.algorithm()


        # Check that jobs are in the right state
        result = getJobsAction.execute(state = 'Created', jobType = "Processing")
        self.assertEqual(len(result), 0)
        result = getJobsAction.execute(state = 'Executing', jobType = "Processing")
        self.assertEqual(len(result), nSubs * nFiles)



        # Check assigned locations
        getLocationAction = self.daoFactory(classname = "Jobs.GetLocation")
        for id in result:
            loc = getLocationAction.execute(jobid = id)
            self.assertEqual(loc, [[site]])


        # Check to make sure we have running jobs
        nRunning = getCondorRunningJobs()
        self.assertEqual(nRunning, nFiles * nSubs)


        #################################################################
        # Now the JobTracker


        config.Agent.componentName = 'JobTracker'
        testJobTracker = JobTrackerPoller(config = config)
        testJobTracker.setup()

        testJobTracker.algorithm()

        # Running the algo without removing the jobs should do nothing
        result = getJobsAction.execute(state = 'Executing', jobType = "Processing")
        self.assertEqual(len(result), nSubs * nFiles)


        condorRM()
        time.sleep(1)

        # All jobs gone?
        nRunning = getCondorRunningJobs()
        self.assertEqual(nRunning, 0)


        testJobTracker.algorithm()
        time.sleep(5)

        # Now that the jobs are gone from condor, the algo should mark them Complete
        result = getJobsAction.execute(state = 'Executing', jobType = "Processing")
        self.assertEqual(len(result), 0)
        result = getJobsAction.execute(state = 'Complete', jobType = "Processing")
        self.assertEqual(len(result), nSubs * nFiles)




        #################################################################
        # Now the JobAccountant

        # First you need to load all jobs


        self.getFWJRAction = self.daoFactory(classname = "Jobs.GetFWJRByState")
        completeJobs       = self.getFWJRAction.execute(state = "complete")


        # Create reports for all jobs
        self.createReports(jobs = completeJobs, retryCount = 0)






        config.Agent.componentName = 'JobAccountant'
        testJobAccountant = JobAccountantPoller(config = config)
        testJobAccountant.setup()


        # It should do something with the jobs
        testJobAccountant.algorithm()


        # All the jobs should be done now
        result = getJobsAction.execute(state = 'Complete', jobType = "Processing")
        self.assertEqual(len(result), 0)
        result = getJobsAction.execute(state = 'Success', jobType = "Processing")
        self.assertEqual(len(result), nSubs * nFiles)



        #######################################################################
        # Now the JobArchiver


        config.Agent.componentName = 'JobArchiver'
        testJobArchiver = JobArchiverPoller(config = config)


        testJobArchiver.algorithm()

        # All the jobs should be cleaned up
        result = getJobsAction.execute(state = 'Success', jobType = "Processing")
        self.assertEqual(len(result), 0)
        result = getJobsAction.execute(state = 'Cleanout', jobType = "Processing")
        self.assertEqual(len(result), nSubs * nFiles)


        logDir = os.path.join(self.testDir, 'logs')

        for job in completeJobs:
            self.assertFalse(os.path.exists(job['fwjr_path']))
            jobFolder = 'JobCluster_%i' \
                    % (int(job['id']/config.JobArchiver.numberOfJobsToCluster))
            jobPath = os.path.join(logDir, jobFolder, 'Job_%i.tar' %(job['id']))
            self.assertTrue(os.path.isfile(jobPath))
            self.assertTrue(os.path.getsize(jobPath) > 0)




        ###########################################################################
        # Now the TaskArchiver


        config.Agent.componentName = 'TaskArchiver'
        testTaskArchiver = TaskArchiverPoller(config = config)


        testTaskArchiver.algorithm()


        result = getJobsAction.execute(state = 'Cleanout', jobType = "Processing")
        self.assertEqual(len(result), 0)


        for jdict in completeJobs:
            job = Job(id = jdict['id'])
            self.assertFalse(job.exists())





        if os.path.isdir('testDir'):
            shutil.rmtree('testDir')
        shutil.copytree(self.testDir, os.path.join(os.getcwd(), 'testDir'))




        return
Example #55
class BaseTest(unittest.TestCase):
    """
    Some methods of this class are made static and are used from
    other test cases.

    """
    def setUp(self):
        self.testInit = TestInit(__file__)
        self.testInit.setLogging(logLevel=logging.DEBUG)
        self.testDir = self.testInit.generateWorkDir()
        self.config = getConfig(self.testDir)
        self.testComponentDaemonXml = "/tmp/TestComponent/Daemon.xml"

    def tearDown(self):
        self.testInit.delWorkDir()
        self.generator = None
        # if the "/tmp/TestComponent/Daemon.xml" file and its directory still
        # exist after the ComponentsPoller test, delete them
        d = os.path.dirname(self.testComponentDaemonXml)
        if os.path.exists(d):
            shutil.rmtree(d)

    def testSenderReceiverBasic(self):
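        # round-trip check: send a single Alert through Sender and verify that
        # the test receiver's handler ends up with exactly that alert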
        sender = Sender(self.config.Alert.address,
                        self.config.Alert.controlAddr, self.__class__.__name__)
        handler, receiver = utils.setUpReceiver(self.config.Alert.address,
                                                self.config.Alert.controlAddr)
        a = Alert(Component=inspect.stack()[0][3])
        sender(a)
        while len(handler.queue) == 0:
            time.sleep(0.5)
            print "%s waiting for alert to arrive" % inspect.stack()[0][3]
        receiver.shutdown()
        self.assertEqual(len(handler.queue), 1)
        self.assertEqual(handler.queue[0]["Component"], inspect.stack()[0][3])

    def testProcessDetailBasic(self):
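        # ProcessDetail built on the current process should report the same
        # pid, name and child-process count that psutil reports directly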
        pid = os.getpid()
        name = inspect.stack()[0][3]  # test name
        pd = ProcessDetail(pid, name)
        self.assertEqual(pd.pid, pid)
        self.assertEqual(pd.name, name)
        self.assertEqual(pd.proc.pid, pid)
        numChildren = len(psutil.Process(pid).get_children())
        self.assertEqual(len(pd.children), numChildren)
        self.assertEqual(len(pd.allProcs), 1 + numChildren)
        d = pd.getDetails()
        self.assertEqual(d["pid"], pid)
        self.assertEqual(d["component"], name)
        self.assertEqual(d["numChildrenProcesses"], numChildren)
        pd.refresh()

    def testMeasurementsBasic(self):
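        # Measurements behaves like a bounded list: append, index and clear,
        # while remembering its configured capacity (_numOfMeasurements)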
        numMes = 10
        mes = Measurements(numMes)
        self.assertEqual(mes._numOfMeasurements, numMes)
        self.assertEqual(len(mes), 0)
        mes.append(20)
        self.assertEqual(len(mes), 1)
        self.assertEqual(mes[0], 20)
        mes.append(30)
        self.assertEqual(mes[1], 30)
        mes.clear()
        self.assertEqual(len(mes), 0)
        self.assertEqual(mes._numOfMeasurements, numMes)

    def testBasePollerBasic(self):
        config = getConfig("/tmp")
        # create a nonsense config section; we just need a bunch of values defined
        config.AlertGenerator.section_("bogusPoller")
        config.AlertGenerator.bogusPoller.soft = 5  # [percent]
        config.AlertGenerator.bogusPoller.critical = 50  # [percent]
        config.AlertGenerator.bogusPoller.pollInterval = 2  # [second]
        config.AlertGenerator.bogusPoller.period = 10

        generator = utils.AlertGeneratorMock(config)
        poller = BasePoller(config.AlertGenerator.bogusPoller, generator)
        # define dummy check method
        poller.check = lambda: 1 + 1
        poller.start()
        # poller now runs
        time.sleep(config.AlertGenerator.bogusPoller.pollInterval * 2)
        poller.terminate()
        while poller.is_alive():
            time.sleep(0.2)
            print "%s waiting for test poller to terminate" % inspect.stack(
            )[0][3]

    def testBasePollerHandleFailedPolling(self):
        config = getConfig("/tmp")
        # create a nonsense config section; we just need a bunch of values defined
        config.AlertGenerator.section_("bogusPoller")
        config.AlertGenerator.bogusPoller.soft = 5  # [percent]
        config.AlertGenerator.bogusPoller.critical = 50  # [percent]
        config.AlertGenerator.bogusPoller.pollInterval = 2  # [second]
        config.AlertGenerator.bogusPoller.period = 10

        generator = utils.AlertGeneratorMock(config)
        poller = BasePoller(config.AlertGenerator.bogusPoller, generator)
        ex = Exception("test exception")

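        # minimal stand-in sender that just records the last alert passed to it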
        class Sender(object):
            def __call__(self, alert):
                self.alert = alert

        poller.sender = Sender()
        poller._handleFailedPolling(ex)
        self.assertEqual(poller.sender.alert["Source"], "BasePoller")

    def testPeriodPollerOnRealProcess(self):
        config = getConfig("/tmp")
        config.component_("AlertProcessor")
        config.AlertProcessor.section_("critical")
        config.AlertProcessor.section_("soft")
        config.AlertProcessor.critical.level = 5
        config.AlertProcessor.soft.level = 0
        config.component_("AlertGenerator")
        config.AlertGenerator.section_("bogusPoller")
        config.AlertGenerator.bogusPoller.soft = 5  # [percent]
        config.AlertGenerator.bogusPoller.critical = 50  # [percent]
        config.AlertGenerator.bogusPoller.pollInterval = 0.2  # [second]
        # period during which measurements are collected before evaluating for
        # possible alert triggering
        config.AlertGenerator.bogusPoller.period = 1

        generator = utils.AlertGeneratorMock(config)
        poller = PeriodPoller(config.AlertGenerator.bogusPoller, generator)
        poller.sender = utils.SenderMock()
        # get CPU usage percentage, it's like measuring CPU usage of a real
        # component, so use the appropriate poller's method for that
        # (PeriodPoller itself is a higher-level class, so it doesn't define
        # a method to provide sampling data)
        poller.sample = lambda processDetail: ComponentsCPUPoller.sample(
            processDetail)

        # get own pid
        pid = os.getpid()
        name = inspect.stack()[0][3]  # test name
        pd = ProcessDetail(pid, name)
        # need to repeat sampling for the required number of measurements
        numOfMeasurements = int(config.AlertGenerator.bogusPoller.period /
                                config.AlertGenerator.bogusPoller.pollInterval)
        mes = Measurements(numOfMeasurements)
        self.assertEqual(len(mes), 0)
        for i in range(mes._numOfMeasurements):
            poller.check(pd, mes)

        # since the whole measurement cycle was done, values should have been nulled
        self.assertEqual(len(mes), 0)

    def testPeriodPollerCalculationPredefinedInput(self):
        config = getConfig("/tmp")
        config.component_("AlertProcessor")
        config.AlertProcessor.section_("critical")
        config.AlertProcessor.section_("soft")
        config.AlertProcessor.critical.level = 5
        config.AlertProcessor.soft.level = 0
        config.component_("AlertGenerator")
        config.AlertGenerator.section_("bogusPoller")
        # put some threshold numbers, just need to check output calculation
        # from check() method
        config.AlertGenerator.bogusPoller.soft = 5  # [percent]
        config.AlertGenerator.bogusPoller.critical = 50  # [percent]
        config.AlertGenerator.bogusPoller.pollInterval = 0.2  # [second]
        config.AlertGenerator.bogusPoller.period = 1

        generator = utils.AlertGeneratorMock(config)
        poller = PeriodPoller(config.AlertGenerator.bogusPoller, generator)
        # since poller may trigger an alert, give it mock sender
        poller.sender = utils.SenderMock()
        # provide sample method with predefined input, float
        predefInput = 10.12
        poller.sample = lambda processDetail: predefInput

        processDetail = None
        numOfMeasurements = int(config.AlertGenerator.bogusPoller.period /
                                config.AlertGenerator.bogusPoller.pollInterval)
        mes = Measurements(numOfMeasurements)
        for i in range(mes._numOfMeasurements):
            poller.check(processDetail, mes)

        # the above loop should have run 5 times, so 5 x predefInput values get
        # evaluated; their average is 10.12, which exceeds the soft threshold
        # and should trigger a soft alert
        self.assertEqual(len(poller.sender.queue), 1)
        a = poller.sender.queue[0]

        self.assertEqual(a["Component"], generator.__class__.__name__)
        self.assertEqual(a["Source"], poller.__class__.__name__)
        d = a["Details"]
        self.assertEqual(d["threshold"],
                         "%s%%" % config.AlertGenerator.bogusPoller.soft)
        self.assertEqual(d["numMeasurements"], mes._numOfMeasurements)
        self.assertEqual(d["period"], config.AlertGenerator.bogusPoller.period)
        self.assertEqual(d["average"], "%s%%" % predefInput)
        # since the whole measurement cycle was done, values should have been nulled
        self.assertEqual(len(mes), 0)
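
        # illustrative arithmetic behind the assertions above: 5 samples
        # (period / pollInterval = 1 / 0.2) of 10.12 average to 10.12, which
        # exceeds the 5% soft threshold, so exactly one soft alert is queued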