def setUp(self):
    """Prepare couch databases, the WMBS schema and a test workflow spec."""
    self.transitions = Transitions()
    self.testInit = TestInitCouchApp(__file__)
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    # One couch database per document type exercised by the change-state code.
    for dbName, couchApp in (("changestate_t/jobs", "JobDump"),
                             ("changestate_t/fwjrs", "FWJRDump"),
                             ("job_summary", "WMStats")):
        self.testInit.setupCouch(dbName, couchApp)
    self.testInit.setSchema(customModules=["WMCore.WMBS"], useDefault=False)
    thread = threading.currentThread()
    self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                 logger=thread.logger,
                                 dbinterface=thread.dbi)
    self.couchServer = CouchServer(dburl=os.getenv("COUCHURL"))
    self.config = self.testInit.getConfiguration()
    self.taskName = "/TestWorkflow/ReReco1"
    self.specGen = WMSpecGenerator()
    self.specUrl = self.specGen.createProcessingSpec("TestWorkflow", returnType="file")
def setUp(self):
    """Create a scratch spec and a rucio-configured local queue for the test."""
    WorkQueueTestCase.setUp(self)
    self.cacheDir = tempfile.mkdtemp()
    self.specGenerator = WMSpecGenerator(self.cacheDir)
    self.specs = self.createReRecoSpec(1, "file")
    # setup rucio parameters for global/local queue
    self.queueParams = {'log_reporter': "lq_profile_test",
                        'rucioAccount': "wma_test",
                        'rucioAuthUrl': "http://cmsrucio-int.cern.ch",
                        'rucioUrl': "https://cmsrucio-auth-int.cern.ch"}
    # Create queues
    self.localQueue = localQueue(DbName=self.queueDB,
                                 InboxDbName=self.queueInboxDB,
                                 NegotiationTimeout=0,
                                 QueueURL='global.example.com',
                                 CacheDir=self.cacheDir,
                                 central_logdb_url="%s/%s" % (self.couchURL, self.logDBName),
                                 **self.queueParams)
def setUp(self):
    """
    _setUp_

    Prepare couch databases, the spec generator and rucio parameters.
    """
    super(WorkQueueTest, self).setUp()
    self.specGenerator = WMSpecGenerator("WMSpecs")
    self.schema = []
    self.couchApps = ["WorkQueue"]
    self.testInit = TestInitCouchApp('WorkQueueServiceTest')
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=self.schema, useDefault=False)
    # Global queue + inbox, and the matching local pair.
    for dbName in ('workqueue_t', 'workqueue_t_inbox',
                   'local_workqueue_t', 'local_workqueue_t_inbox'):
        self.testInit.setupCouch(dbName, *self.couchApps)
    self.testInit.generateWorkDir()
    # setup rucio parameters for global/local queue
    self.queueParams = {'log_reporter': "Services_WorkQueue_Unittest",
                        'rucioAccount': "wma_test",
                        'rucioAuthUrl': "http://cmsrucio-int.cern.ch",
                        'rucioUrl': "https://cmsrucio-auth-int.cern.ch"}
    if PY3:
        # assertItemsEqual was renamed in Python 3's unittest
        self.assertItemsEqual = self.assertCountEqual
class WorkQueueProfileTest(WorkQueueTestCase):
    """
    _WorkQueueTest_

    Profiling test: queue several ReReco specs into the global queue and
    dump cProfile statistics for the queueWork calls.
    """

    def setUp(self):
        """
        If we dont have a wmspec file create one

        Warning: For the real profiling test including spec generation.
        need to use real spec instead of using emulator generated spec
        which doesn't include couchDB access and cmssw access
        """
        EmulatorHelper.setEmulators(phedex=True, dbs=True, siteDB=True, requestMgr=True)
        WorkQueueTestCase.setUp(self)
        self.cacheDir = tempfile.mkdtemp()
        self.specGenerator = WMSpecGenerator(self.cacheDir)
        self.specNamePrefix = "TestReReco_"
        self.specs = self.createReRecoSpec(5, "file")
        # Create queues
        self.globalQueue = globalQueue(DbName=self.globalQDB,
                                       InboxDbName=self.globalQInboxDB,
                                       NegotiationTimeout=0)

    def tearDown(self):
        """tearDown"""
        WorkQueueTestCase.tearDown(self)
        try:
            self.specGenerator.removeSpecs()
        except Exception:
            # Best-effort cleanup only.  The previous bare "except:" also
            # swallowed SystemExit/KeyboardInterrupt; narrow it to Exception.
            pass
        EmulatorHelper.resetEmulators()

    def createReRecoSpec(self, numOfSpec, type="spec"):
        """Create *numOfSpec* ReReco specs; *type* selects the spec return form."""
        specs = []
        for i in range(numOfSpec):
            specName = "%s%s" % (self.specNamePrefix, (i + 1))
            specs.append(self.specGenerator.createReRecoSpec(specName, type))
        return specs

    def createProfile(self, name, function):
        """Profile *function*, dump stats to file *name*, print top summaries."""
        statsFile = name  # renamed: "file" shadowed the builtin
        prof = cProfile.Profile()
        prof.runcall(function)
        prof.dump_stats(statsFile)
        p = pstats.Stats(statsFile)
        p.strip_dirs().sort_stats("cumulative").print_stats(0.1)
        p.strip_dirs().sort_stats("time").print_stats(0.1)
        p.strip_dirs().sort_stats("calls").print_stats(0.1)

    def testQueueElementProfile(self):
        self.createProfile("queueElementProfile.prof", self.multipleQueueWorkCall)

    def multipleQueueWorkCall(self):
        # Queue every generated spec under a unique request name.
        i = 0
        for wmspec in self.specs:
            i += 1
            self.globalQueue.queueWork(wmspec, self.specNamePrefix + str(i), "test_team")
class WorkQueueTest(unittest.TestCase):
    """
    Test WorkQueue Service client
    It will start WorkQueue RESTService
    Server DB sets from environment variable.
    Client DB sets from environment variable.
    This checks whether DS call makes without error and return the results.
    Not the correctness of functions. That will be tested in different module.
    """

    def setUp(self):
        """Bring up the emulators, couch databases and spec generator."""
        EmulatorHelper.setEmulators(phedex=True, dbs=True,
                                    siteDB=True, requestMgr=True)
        self.specGenerator = WMSpecGenerator("WMSpecs")
        self.schema = []
        self.couchApps = ["WorkQueue"]
        self.testInit = TestInitCouchApp('WorkQueueServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=self.schema, useDefault=False)
        for dbName in ('workqueue_t', 'workqueue_t_inbox'):
            self.testInit.setupCouch(dbName, *self.couchApps)

    def tearDown(self):
        """Drop the couch databases and remove the generated specs."""
        self.testInit.tearDownCouch()
        self.specGenerator.removeSpecs()

    def testWorkQueueService(self):
        # Queue work from a fresh ReReco spec, then exercise each DS call.
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(specName, "file")
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl)
        self.assertTrue(globalQ.queueWork(specUrl, "RerecoSpec", "teamA") > 0)
        wqApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # Only verifies that the client calls complete, not full correctness.
        self.assertEqual(wqApi.getTopLevelJobsByRequest(),
                         [{'total_jobs': 2, 'request_name': specName}])
        self.assertEqual(wqApi.getChildQueues(), [])
        self.assertEqual(wqApi.getJobStatusByRequest(),
                         [{'status': 'Available', 'jobs': 2, 'request_name': specName}])
        self.assertEqual(wqApi.getChildQueuesByRequest(), [])
        self.assertEqual(wqApi.getWMBSUrl(), [])
        self.assertEqual(wqApi.getWMBSUrlByRequest(), [])
def createWorkload(self):
    """Build and return a ReReco test workload."""
    return WMSpecGenerator().createReRecoSpec("Tier1ReReco")
def setUp(self):
    """
    _setUp_

    Setup some reasonable defaults for the ReReco workflow.
    """
    self.unmergedLFNBase = "/store/backfill/2/unmerged"
    self.mergedLFNBase = "/store/backfill/2"
    self.processingVersion = "v1"
    self.cmsswVersion = "CMSSW_3_4_2_patch1"
    self.acquisitionEra = "WMAgentCommissioining10"
    self.primaryDataset = "MinimumBias"
    self.workload = WMSpecGenerator().createReRecoSpec("Tier1ReReco")
    # Was a Python-2-only print statement; the call form works on both 2 and 3.
    print(self.workload.data)
class RequestManager(object):
    """Minimal RequestManager emulator handing out fake processing specs."""

    def __init__(self, *args, **kwargs):
        # Was a Python-2-only print statement; the call form works on both 2 and 3.
        print("Using RequestManager Emulator ...")
        self.specGenerator = WMSpecGenerator()
        self.count = 0
        self.maxWmSpec = 1

    def getAssignment(self, teamName=None, request=None):
        """Return one {specName: specUrl} assignment until maxWmSpec, then {}."""
        if self.count < self.maxWmSpec:
            specName = "FakeProcessingSpec_%s" % self.count
            specUrl = self.specGenerator.createProcessingSpec(specName, "file")
            self.count += 1
            return {specName: specUrl}
        return {}

    def postAssignment(self, requestName, prodAgentUrl=None):
        # Do nothing; the emulator ignores the success/failure report.
        return
def setUp(self):
    """Create a scratch ReReco spec and a local work queue for the test."""
    EmulatorHelper.setEmulators(phedex=False, dbs=False, siteDB=True, requestMgr=True)
    WorkQueueTestCase.setUp(self)
    self.cacheDir = tempfile.mkdtemp()
    self.specGenerator = WMSpecGenerator(self.cacheDir)
    self.specs = self.createReRecoSpec(1, "file")
    # Local queue pointed at a placeholder global queue URL.
    queueArgs = dict(DbName=self.queueDB,
                     InboxDbName=self.queueInboxDB,
                     NegotiationTimeout=0,
                     QueueURL='global.example.com',
                     CacheDir=self.cacheDir,
                     central_logdb_url="%s/%s" % (self.couchURL, self.logDBName),
                     log_reporter='lq_profile_test')
    self.localQueue = localQueue(**queueArgs)
def setUp(self):
    """
    If we dont have a wmspec file create one
    """
    EmulatorHelper.setEmulators(phedex=True, dbs=True, siteDB=True, requestMgr=True)
    WorkQueueTestCase.setUp(self)
    self.cacheDir = tempfile.mkdtemp()
    self.specGenerator = WMSpecGenerator(self.cacheDir)
    self.specs = self.createReRecoSpec(1, "file")
    # Local queue pointed at a placeholder global queue URL.
    self.localQueue = localQueue(DbName=self.queueDB,
                                 InboxDbName=self.queueInboxDB,
                                 NegotiationTimeout=0,
                                 QueueURL='global.example.com',
                                 CacheDir=self.cacheDir)
def setUp(self):
    """
    If we dont have a wmspec file create one

    Warning: For the real profiling test including spec generation.
    need to use real spec instead of using emulator generated spec
    which doesn't include couchDB access and cmssw access
    """
    WorkQueueTestCase.setUp(self)
    self.cacheDir = tempfile.mkdtemp()
    self.specGenerator = WMSpecGenerator(self.cacheDir)
    self.specNamePrefix = "TestReReco_"
    self.specs = self.createReRecoSpec(5, "file")
    # Global queue under profiling; no negotiation timeout.
    gqArgs = dict(DbName=self.globalQDB,
                  InboxDbName=self.globalQInboxDB,
                  NegotiationTimeout=0)
    self.globalQueue = globalQueue(**gqArgs)
def __init__(self, *args, **kwargs):
    """
    all the private valuable is defined for test values
    """
    self.specGenerator = WMSpecGenerator()
    self.count = 0
    self.maxWmSpec = kwargs.setdefault('numOfSpecs', 1)
    self.type = kwargs.setdefault("type", 'ReReco')
    if self.type not in ['ReReco', 'MonteCarlo']:
        # Fixed the Python-2-only "raise TypeError, msg" syntax (SyntaxError on py3).
        raise TypeError('unknown request type %s' % self.type)
    self.splitter = kwargs.setdefault('splitter', 'DatasetBlock')
    self.inputDataset = kwargs.setdefault('inputDataset', None)
    self.dbsUrl = kwargs.setdefault('dbsUrl', None)
    self.status = {}
    self.progress = {}
    self.msg = {}
    self.names = []
    import logging
    # NOTE(review): item assignment implies the enclosing class subclasses dict
    self['logger'] = logging
def setUp(self):
    """
    _setUp_

    Initialise couch databases and the spec generator for the service tests.
    """
    super(WorkQueueTest, self).setUp()
    self.specGenerator = WMSpecGenerator("WMSpecs")
    self.schema = []
    self.couchApps = ["WorkQueue"]
    self.testInit = TestInitCouchApp('WorkQueueServiceTest')
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=self.schema, useDefault=False)
    # Global queue + inbox, and the matching local pair.
    for dbName in ('workqueue_t', 'workqueue_t_inbox',
                   'local_workqueue_t', 'local_workqueue_t_inbox'):
        self.testInit.setupCouch(dbName, *self.couchApps)
    self.testInit.generateWorkDir()
def setUp(self):
    """
    _setUp_

    Start the emulators and set up the couch databases.
    """
    EmulatorHelper.setEmulators(phedex=True, dbs=True,
                                siteDB=True, requestMgr=True)
    self.specGenerator = WMSpecGenerator("WMSpecs")
    self.schema = []
    self.couchApps = ["WorkQueue"]
    self.testInit = TestInitCouchApp('WorkQueueServiceTest')
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=self.schema, useDefault=False)
    for dbName in ('workqueue_t', 'workqueue_t_inbox'):
        self.testInit.setupCouch(dbName, *self.couchApps)
def setUp(self):
    """
    If we dont have a wmspec file create one

    Warning: For the real profiling test including spec generation.
    need to use real spec instead of using emulator generated spec
    which doesn't include couchDB access and cmssw access
    """
    EmulatorHelper.setEmulators(phedex=True, dbs=True, siteDB=True, requestMgr=True)
    WorkQueueTestCase.setUp(self)
    self.cacheDir = tempfile.mkdtemp()
    self.specGenerator = WMSpecGenerator(self.cacheDir)
    self.specNamePrefix = "TestReReco_"
    self.specs = self.createReRecoSpec(5, "file")
    # Global queue under profiling; no negotiation timeout.
    self.globalQueue = globalQueue(DbName=self.globalQDB,
                                   InboxDbName=self.globalQInboxDB,
                                   NegotiationTimeout=0)
def setUp(self):
    """
    _setUp_

    Bring up the service emulators, then the couch databases.
    """
    EmulatorHelper.setEmulators(phedex=True, dbs=True, siteDB=True, requestMgr=True)
    self.specGenerator = WMSpecGenerator("WMSpecs")
    self.schema = []
    self.couchApps = ["WorkQueue"]
    self.testInit = TestInitCouchApp('WorkQueueServiceTest')
    self.testInit.setLogging()
    self.testInit.setDatabaseConnection()
    self.testInit.setSchema(customModules=self.schema, useDefault=False)
    self.testInit.setupCouch('workqueue_t', *self.couchApps)
    self.testInit.setupCouch('workqueue_t_inbox', *self.couchApps)
class LocalWorkQueueProfileTest(WorkQueueTestCase):
    """
    _WorkQueueTest_

    Profiling test for pulling work out of a local queue.
    """

    def setUp(self):
        """Create a scratch spec and a local queue for the profiling run."""
        WorkQueueTestCase.setUp(self)
        self.cacheDir = tempfile.mkdtemp()
        self.specGenerator = WMSpecGenerator(self.cacheDir)
        self.specs = self.createReRecoSpec(1, "file")
        # Local queue pointed at a placeholder global queue URL.
        self.localQueue = localQueue(DbName=self.queueDB,
                                     InboxDbName=self.queueInboxDB,
                                     NegotiationTimeout=0,
                                     QueueURL='global.example.com',
                                     CacheDir=self.cacheDir,
                                     central_logdb_url="%s/%s" % (self.couchURL, self.logDBName),
                                     log_reporter='lq_profile_test')

    def tearDown(self):
        """tearDown"""
        WorkQueueTestCase.tearDown(self)
        try:
            shutil.rmtree(self.cacheDir)
            self.specGenerator.removeSpecs()
        except Exception:
            pass

    def createReRecoSpec(self, numOfSpec, kind="spec"):
        """Generate *numOfSpec* ReReco specs of the given return *kind*."""
        return [self.specGenerator.createReRecoSpec(
                    "MinBiasProcessingSpec_Test_%s" % (idx + 1), kind)
                for idx in range(numOfSpec)]

    def createProfile(self, name, function):
        """Profile *function*, dump stats to *name* and print summaries."""
        statsFile = name
        profiler = cProfile.Profile()
        profiler.runcall(function)
        profiler.dump_stats(statsFile)
        stats = pstats.Stats(statsFile)
        stats.strip_dirs().sort_stats('cumulative').print_stats(0.1)
        stats.strip_dirs().sort_stats('time').print_stats(0.1)
        stats.strip_dirs().sort_stats('calls').print_stats(0.1)
        stats.strip_dirs().sort_stats('name').print_stats(10)

    def testGetWorkLocalQueue(self):
        for idx, spec in enumerate(self.specs, start=1):
            specName = "MinBiasProcessingSpec_Test_%s" % idx
            self.localQueue.queueWork(spec, specName, team="A-team")
        self.localQueue.updateLocationInfo()
        self.createProfile('getWorkProfile.prof', self.localQueueGetWork)

    def localQueueGetWork(self):
        # Ask for far more slots than any site needs so everything is handed out.
        siteJobs = dict((site, 100000) for site in DUMMY_SITES)
        self.localQueue.getWork(siteJobs, {})
class WorkQueueTest(EmulatedUnitTestCase):
    """
    Test WorkQueue Service client
    It will start WorkQueue RESTService
    Server DB sets from environment variable.
    Client DB sets from environment variable.
    This checks whether DS call makes without error and return the results.
    Not the correctness of functions. That will be tested in different module.
    """

    def setUp(self):
        """
        _setUp_
        """
        super(WorkQueueTest, self).setUp()
        self.specGenerator = WMSpecGenerator("WMSpecs")
        self.schema = []
        self.couchApps = ["WorkQueue"]
        self.testInit = TestInitCouchApp('WorkQueueServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=self.schema, useDefault=False)
        # Global queue + inbox, and the matching local pair.
        self.testInit.setupCouch('workqueue_t', *self.couchApps)
        self.testInit.setupCouch('workqueue_t_inbox', *self.couchApps)
        self.testInit.setupCouch('local_workqueue_t', *self.couchApps)
        self.testInit.setupCouch('local_workqueue_t_inbox', *self.couchApps)
        self.testInit.generateWorkDir()
        return

    def tearDown(self):
        """
        _tearDown_

        Drop all the WMBS tables.
        """
        self.testInit.tearDownCouch()
        self.specGenerator.removeSpecs()
        super(WorkQueueTest, self).tearDown()

    def testWorkQueueService(self):
        # test getWork
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(
            specName, "file", assignKwargs={'SiteWhitelist': ['T2_XX_SiteA']})
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl,
                              UnittestFlag=True)
        self.assertTrue(globalQ.queueWork(specUrl, specName, "teamA") > 0)
        wqApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        wqApi.defaultOptions = {'reduce': True, 'group': True}
        # This only checks minimum client call not exactly correctness of return values.
        self.assertEqual(wqApi.getTopLevelJobsByRequest(),
                         [{'total_jobs': 339, 'request_name': specName}])
        # work still available, so no childQueue
        results = wqApi.getChildQueuesAndStatus()
        self.assertItemsEqual(set([item['agent_name'] for item in results]),
                              ["AgentNotDefined"])
        result = wqApi.getElementsCountAndJobsByWorkflow()
        self.assertEqual(len(result), 1)
        self.assertEqual(result[specName]['Available']['Jobs'], 339)
        results = wqApi.getChildQueuesAndPriority()
        resultsPrio = set([item['priority'] for item in results
                           if item['agent_name'] == "AgentNotDefined"])
        self.assertItemsEqual(resultsPrio, [8000])
        self.assertEqual(wqApi.getWMBSUrl(), [])
        self.assertEqual(wqApi.getWMBSUrlByRequest(), [])

    def testUpdatePriorityService(self):
        """
        _testUpdatePriorityService_

        Check that we can update the priority correctly also
        check the available workflows feature
        """
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(
            specName, "file", assignKwargs={'SiteWhitelist': ["T2_XX_SiteA"]})
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl,
                              UnittestFlag=True)
        localQ = localQueue(DbName='local_workqueue_t',
                            QueueURL=self.testInit.couchUrl,
                            CacheDir=self.testInit.testDir,
                            ParentQueueCouchUrl='%s/workqueue_t' % self.testInit.couchUrl,
                            ParentQueueInboxCouchDBName='workqueue_t_inbox')
        # Try a full chain of priority update and propagation
        self.assertTrue(globalQ.queueWork(specUrl, "RerecoSpec", "teamA") > 0)
        globalApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        globalApi.defaultOptions = {'reduce': True, 'group': True}
        globalApi.updatePriority(specName, 100)
        self.assertEqual(globalQ.backend.getWMSpec(specName).priority(), 100)
        storedElements = globalQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 100)
        numWorks = localQ.pullWork({'T2_XX_SiteA': 10})
        self.assertTrue(numWorks > 0)
        # replicate from GQ to LQ manually
        localQ.backend.pullFromParent(continuous=False)
        # wait until replication is done
        time.sleep(2)
        localQ.processInboundWork(continuous=False)
        storedElements = localQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 100)
        localApi = WorkQueueDS(self.testInit.couchUrl, 'local_workqueue_t')
        # overwrite default - can't test with stale view
        localApi.defaultOptions = {'reduce': True, 'group': True}
        localApi.updatePriority(specName, 500)
        self.assertEqual(localQ.backend.getWMSpec(specName).priority(), 500)
        storedElements = localQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 500)
        availableWF = localApi.getAvailableWorkflows()
        self.assertEqual(availableWF, set([(specName, 500)]))
        # Attempt to update an inexistent workflow in the queue
        try:
            globalApi.updatePriority('NotExistent', 2)
        except Exception as ex:
            self.fail('No exception should be raised.: %s' % str(ex))

    def testCompletedWorkflow(self):
        # test getWork
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(
            specName, "file", assignKwargs={'SiteWhitelist': ['T2_XX_SiteA']})
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl,
                              UnittestFlag=True)
        self.assertTrue(globalQ.queueWork(specUrl, specName, "teamA") > 0)
        wqApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        wqApi.defaultOptions = {'reduce': True, 'group': True}
        # This only checks minimum client call not exactly correctness of return values.
        self.assertEqual(wqApi.getTopLevelJobsByRequest(),
                         [{'total_jobs': 339, 'request_name': specName}])
        results = wqApi.getJobsByStatus()
        self.assertEqual(results['Available']['sum_jobs'], 339)
        results = wqApi.getJobsByStatusAndPriority()
        resultsPrio = set([item['priority'] for item in results.get('Available')])
        self.assertItemsEqual(resultsPrio, [8000])
        resultsJobs = sum([item['sum_jobs'] for item in results.get('Available')
                           if item['priority'] == 8000])
        # BUG FIX: was assertTrue(resultsJobs, 339), which treats 339 as the
        # failure *message* and passes for any truthy value.
        self.assertEqual(resultsJobs, 339)
        result = wqApi.getElementsCountAndJobsByWorkflow()
        self.assertEqual(len(result), 1)
        self.assertEqual(result[specName]['Available']['Jobs'], 339)
        data = wqApi.db.loadView('WorkQueue', 'elementsDetailByWorkflowAndStatus',
                                 {'startkey': [specName], 'endkey': [specName, {}],
                                  'reduce': False})
        elements = [x['id'] for x in data.get('rows', [])]
        wqApi.updateElements(*elements, Status='Canceled')
        # load this view once again to make sure it will be updated in the next assert..
        data = wqApi.db.loadView('WorkQueue', 'elementsDetailByWorkflowAndStatus',
                                 {'startkey': [specName], 'endkey': [specName, {}],
                                  'reduce': False})
        self.assertEqual(len(wqApi.getCompletedWorkflow(stale=False)), 1)
        results = wqApi.getJobsByStatusAndPriority()
        resultsPrio = set([item['priority'] for item in results.get('Canceled')])
        self.assertItemsEqual(resultsPrio, [8000])

    def testConvertWQElementsStatusToWFStatus(self):
        """
        _testConvertWQElementsStatusToWFStatus_

        Check that a set of all the workqueue element status in a request
        properly maps to a single state to trigger the ReqMgr request transition.
        """
        # (element status set, expected request status) — table replaces the
        # previous wall of near-identical assertEqual calls.
        cases = [
            # workflows acquired by global_workqueue (nothing acquired by agents so far)
            ({"Available"}, "acquired"),
            ({"Negotiating"}, "acquired"),
            ({"Available", "Negotiating"}, "acquired"),
            # workflows not completely acquired yet by the agents
            ({"Acquired"}, "running-open"),
            ({"Available", "Negotiating", "Acquired"}, "running-open"),
            ({"Available", "Negotiating", "Acquired", "Running"}, "running-open"),
            ({"Available", "Negotiating", "Acquired", "Running", "Done"}, "running-open"),
            ({"Available", "Negotiating", "Acquired", "Running", "Done",
              "CancelRequested"}, "running-open"),
            ({"Available", "Negotiating", "Acquired", "Running", "Done",
              "CancelRequested", "Canceled"}, "running-open"),
            ({"Negotiating", "Acquired"}, "running-open"),
            ({"Negotiating", "Acquired", "Running"}, "running-open"),
            ({"Negotiating", "Acquired", "Running", "Done"}, "running-open"),
            ({"Negotiating", "Acquired", "Running", "Done", "CancelRequested"}, "running-open"),
            ({"Negotiating", "Acquired", "Running", "Done", "CancelRequested",
              "Canceled"}, "running-open"),
            ({"Acquired", "Running"}, "running-open"),
            ({"Acquired", "Running", "Done"}, "running-open"),
            ({"Acquired", "Running", "Done", "CancelRequested"}, "running-open"),
            ({"Acquired", "Running", "Done", "CancelRequested", "Canceled"}, "running-open"),
            # workflows completely acquired by the agents
            ({"Running"}, "running-closed"),
            ({"Running", "Done"}, "running-closed"),
            ({"Running", "Done", "CancelRequested"}, "running-closed"),
            ({"Running", "Done", "CancelRequested", "Canceled"}, "running-closed"),
            ({"Running", "Done", "Canceled"}, "running-closed"),
            # workflows completed/aborted/force-completed, thus existent elements
            # but no more active workqueue elements in the system
            ({"Done"}, "completed"),
            ({"Canceled"}, "completed"),
            ({"Done", "Canceled"}, "completed"),
            # workflows failed
            ({"Failed"}, "failed"),
            # non-failed workflows but with Failed elements
            ({"Available", "Negotiating", "Acquired", "Failed"}, "running-open"),
            ({"Available", "Negotiating", "Acquired", "Running", "Done",
              "Failed"}, "running-open"),
            ({"Negotiating", "Acquired", "Running", "Done", "Canceled",
              "Failed"}, "running-open"),
            ({"Running", "Failed"}, "running-closed"),
            ({"Running", "Done", "Canceled", "Failed"}, "running-closed"),
            ({"Done", "Failed"}, "completed"),
            ({"Canceled", "Failed"}, "completed"),
        ]
        for statuses, expected in cases:
            self.assertEqual(convertWQElementsStatusToWFStatus(statuses), expected)
        # workflows in a temporary state, nothing to do with them yet
        self.assertIsNone(convertWQElementsStatusToWFStatus({"Done", "CancelRequested"}))
        self.assertIsNone(convertWQElementsStatusToWFStatus({"CancelRequested"}))
class WorkQueueTest(unittest.TestCase):
    """
    Test WorkQueue Service client
    It will start WorkQueue RESTService
    Server DB sets from environment variable.
    Client DB sets from environment variable.
    This checks whether DS call makes without error and return the results.
    Not the correctness of functions. That will be tested in different module.
    """

    def setUp(self):
        """
        _setUp_
        """
        EmulatorHelper.setEmulators(phedex=True, dbs=True,
                                    siteDB=True, requestMgr=True)
        self.specGenerator = WMSpecGenerator("WMSpecs")
        self.schema = []
        self.couchApps = ["WorkQueue"]
        self.testInit = TestInitCouchApp('WorkQueueServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=self.schema, useDefault=False)
        self.testInit.setupCouch('workqueue_t', *self.couchApps)
        self.testInit.setupCouch('workqueue_t_inbox', *self.couchApps)
        self.testInit.setupCouch('local_workqueue_t', *self.couchApps)
        self.testInit.setupCouch('local_workqueue_t_inbox', *self.couchApps)
        self.testInit.generateWorkDir()
        return

    def tearDown(self):
        """
        _tearDown_

        Drop all the WMBS tables.
        """
        self.testInit.tearDownCouch()
        EmulatorHelper.resetEmulators()
        self.specGenerator.removeSpecs()

    def testWorkQueueService(self):
        # test getWork
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(specName, "file")
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl)
        self.assertTrue(globalQ.queueWork(specUrl, "RerecoSpec", "teamA") > 0)
        wqApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        wqApi.defaultOptions = {'reduce': True, 'group': True}
        # This only checks minimum client call not exactly correctness of return values.
        self.assertEqual(wqApi.getTopLevelJobsByRequest(),
                         [{'total_jobs': 10, 'request_name': specName}])
        self.assertEqual(wqApi.getChildQueues(), [])
        self.assertEqual(wqApi.getJobStatusByRequest(),
                         [{'status': 'Available', 'jobs': 10, 'request_name': specName}])
        self.assertEqual(wqApi.getChildQueuesByRequest(), [])
        self.assertEqual(wqApi.getWMBSUrl(), [])
        self.assertEqual(wqApi.getWMBSUrlByRequest(), [])

    def testUpdatePriorityService(self):
        """
        _testUpdatePriorityService_

        Check that we can update the priority correctly also
        check the available workflows feature
        """
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(specName, "file")
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl)
        localQ = localQueue(DbName='local_workqueue_t',
                            QueueURL=self.testInit.couchUrl,
                            CacheDir=self.testInit.testDir,
                            ParentQueueCouchUrl='%s/workqueue_t' % self.testInit.couchUrl,
                            ParentQueueInboxCouchDBName='workqueue_t_inbox')
        # Try a full chain of priority update and propagation
        self.assertTrue(globalQ.queueWork(specUrl, "RerecoSpec", "teamA") > 0)
        globalApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        globalApi.defaultOptions = {'reduce': True, 'group': True}
        globalApi.updatePriority(specName, 100)
        self.assertEqual(globalQ.backend.getWMSpec(specName).priority(), 100)
        storedElements = globalQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 100)
        self.assertTrue(localQ.pullWork({'T2_XX_SiteA': 10}) > 0)
        localQ.processInboundWork(continuous=False)
        storedElements = localQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 100)
        localApi = WorkQueueDS(self.testInit.couchUrl, 'local_workqueue_t')
        # overwrite default - can't test with stale view
        localApi.defaultOptions = {'reduce': True, 'group': True}
        localApi.updatePriority(specName, 500)
        self.assertEqual(localQ.backend.getWMSpec(specName).priority(), 500)
        storedElements = localQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 500)
        self.assertEqual(localApi.getAvailableWorkflows(),
                         set([(specName, 500)]))
        # Attempt to update an inexistent workflow in the queue
        try:
            globalApi.updatePriority('NotExistent', 2)
        except Exception:
            # Narrowed from a bare "except:" which also caught
            # SystemExit/KeyboardInterrupt.
            self.fail('No exception should be raised.')
class RequestManager(dict):
    """Emulated RequestManager service tracking fake request state in-memory."""

    def __init__(self, *args, **kwargs):
        """
        all the private valuable is defined for test values
        """
        self.specGenerator = WMSpecGenerator()
        self.count = 0
        self.maxWmSpec = kwargs.setdefault('numOfSpecs', 1)
        self.type = kwargs.setdefault("type", 'ReReco')
        if self.type not in ['ReReco', 'MonteCarlo']:
            # Fixed the Python-2-only "raise TypeError, msg" syntax.
            raise TypeError('unknown request type %s' % self.type)
        self.splitter = kwargs.setdefault('splitter', 'DatasetBlock')
        self.inputDataset = kwargs.setdefault('inputDataset', None)
        self.dbsUrl = kwargs.setdefault('dbsUrl', None)
        self.status = {}
        self.progress = {}
        self.msg = {}
        self.names = []
        import logging
        self['logger'] = logging

    def getAssignment(self, teamName=None, request=None):
        """Hand out one [requestName, specUrl] pair until maxWmSpec is reached."""
        if self.count < self.maxWmSpec:
            if self.type == 'ReReco':
                specName = "ReRecoTest_v%sEmulator" % self.count
                specUrl = self.specGenerator.createReRecoSpec(specName, "file",
                                                              self.splitter,
                                                              self.inputDataset,
                                                              self.dbsUrl)
            elif self.type == 'MonteCarlo':
                specName = "MCTest_v%sEmulator" % self.count
                specUrl = self.specGenerator.createMCSpec(specName, "file",
                                                          self.splitter)
            self.names.append(specName)
            self.status[specName] = 'assigned'
            self.count += 1
            # returns list of list(rquest name, spec url)
            return [[specName, specUrl], ]
        else:
            return []

    def putWorkQueue(self, reqName, prodAgentUrl=None):
        self.status[reqName] = 'acquired'

    def reportRequestStatus(self, name, status):
        if status not in NextStatus[self.status[name]]:
            # Fixed the Python-2-only "raise RuntimeError, msg" syntax.
            raise RuntimeError("Invalid status move: %s" % status)
        self.status[name] = status

    def reportRequestProgress(self, name, **args):
        self.progress.setdefault(name, {})
        self.progress[name].update(args)

    def sendMessage(self, request, msg):
        self.msg[request] = msg

    def _removeSpecs(self):
        """
        This is just for clean up not part of emulated function
        """
        self.specGenerator.removeSpecs()
class LocalWorkQueueProfileTest(WorkQueueTestCase):
    """
    _WorkQueueTest_
    """

    def setUp(self):
        """
        If we dont have a wmspec file create one
        """
        WorkQueueTestCase.setUp(self)
        # scratch area for generated specs and the queue cache; removed in tearDown
        self.cacheDir = tempfile.mkdtemp()
        self.specGenerator = WMSpecGenerator(self.cacheDir)
        self.specs = self.createReRecoSpec(1, "file")
        # setup rucio parameters for global/local queue
        self.queueParams = {}
        self.queueParams['log_reporter'] = "lq_profile_test"
        self.queueParams['rucioAccount'] = "wma_test"
        self.queueParams['rucioAuthUrl'] = "http://cmsrucio-int.cern.ch"
        self.queueParams['rucioUrl'] = "https://cmsrucio-auth-int.cern.ch"
        # Create queues
        self.localQueue = localQueue(DbName=self.queueDB,
                                     InboxDbName=self.queueInboxDB,
                                     NegotiationTimeout=0,
                                     QueueURL='global.example.com',
                                     CacheDir=self.cacheDir,
                                     central_logdb_url="%s/%s" % (self.couchURL, self.logDBName),
                                     **self.queueParams)

    def tearDown(self):
        """tearDown"""
        WorkQueueTestCase.tearDown(self)
        try:
            shutil.rmtree(self.cacheDir)
            self.specGenerator.removeSpecs()
        except Exception:
            # best-effort cleanup: a leftover temp dir must not fail the test run
            pass

    def createReRecoSpec(self, numOfSpec, kind="spec"):
        # Generate numOfSpec ReReco specs, all whitelisted to T2_XX_SiteA,
        # and return their spec handles/urls as a list.
        specs = []
        for i in range(numOfSpec):
            specName = "MinBiasProcessingSpec_Test_%s" % (i + 1)
            specs.append(
                self.specGenerator.createReRecoSpec(
                    specName, kind,
                    assignKwargs={'SiteWhitelist': ['T2_XX_SiteA']}))
        return specs

    def createProfile(self, name, function):
        # Profile `function` with cProfile, dump the raw stats to file `name`
        # and print summaries sorted by cumulative time, own time, call count
        # and name.
        fileName = name
        prof = cProfile.Profile()
        prof.runcall(function)
        prof.dump_stats(fileName)
        p = pstats.Stats(fileName)
        p.strip_dirs().sort_stats('cumulative').print_stats(0.1)
        p.strip_dirs().sort_stats('time').print_stats(0.1)
        p.strip_dirs().sort_stats('calls').print_stats(0.1)
        p.strip_dirs().sort_stats('name').print_stats(10)

    def testGetWorkLocalQueue(self):
        # Queue every generated spec into the local queue, refresh data
        # locations, then profile the getWork call.
        i = 0
        for spec in self.specs:
            i += 1
            specName = "MinBiasProcessingSpec_Test_%s" % i
            self.localQueue.queueWork(spec, specName, team="A-team")
        self.localQueue.updateLocationInfo()
        self.createProfile('getWorkProfile.prof', self.localQueueGetWork)

    def localQueueGetWork(self):
        # Request a deliberately huge job-slot count for every dummy site so
        # getWork releases as much work as possible (the profiled workload).
        siteJobs = {}
        for site in DUMMY_SITES:
            siteJobs[site] = 100000
        self.localQueue.getWork(siteJobs, {})
class ReportEmuTest(unittest.TestCase):
    """
    _ReportEmuTest_

    Verify that the FWJR emulator produces sane reports for a ReReco
    processing workflow.
    """

    def setUp(self):
        """
        _setUp_

        Setup some reasonable defaults for the ReReco workflow.
        """
        self.unmergedLFNBase = "/store/backfill/2/unmerged"
        self.mergedLFNBase = "/store/backfill/2"
        self.processingVersion = "v1"
        self.cmsswVersion = "CMSSW_3_4_2_patch1"
        self.acquisitionEra = "WMAgentCommissioining10"
        self.primaryDataset = "MinimumBias"

        self.workload = WMSpecGenerator().createReRecoSpec("Tier1ReReco")
        # print() call form works on both Python 2 and 3
        # (was the Py2-only statement: print self.workload.data)
        print(self.workload.data)
        return

    def verifyOutputMetaData(self, outputFile, job):
        """
        _verifyOutputMetaData_

        Verify the metadata in an emulated FWJR.  Most of the meta data
        should be the same as the input file.
        """
        # Collect the runs/lumis of the job's input files as the "golden"
        # reference the output file must reproduce exactly.
        goldenRuns = []
        for inputFile in job["input_files"]:
            for run in inputFile["runs"]:
                goldenRuns.append(Run(run.run, *run.lumis))

        assert len(outputFile["runs"]) == len(goldenRuns), \
            "Error: Wrong number of runs in output file."

        for outputRun in outputFile["runs"]:
            for goldenRun in goldenRuns:
                if outputRun.run == goldenRun.run:
                    goldenRun.lumis.sort()
                    outputRun.lumis.sort()
                    if goldenRun.lumis == outputRun.lumis:
                        goldenRuns.remove(goldenRun)
                        break

        assert len(goldenRuns) == 0, \
            "Error: Run information wrong on output file."

        assert len(outputFile["locations"]) == 1, \
            "Error: Wrong number of locations."
        assert list(outputFile["locations"])[0] == job["location"], \
            "Error: Output file at the wrong location."
        assert outputFile["merged"] == False, \
            "Error: Output should be unmerged."
        # membership test directly on the dict; .keys() was redundant
        assert "adler32" in outputFile["checksums"], \
            "Error: Adler32 checksum missing."
        assert "cksum" in outputFile["checksums"], \
            "Error: CKSum checksum missing."

        return

    def testProcessing(self):
        """
        _testProcessing_

        Setup a processing workflow and job and verify that the FWJR
        produced by the emulator is reasonable.
        """
        rerecoTask = self.workload.getTask("DataProcessing")
        cmsRunStep = rerecoTask.getStep("cmsRun1")

        inputFile = File(lfn="/path/to/test/lfn", size=1048576, events=1000,
                         merged=True)
        inputFile.addRun(Run(1, *[1, 2, 3, 4, 5]))
        inputFile.addRun(Run(2, *[1, 2, 3, 4, 5, 6]))

        processingJob = Job(name="ProcessingJob", files=[inputFile])
        processingJob["task"] = "/Tier1ReReco/ReReco"
        processingJob["mask"].setMaxAndSkipEvents(500, 0)
        processingJob["id"] = 1
        processingJob["location"] = "cmssrm.fnal.gov"

        emu = ReportEmu(WMStep=cmsRunStep.getTypeHelper(), Job=processingJob)
        report = emu()

        reportInputFiles = report.getInputFilesFromStep("cmsRun1")

        assert len(reportInputFiles) == 1, \
            "Error: Wrong number of input files for the job."
        assert reportInputFiles[0]["lfn"] == inputFile["lfn"], \
            "Error: Input LFNs do not match: %s" % reportInputFiles[0]["lfn"]
        assert reportInputFiles[0]["size"] == inputFile["size"], \
            "Error: Input file sizes do not match."
        assert reportInputFiles[0]["events"] == inputFile["events"], \
            "Error: Input file events do not match."

        goldenRuns = [Run(1, *[1, 2, 3, 4, 5]), Run(2, *[1, 2, 3, 4, 5, 6])]

        assert len(reportInputFiles[0]["runs"]) == len(goldenRuns), \
            "Error: Wrong number of runs in input file."

        for inputRun in reportInputFiles[0]["runs"]:
            for goldenRun in goldenRuns:
                if inputRun.run == goldenRun.run:
                    goldenRun.lumis.sort()
                    inputRun.lumis.sort()
                    if goldenRun.lumis == inputRun.lumis:
                        goldenRuns.remove(goldenRun)
                        break

        assert len(goldenRuns) == 0, \
            "Error: Run information wrong on input file."

        recoOutputFiles = report.getFilesFromOutputModule("cmsRun1", "RECOoutput")
        alcaOutputFiles = report.getFilesFromOutputModule("cmsRun1", "ALCARECOoutput")

        assert len(recoOutputFiles) == 1, \
            "Error: There should only be one RECO output file."
        assert len(alcaOutputFiles) == 1, \
            "Error: There should only be one ALCA output file."

        assert recoOutputFiles[0]["module_label"] == "RECOoutput", \
            "Error: RECO file has wrong output module."
        assert alcaOutputFiles[0]["module_label"] == "ALCARECOoutput", \
            "Error: ALCA file has wrong output module."

        self.verifyOutputMetaData(recoOutputFiles[0], processingJob)
        self.verifyOutputMetaData(alcaOutputFiles[0], processingJob)

        dataTierMap = {"RECOoutput": "RECO", "ALCARECOoutput": "ALCARECO"}
        for outputFile in [recoOutputFiles[0], alcaOutputFiles[0]]:
            assert outputFile["dataset"]["applicationName"] == "cmsRun", \
                "Error: Application name is incorrect."
            assert outputFile["dataset"]["primaryDataset"] == self.primaryDataset, \
                "Error: Primary dataset is incorrect."
            assert outputFile["dataset"]["dataTier"] == dataTierMap[outputFile["module_label"]], \
                "Error: Data tier is incorrect."

        return

    def testMerge(self):
        """
        _testMerge_

        Setup a merge workflow and job and verify that the FWJR produced by
        the emulator is reasonable.
        """
        # placeholder: merge emulation not exercised yet
        # emu = ReportEmu(WMStep = self.cmssw, Job = self.job)
        # report = emu()
        return
class WorkQueueTest(EmulatedUnitTestCase):
    """
    Test WorkQueue Service client
    It will start WorkQueue RESTService
    Server DB sets from environment variable.
    Client DB sets from environment variable.

    This checks whether DS call makes without error and return the results.
    Not the correctness of functions. That will be tested in different module.
    """

    def setUp(self):
        """
        _setUp_
        """
        super(WorkQueueTest, self).setUp()

        self.specGenerator = WMSpecGenerator("WMSpecs")
        # self.configFile = EmulatorSetup.setupWMAgentConfig()
        self.schema = []
        self.couchApps = ["WorkQueue"]
        self.testInit = TestInitCouchApp('WorkQueueServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=self.schema,
                                useDefault=False)
        self.testInit.setupCouch('workqueue_t', *self.couchApps)
        self.testInit.setupCouch('workqueue_t_inbox', *self.couchApps)
        self.testInit.setupCouch('local_workqueue_t', *self.couchApps)
        self.testInit.setupCouch('local_workqueue_t_inbox', *self.couchApps)
        self.testInit.generateWorkDir()
        return

    def tearDown(self):
        """
        _tearDown_

        Drop all the WMBS tables.
        """
        self.testInit.tearDownCouch()
        self.specGenerator.removeSpecs()
        super(WorkQueueTest, self).tearDown()

    def testWorkQueueService(self):
        # test getWork
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(specName, "file",
                                                      assignKwargs={'SiteWhitelist': ['T2_XX_SiteA']})
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl,
                              UnittestFlag=True)
        self.assertTrue(globalQ.queueWork(specUrl, specName, "teamA") > 0)

        wqApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        wqApi.defaultOptions = {'reduce': True, 'group': True}
        # This only checks minimum client call not exactly correctness of return
        # values.
        self.assertEqual(wqApi.getTopLevelJobsByRequest(),
                         [{'total_jobs': 339, 'request_name': specName}])
        # work still available, so no childQueue
        results = wqApi.getChildQueuesAndStatus()
        self.assertItemsEqual(set([item['agent_name'] for item in results]),
                              ["AgentNotDefined"])
        result = wqApi.getElementsCountAndJobsByWorkflow()
        self.assertEqual(len(result), 1)
        self.assertEqual(result[specName]['Available']['Jobs'], 339)

        results = wqApi.getChildQueuesAndPriority()
        resultsPrio = set([item['priority'] for item in results
                           if item['agent_name'] == "AgentNotDefined"])
        self.assertItemsEqual(resultsPrio, [8000])
        self.assertEqual(wqApi.getWMBSUrl(), [])
        self.assertEqual(wqApi.getWMBSUrlByRequest(), [])

    def testUpdatePriorityService(self):
        """
        _testUpdatePriorityService_

        Check that we can update the priority correctly also
        check the available workflows feature
        """
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(specName, "file",
                                                      assignKwargs={'SiteWhitelist': ["T2_XX_SiteA"]})
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl,
                              UnittestFlag=True)
        localQ = localQueue(DbName='local_workqueue_t',
                            QueueURL=self.testInit.couchUrl,
                            CacheDir=self.testInit.testDir,
                            ParentQueueCouchUrl='%s/workqueue_t' % self.testInit.couchUrl,
                            ParentQueueInboxCouchDBName='workqueue_t_inbox'
                            )
        # Try a full chain of priority update and propagation
        self.assertTrue(globalQ.queueWork(specUrl, "RerecoSpec", "teamA") > 0)
        globalApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        globalApi.defaultOptions = {'reduce': True, 'group': True}
        globalApi.updatePriority(specName, 100)
        self.assertEqual(globalQ.backend.getWMSpec(specName).priority(), 100)
        storedElements = globalQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 100)
        numWorks = localQ.pullWork({'T2_XX_SiteA': 10})
        self.assertTrue(numWorks > 0)
        # replicate from GQ to LQ manually
        localQ.backend.pullFromParent(continuous=False)
        # wait until replication is done
        time.sleep(2)
        localQ.processInboundWork(continuous=False)
        storedElements = localQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 100)
        localApi = WorkQueueDS(self.testInit.couchUrl, 'local_workqueue_t')
        # overwrite default - can't test with stale view
        localApi.defaultOptions = {'reduce': True, 'group': True}
        localApi.updatePriority(specName, 500)
        self.assertEqual(localQ.backend.getWMSpec(specName).priority(), 500)
        storedElements = localQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 500)
        availableWF = localApi.getAvailableWorkflows()
        self.assertEqual(availableWF, set([(specName, 500)]))
        # Attempt to update an inexistent workflow in the queue
        try:
            globalApi.updatePriority('NotExistent', 2)
        except Exception as ex:
            self.fail('No exception should be raised.: %s' % str(ex))

    def testCompletedWorkflow(self):
        # test getWork
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(specName, "file",
                                                      assignKwargs={'SiteWhitelist': ['T2_XX_SiteA']})

        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl,
                              UnittestFlag=True)
        self.assertTrue(globalQ.queueWork(specUrl, specName, "teamA") > 0)

        wqApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        wqApi.defaultOptions = {'reduce': True, 'group': True}
        # This only checks minimum client call not exactly correctness of return
        # values.
        self.assertEqual(wqApi.getTopLevelJobsByRequest(),
                         [{'total_jobs': 339, 'request_name': specName}])
        results = wqApi.getJobsByStatus()
        self.assertEqual(results['Available']['sum_jobs'], 339)
        results = wqApi.getJobsByStatusAndPriority()
        resultsPrio = set([item['priority'] for item in results.get('Available')])
        self.assertItemsEqual(resultsPrio, [8000])
        resultsJobs = sum([item['sum_jobs'] for item in results.get('Available')
                           if item['priority'] == 8000])
        # BUGFIX: this was assertTrue(resultsJobs, 339) - unittest treats the
        # second argument as the failure *message*, so 339 was never compared.
        self.assertEqual(resultsJobs, 339)
        result = wqApi.getElementsCountAndJobsByWorkflow()
        self.assertEqual(len(result), 1)
        self.assertEqual(result[specName]['Available']['Jobs'], 339)
        data = wqApi.db.loadView('WorkQueue', 'elementsDetailByWorkflowAndStatus',
                                 {'startkey': [specName], 'endkey': [specName, {}],
                                  'reduce': False})
        elements = [x['id'] for x in data.get('rows', [])]
        wqApi.updateElements(*elements, Status='Canceled')
        # load this view once again to make sure it will be updated in the next assert..
        data = wqApi.db.loadView('WorkQueue', 'elementsDetailByWorkflowAndStatus',
                                 {'startkey': [specName], 'endkey': [specName, {}],
                                  'reduce': False})
        self.assertEqual(len(wqApi.getCompletedWorkflow(stale=False)), 1)
        results = wqApi.getJobsByStatusAndPriority()
        resultsPrio = set([item['priority'] for item in results.get('Canceled')])
        self.assertItemsEqual(resultsPrio, [8000])

    def testConvertWQElementsStatusToWFStatus(self):
        """
        _testConvertWQElementsStatusToWFStatus_

        Check that a set of all the workqueue element status in a request
        properly maps to a single state to trigger the ReqMgr request transition.
        """
        # workflows acquired by global_workqueue (nothing acquired by agents so far)
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Available"])), "acquired")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Negotiating"])), "acquired")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Available", "Negotiating"])), "acquired")

        # workflows not completely acquired yet by the agents
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Acquired"])), "running-open")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Available", "Negotiating", "Acquired"])),
                         "running-open")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Available", "Negotiating", "Acquired", "Running"])),
                         "running-open")
        self.assertEqual(
            convertWQElementsStatusToWFStatus(set(["Available", "Negotiating", "Acquired", "Running", "Done"])),
            "running-open")
        self.assertEqual(
            convertWQElementsStatusToWFStatus(
                set(["Available", "Negotiating", "Acquired", "Running", "Done", "CancelRequested"])),
            "running-open")
        self.assertEqual(
            convertWQElementsStatusToWFStatus(
                set(["Available", "Negotiating", "Acquired", "Running", "Done", "CancelRequested", "Canceled"])),
            "running-open")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Negotiating", "Acquired"])), "running-open")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Negotiating", "Acquired", "Running"])),
                         "running-open")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Negotiating", "Acquired", "Running", "Done"])),
                         "running-open")
        self.assertEqual(
            convertWQElementsStatusToWFStatus(set(["Negotiating", "Acquired", "Running", "Done", "CancelRequested"])),
            "running-open")
        self.assertEqual(
            convertWQElementsStatusToWFStatus(
                set(["Negotiating", "Acquired", "Running", "Done", "CancelRequested", "Canceled"])),
            "running-open")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Acquired", "Running"])), "running-open")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Acquired", "Running", "Done"])),
                         "running-open")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Acquired", "Running", "Done", "CancelRequested"])),
                         "running-open")
        self.assertEqual(
            convertWQElementsStatusToWFStatus(set(["Acquired", "Running", "Done", "CancelRequested", "Canceled"])),
            "running-open")

        # workflows completely acquired by the agents
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Running"])), "running-closed")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Running", "Done"])), "running-closed")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Running", "Done", "CancelRequested"])),
                         "running-closed")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Running", "Done", "CancelRequested", "Canceled"])),
                         "running-closed")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Running", "Done", "Canceled"])), "running-closed")

        # workflows completed/aborted/force-completed, thus existent elements
        # but no more active workqueue elements in the system
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Done"])), "completed")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Canceled"])), "completed")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Done", "Canceled"])), "completed")

        # workflows failed
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Failed"])), "failed")

        # non-failed workflows but with Failed elements
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Available", "Negotiating", "Acquired", "Failed"])),
                         "running-open")
        self.assertEqual(
            convertWQElementsStatusToWFStatus(
                set(["Available", "Negotiating", "Acquired", "Running", "Done", "Failed"])),
            "running-open")
        self.assertEqual(
            convertWQElementsStatusToWFStatus(set(["Negotiating", "Acquired", "Running", "Done", "Canceled", "Failed"])),
            "running-open")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Running", "Failed"])), "running-closed")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Running", "Done", "Canceled", "Failed"])),
                         "running-closed")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Done", "Failed"])), "completed")
        self.assertEqual(convertWQElementsStatusToWFStatus(set(["Canceled", "Failed"])), "completed")

        # workflows in a temporary state, nothing to do with them yet
        self.assertIsNone(convertWQElementsStatusToWFStatus(set(["Done", "CancelRequested"])))
        self.assertIsNone(convertWQElementsStatusToWFStatus(set(["CancelRequested"])))
class ReqMgr(dict):
    """
    In-memory stand-in for the ReqMgr client used by unit tests.

    Serves generated WMSpecs as request assignments and keeps request
    status, progress and messages in plain dictionaries.
    """

    def __init__(self, *args, **kwargs):
        """
        Initialise the fake request store; keyword arguments select the
        spec type, splitter and how many specs may be handed out.
        """
        self.specGenerator = WMSpecGenerator()
        self.count = 0
        self.maxWmSpec = kwargs.setdefault('numOfSpecs', 1)
        self.type = kwargs.setdefault("type", 'ReReco')
        if self.type not in ('ReReco', 'MonteCarlo'):
            raise TypeError('unknown request type %s' % self.type)
        self.splitter = kwargs.setdefault('splitter', 'DatasetBlock')
        self.inputDataset = kwargs.setdefault('inputDataset', None)
        self.dbsUrl = kwargs.setdefault('dbsUrl', None)
        self.status = {}      # request name -> status string
        self.progress = {}    # request name -> progress dict
        self.msg = {}         # request name -> last message
        self.names = []       # assignment order of request names
        self.openRunningTimeout = kwargs.setdefault('openRunningTimeout', 0)
        import logging
        self['logger'] = logging

    def getAssignment(self, teamName=None, request=None):
        """Hand out one (request name, spec url) pair, or [] once exhausted."""
        if self.count >= self.maxWmSpec:
            return []

        if self.type == 'ReReco':
            specName = "ReRecoTest_v%sEmulator" % self.count
            specUrl = self.specGenerator.createReRecoSpec(
                specName, "file", self.splitter, self.inputDataset,
                self.dbsUrl, OpenRunningTimeout=self.openRunningTimeout)
        elif self.type == 'MonteCarlo':
            specName = "MCTest_v%sEmulator" % self.count
            specUrl = self.specGenerator.createMCSpec(
                specName, "file", self.splitter)
        self.names.append(specName)
        self.status[specName] = 'assigned'
        self.count += 1
        # a single assignment, shaped as a list of [name, url] pairs
        return [[specName, specUrl], ]

    def getRunningOpen(self, teamName):
        """Returns a list of request names of requests in running-open state"""
        return [reqName for reqName, state in self.status.items()
                if state == 'running-open']

    def getRequest(self, requestName):
        """Get request info"""
        if requestName not in self.names:
            raise RuntimeError("unknown request %s" % requestName)
        request = {'RequestName': requestName,
                   'RequestStatus': self.status[requestName],
                   'RequestPriority': 100}
        # fold in any recorded progress, then guarantee the percent keys
        request.update(self.progress.get(requestName, {}))
        request.setdefault('percent_complete', 0)
        request.setdefault('percent_success', 0)
        return request

    def putWorkQueue(self, reqName, prodAgentUrl=None):
        """Flag the request as picked up by a work queue."""
        self.status[reqName] = 'acquired'

    def reportRequestStatus(self, name, status):
        """Apply a status transition, refusing disallowed moves."""
        if not check_allowed_transition(self.status[name], status):
            raise RuntimeError("Invalid status move: %s" % status)
        self.status[name] = status

    def reportRequestProgress(self, name, **args):
        """Merge progress key/values into the request's progress record."""
        self.progress.setdefault(name, {}).update(args)

    def sendMessage(self, request, msg):
        """Remember the last message sent for the request."""
        self.msg[request] = msg

    def _removeSpecs(self):
        """
        This is just for clean up not part of emulated function
        """
        self.specGenerator.removeSpecs()

    def getTeam(self):
        """Give a fake team name"""
        return ["The A-Team", "some other bloke"]
class ReportEmuTest(unittest.TestCase):
    """
    _ReportEmuTest_

    Verify that the FWJR emulator produces sane reports for a ReReco
    processing workflow.
    """

    def setUp(self):
        """
        _setUp_

        Setup some reasonable defaults for the ReReco workflow.
        """
        self.unmergedLFNBase = "/store/backfill/2/unmerged"
        self.mergedLFNBase = "/store/backfill/2"
        self.processingVersion = "v1"
        self.cmsswVersion = "CMSSW_3_4_2_patch1"
        self.acquisitionEra = "WMAgentCommissioining10"
        self.primaryDataset = "MinimumBias"

        self.workload = WMSpecGenerator().createReRecoSpec("Tier1ReReco")
        # print() call form works on both Python 2 and 3
        # (was the Py2-only statement: print self.workload.data)
        print(self.workload.data)
        return

    def verifyOutputMetaData(self, outputFile, job):
        """
        _verifyOutputMetaData_

        Verify the metadata in an emulated FWJR.  Most of the meta data
        should be the same as the input file.
        """
        # Collect the runs/lumis of the job's input files as the "golden"
        # reference the output file must reproduce exactly.
        goldenRuns = []
        for inputFile in job["input_files"]:
            for run in inputFile["runs"]:
                goldenRuns.append(Run(run.run, *run.lumis))

        assert len(outputFile["runs"]) == len(goldenRuns), \
            "Error: Wrong number of runs in output file."

        for outputRun in outputFile["runs"]:
            for goldenRun in goldenRuns:
                if outputRun.run == goldenRun.run:
                    goldenRun.lumis.sort()
                    outputRun.lumis.sort()
                    if goldenRun.lumis == outputRun.lumis:
                        goldenRuns.remove(goldenRun)
                        break

        assert len(goldenRuns) == 0, \
            "Error: Run information wrong on output file."

        assert len(outputFile["locations"]) == 1, \
            "Error: Wrong number of locations."
        assert list(outputFile["locations"])[0] == job["location"], \
            "Error: Output file at the wrong location."
        assert outputFile["merged"] == False, \
            "Error: Output should be unmerged."
        # membership test directly on the dict; .keys() was redundant
        assert "adler32" in outputFile["checksums"], \
            "Error: Adler32 checksum missing."
        assert "cksum" in outputFile["checksums"], \
            "Error: CKSum checksum missing."

        return

    def testProcessing(self):
        """
        _testProcessing_

        Setup a processing workflow and job and verify that the FWJR
        produced by the emulator is reasonable.
        """
        rerecoTask = self.workload.getTask("DataProcessing")
        cmsRunStep = rerecoTask.getStep("cmsRun1")

        inputFile = File(lfn="/path/to/test/lfn", size=1048576, events=1000,
                         merged=True)
        inputFile.addRun(Run(1, *[1, 2, 3, 4, 5]))
        inputFile.addRun(Run(2, *[1, 2, 3, 4, 5, 6]))

        processingJob = Job(name="ProcessingJob", files=[inputFile])
        processingJob["task"] = "/Tier1ReReco/ReReco"
        processingJob["mask"].setMaxAndSkipEvents(500, 0)
        processingJob["id"] = 1
        processingJob["location"] = "cmssrm.fnal.gov"

        emu = ReportEmu(WMStep=cmsRunStep.getTypeHelper(), Job=processingJob)
        report = emu()

        reportInputFiles = report.getInputFilesFromStep("cmsRun1")

        assert len(reportInputFiles) == 1, \
            "Error: Wrong number of input files for the job."
        assert reportInputFiles[0]["lfn"] == inputFile["lfn"], \
            "Error: Input LFNs do not match: %s" % reportInputFiles[0]["lfn"]
        assert reportInputFiles[0]["size"] == inputFile["size"], \
            "Error: Input file sizes do not match."
        assert reportInputFiles[0]["events"] == inputFile["events"], \
            "Error: Input file events do not match."

        goldenRuns = [Run(1, *[1, 2, 3, 4, 5]), Run(2, *[1, 2, 3, 4, 5, 6])]

        assert len(reportInputFiles[0]["runs"]) == len(goldenRuns), \
            "Error: Wrong number of runs in input file."

        for inputRun in reportInputFiles[0]["runs"]:
            for goldenRun in goldenRuns:
                if inputRun.run == goldenRun.run:
                    goldenRun.lumis.sort()
                    inputRun.lumis.sort()
                    if goldenRun.lumis == inputRun.lumis:
                        goldenRuns.remove(goldenRun)
                        break

        assert len(goldenRuns) == 0, \
            "Error: Run information wrong on input file."

        recoOutputFiles = report.getFilesFromOutputModule(
            "cmsRun1", "RECOoutput")
        alcaOutputFiles = report.getFilesFromOutputModule(
            "cmsRun1", "ALCARECOoutput")

        assert len(recoOutputFiles) == 1, \
            "Error: There should only be one RECO output file."
        assert len(alcaOutputFiles) == 1, \
            "Error: There should only be one ALCA output file."

        assert recoOutputFiles[0]["module_label"] == "RECOoutput", \
            "Error: RECO file has wrong output module."
        assert alcaOutputFiles[0]["module_label"] == "ALCARECOoutput", \
            "Error: ALCA file has wrong output module."

        self.verifyOutputMetaData(recoOutputFiles[0], processingJob)
        self.verifyOutputMetaData(alcaOutputFiles[0], processingJob)

        dataTierMap = {"RECOoutput": "RECO", "ALCARECOoutput": "ALCARECO"}
        for outputFile in [recoOutputFiles[0], alcaOutputFiles[0]]:
            assert outputFile["dataset"]["applicationName"] == "cmsRun", \
                "Error: Application name is incorrect."
            assert outputFile["dataset"]["primaryDataset"] == self.primaryDataset, \
                "Error: Primary dataset is incorrect."
            assert outputFile["dataset"]["dataTier"] == dataTierMap[outputFile["module_label"]], \
                "Error: Data tier is incorrect."

        return

    def testMerge(self):
        """
        _testMerge_

        Setup a merge workflow and job and verify that the FWJR produced by
        the emulator is reasonable.
        """
        # placeholder: merge emulation not exercised yet
        # emu = ReportEmu(WMStep = self.cmssw, Job = self.job)
        # report = emu()
        return
class WorkQueueTest(unittest.TestCase):
    """
    Test WorkQueue Service client
    It will start WorkQueue RESTService
    Server DB sets from environment variable.
    Client DB sets from environment variable.

    This checks whether DS call makes without error and return the results.
    Not the correctness of functions. That will be tested in different module.
    """

    def setUp(self):
        """
        _setUp_
        """
        EmulatorHelper.setEmulators(phedex=True, dbs=True,
                                    siteDB=True, requestMgr=True)
        self.specGenerator = WMSpecGenerator("WMSpecs")
        # self.configFile = EmulatorSetup.setupWMAgentConfig()
        self.schema = []
        self.couchApps = ["WorkQueue"]
        self.testInit = TestInitCouchApp('WorkQueueServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=self.schema,
                                useDefault=False)
        self.testInit.setupCouch('workqueue_t', *self.couchApps)
        self.testInit.setupCouch('workqueue_t_inbox', *self.couchApps)
        self.testInit.setupCouch('local_workqueue_t', *self.couchApps)
        self.testInit.setupCouch('local_workqueue_t_inbox', *self.couchApps)
        self.testInit.generateWorkDir()
        return

    def tearDown(self):
        """
        _tearDown_

        Drop all the WMBS tables.
        """
        self.testInit.tearDownCouch()
        EmulatorHelper.resetEmulators()
        self.specGenerator.removeSpecs()

    def testWorkQueueService(self):
        # test getWork
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(specName, "file")
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl)
        self.assertTrue(globalQ.queueWork(specUrl, "RerecoSpec", "teamA") > 0)

        wqApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        wqApi.defaultOptions = {'reduce': True, 'group': True}
        # This only checks minimum client call not exactly correctness of return
        # values.
        self.assertEqual(wqApi.getTopLevelJobsByRequest(),
                         [{'total_jobs': 10, 'request_name': specName}])
        self.assertEqual(wqApi.getChildQueues(), [])
        self.assertEqual(wqApi.getJobStatusByRequest(),
                         [{'status': 'Available', 'jobs': 10, 'request_name': specName}])
        self.assertEqual(wqApi.getChildQueuesByRequest(), [])
        self.assertEqual(wqApi.getWMBSUrl(), [])
        self.assertEqual(wqApi.getWMBSUrlByRequest(), [])

    def testUpdatePriorityService(self):
        """
        _testUpdatePriorityService_

        Check that we can update the priority correctly also
        check the available workflows feature
        """
        specName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(specName, "file")
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl)
        localQ = localQueue(DbName='local_workqueue_t',
                            QueueURL=self.testInit.couchUrl,
                            CacheDir=self.testInit.testDir,
                            ParentQueueCouchUrl='%s/workqueue_t' % self.testInit.couchUrl,
                            ParentQueueInboxCouchDBName='workqueue_t_inbox'
                            )
        # Try a full chain of priority update and propagation
        self.assertTrue(globalQ.queueWork(specUrl, "RerecoSpec", "teamA") > 0)
        globalApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # overwrite default - can't test with stale view
        globalApi.defaultOptions = {'reduce': True, 'group': True}
        globalApi.updatePriority(specName, 100)
        self.assertEqual(globalQ.backend.getWMSpec(specName).priority(), 100)
        storedElements = globalQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 100)
        self.assertTrue(localQ.pullWork({'T2_XX_SiteA': 10}) > 0)
        localQ.processInboundWork(continuous=False)
        storedElements = localQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 100)
        localApi = WorkQueueDS(self.testInit.couchUrl, 'local_workqueue_t')
        # overwrite default - can't test with stale view
        localApi.defaultOptions = {'reduce': True, 'group': True}
        localApi.updatePriority(specName, 500)
        self.assertEqual(localQ.backend.getWMSpec(specName).priority(), 500)
        storedElements = localQ.backend.getElementsForWorkflow(specName)
        for element in storedElements:
            self.assertEqual(element['Priority'], 500)
        self.assertEqual(localApi.getAvailableWorkflows(), set([(specName, 500)]))
        # Attempt to update an inexistent workflow in the queue
        try:
            globalApi.updatePriority('NotExistent', 2)
        except Exception:
            # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed by the test
            self.fail('No exception should be raised.')
class TestChangeState(unittest.TestCase):
    """
    Unit tests for WMCore.JobStateMachine.ChangeState: state-transition
    checking, persistence into WMBS, and the Couch documents (job dump,
    FWJR dump, job summary) written on propagate().
    """

    def setUp(self):
        """
        _setUp_

        Set up WMBS, the three Couch databases used by ChangeState, and a
        processing spec shared by all tests.
        """
        self.transitions = Transitions()
        self.testInit = TestInitCouchApp(__file__)
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setupCouch("changestate_t/jobs", "JobDump")
        self.testInit.setupCouch("changestate_t/fwjrs", "FWJRDump")
        self.testInit.setupCouch("job_summary", "WMStats")
        self.testInit.setSchema(customModules=["WMCore.WMBS"],
                                useDefault=False)

        myThread = threading.currentThread()
        self.daoFactory = DAOFactory(package="WMCore.WMBS",
                                     logger=myThread.logger,
                                     dbinterface=myThread.dbi)

        couchurl = os.getenv("COUCHURL")
        self.couchServer = CouchServer(dburl=couchurl)
        self.config = self.testInit.getConfiguration()
        self.taskName = "/TestWorkflow/ReReco1"
        self.specGen = WMSpecGenerator()
        self.specUrl = self.specGen.createProcessingSpec("TestWorkflow",
                                                         returnType="file")
        return

    def tearDown(self):
        """
        _tearDown_

        Cleanup the databases.
        """
        self.testInit.clearDatabase()
        self.testInit.tearDownCouch()
        self.specGen.removeSpecs()
        return

    def testCheck(self):
        """
        This is the test class for function Check from module ChangeState
        """
        change = ChangeState(self.config, "changestate_t")

        # Run through all good state transitions and assert that they work
        for state in self.transitions.keys():
            for dest in self.transitions[state]:
                change.check(dest, state)

        dummystates = ['dummy1', 'dummy2', 'dummy3', 'dummy4']
        # Then run through some bad state transitions and assert they raise
        for state in self.transitions.keys():
            for dest in dummystates:
                self.assertRaises(AssertionError, change.check, dest, state)
        return

    def testRecordInCouch(self):
        """
        _testRecordInCouch_

        Verify that jobs, state transitions and fwjrs are recorded correctly.
        """
        # Local import: integer check that is valid on both py2 (int/long)
        # and py3 (int only).
        import numbers

        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()
        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow,
                                        split_algo="FileBased")
        testSubscription.create()

        testFileA = File(lfn="SomeLFNA", events=1024, size=2048,
                         locations=set(["T2_CH_CERN"]))
        testFileB = File(lfn="SomeLFNB", events=1025, size=2049,
                         locations=set(["T2_CH_CERN"]))
        testFileA.create()
        testFileB.create()

        testFileset.addFile(testFileA)
        testFileset.addFile(testFileB)
        testFileset.commit()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 2, \
            "Error: Splitting should have created two jobs."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Merge"
        testJobB = jobGroup.jobs[1]
        testJobB["user"] = "******"
        testJobB["group"] = "DMWM"
        testJobB["taskType"] = "Processing"

        change.propagate([testJobA, testJobB], "new", "none")
        change.propagate([testJobA, testJobB], "created", "new")
        change.propagate([testJobA, testJobB], "executing", "created")

        testJobADoc = change.jobsdatabase.document(testJobA["couch_record"])

        # .values() instead of the py2-only .itervalues(); numbers.Integral
        # replaces the py2-only `long` check so this runs under Python 3.
        for transition in testJobADoc["states"].values():
            self.assertTrue(isinstance(transition["timestamp"], numbers.Integral))

        self.assertEqual(testJobADoc["jobid"], testJobA["id"],
                         "Error: ID parameter is incorrect.")
        assert testJobADoc["name"] == testJobA["name"], \
            "Error: Name parameter is incorrect."
        assert testJobADoc["jobgroup"] == testJobA["jobgroup"], \
            "Error: Jobgroup parameter is incorrect."
        assert testJobADoc["workflow"] == testJobA["workflow"], \
            "Error: Workflow parameter is incorrect."
        assert testJobADoc["task"] == testJobA["task"], \
            "Error: Task parameter is incorrect."
        assert testJobADoc["owner"] == testJobA["owner"], \
            "Error: Owner parameter is incorrect."

        assert testJobADoc["mask"]["FirstEvent"] == testJobA["mask"]["FirstEvent"], \
            "Error: First event in mask is incorrect."
        assert testJobADoc["mask"]["LastEvent"] == testJobA["mask"]["LastEvent"], \
            "Error: Last event in mask is incorrect."
        assert testJobADoc["mask"]["FirstLumi"] == testJobA["mask"]["FirstLumi"], \
            "Error: First lumi in mask is incorrect."
        assert testJobADoc["mask"]["LastLumi"] == testJobA["mask"]["LastLumi"], \
            "Error: Last lumi in mask is incorrect."
        assert testJobADoc["mask"]["FirstRun"] == testJobA["mask"]["FirstRun"], \
            "Error: First run in mask is incorrect."
        # BUGFIX: previously compared LastEvent against LastRun.
        assert testJobADoc["mask"]["LastRun"] == testJobA["mask"]["LastRun"], \
            "Error: Last run in mask is incorrect."

        assert len(testJobADoc["inputfiles"]) == 1, \
            "Error: Input files parameter is incorrect."

        testJobBDoc = change.jobsdatabase.document(testJobB["couch_record"])

        assert testJobBDoc["jobid"] == testJobB["id"], \
            "Error: ID parameter is incorrect."
        assert testJobBDoc["name"] == testJobB["name"], \
            "Error: Name parameter is incorrect."
        assert testJobBDoc["jobgroup"] == testJobB["jobgroup"], \
            "Error: Jobgroup parameter is incorrect."

        assert testJobBDoc["mask"]["FirstEvent"] == testJobB["mask"]["FirstEvent"], \
            "Error: First event in mask is incorrect."
        assert testJobBDoc["mask"]["LastEvent"] == testJobB["mask"]["LastEvent"], \
            "Error: Last event in mask is incorrect."
        assert testJobBDoc["mask"]["FirstLumi"] == testJobB["mask"]["FirstLumi"], \
            "Error: First lumi in mask is incorrect."
        assert testJobBDoc["mask"]["LastLumi"] == testJobB["mask"]["LastLumi"], \
            "Error: Last lumi in mask is incorrect."
        assert testJobBDoc["mask"]["FirstRun"] == testJobB["mask"]["FirstRun"], \
            "Error: First run in mask is incorrect."
        # BUGFIX: previously compared LastEvent against LastRun.
        assert testJobBDoc["mask"]["LastRun"] == testJobB["mask"]["LastRun"], \
            "Error: Last run in mask is incorrect."

        assert len(testJobBDoc["inputfiles"]) == 1, \
            "Error: Input files parameter is incorrect."

        changeStateDB = self.couchServer.connectDatabase(dbname="changestate_t/jobs")
        allDocs = changeStateDB.document("_all_docs")

        # Two job documents plus the design document.
        self.assertEqual(len(allDocs["rows"]), 3,
                         "Error: Wrong number of documents.")

        couchJobDoc = changeStateDB.document("1")

        assert couchJobDoc["name"] == testJobA["name"], \
            "Error: Name is wrong"
        assert len(couchJobDoc["inputfiles"]) == 1, \
            "Error: Wrong number of input files."

        result = changeStateDB.loadView("JobDump", "jobsByWorkflowName")
        self.assertEqual(len(result["rows"]), 2,
                         "Error: Wrong number of rows.")
        for row in result["rows"]:
            couchJobDoc = changeStateDB.document(row["value"]["id"])
            self.assertEqual(couchJobDoc["_rev"], row["value"]["rev"],
                             "Error: Rev is wrong.")
        return

    def testUpdateFailedDoc(self):
        """
        _testUpdateFailedDoc_

        Verify that the update function will work correctly and not throw a 500
        error if the doc didn't make it into the database for some reason.
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()
        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow,
                                        split_algo="FileBased")
        testSubscription.create()

        testFileA = File(lfn="SomeLFNA", events=1024, size=2048,
                         locations=set(["T2_CH_CERN"]))
        testFileA.create()
        testFileset.addFile(testFileA)
        testFileset.commit()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Merge"
        # Pre-set the couch record so propagate must cope with a doc id that
        # never made it into the database.
        testJobA["couch_record"] = str(testJobA["id"])

        change.propagate([testJobA], "new", "none")
        testJobADoc = change.jobsdatabase.document(testJobA["couch_record"])

        self.assertTrue("states" in testJobADoc)
        self.assertTrue("1" in testJobADoc["states"])
        return

    def testPersist(self):
        """
        _testPersist_

        This is the test class for function Propagate from module ChangeState
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()

        for i in range(4):
            newFile = File(lfn="File%s" % i, locations=set(["T2_CH_CERN"]))
            newFile.create()
            testFileset.addFile(newFile)

        testFileset.commit()
        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow,
                                        split_algo="FileBased")
        testSubscription.create()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 4, \
            "Error: Splitting should have created four jobs."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Processing"
        testJobB = jobGroup.jobs[1]
        testJobB["user"] = "******"
        testJobB["group"] = "DMWM"
        testJobB["taskType"] = "Processing"
        testJobC = jobGroup.jobs[2]
        testJobC["user"] = "******"
        testJobC["group"] = "DMWM"
        testJobC["taskType"] = "Processing"
        testJobD = jobGroup.jobs[3]
        testJobD["user"] = "******"
        testJobD["group"] = "DMWM"
        testJobD["taskType"] = "Processing"

        change.persist([testJobA, testJobB], "created", "new")
        change.persist([testJobC, testJobD], "new", "none")

        stateDAO = self.daoFactory(classname="Jobs.GetState")

        jobAState = stateDAO.execute(id=testJobA["id"])
        jobBState = stateDAO.execute(id=testJobB["id"])
        jobCState = stateDAO.execute(id=testJobC["id"])
        jobDState = stateDAO.execute(id=testJobD["id"])

        assert jobAState == "created" and jobBState == "created" and \
            jobCState == "new" and jobDState == "new", \
            "Error: Jobs didn't change state correctly."
        return

    def testRetryCount(self):
        """
        _testRetryCount_

        Verify that the retry count is incremented when we move out of the
        submitcooloff or jobcooloff state.
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()

        for i in range(4):
            newFile = File(lfn="File%s" % i, locations=set(["T2_CH_CERN"]))
            newFile.create()
            testFileset.addFile(newFile)

        testFileset.commit()
        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow,
                                        split_algo="FileBased")
        testSubscription.create()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 4, \
            "Error: Splitting should have created four jobs."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Processing"
        testJobB = jobGroup.jobs[1]
        testJobB["user"] = "******"
        testJobB["group"] = "DMWM"
        testJobB["taskType"] = "Processing"
        testJobC = jobGroup.jobs[2]
        testJobC["user"] = "******"
        testJobC["group"] = "DMWM"
        testJobC["taskType"] = "Processing"
        testJobD = jobGroup.jobs[3]
        testJobD["user"] = "******"
        testJobD["group"] = "DMWM"
        testJobD["taskType"] = "Processing"

        # Leaving a cooloff state bumps the retry count; other moves do not.
        change.persist([testJobA], "created", "submitcooloff")
        change.persist([testJobB], "created", "jobcooloff")
        change.persist([testJobC, testJobD], "new", "none")

        testJobA.load()
        testJobB.load()
        testJobC.load()
        testJobD.load()

        assert testJobA["retry_count"] == 1, \
            "Error: Retry count is wrong."
        assert testJobB["retry_count"] == 1, \
            "Error: Retry count is wrong."
        assert testJobC["retry_count"] == 0, \
            "Error: Retry count is wrong."
        assert testJobD["retry_count"] == 0, \
            "Error: Retry count is wrong."
        return

    def testJobSerialization(self):
        """
        _testJobSerialization_

        Verify that serialization of a job works when adding a FWJR.
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()
        testFile = File(lfn="SomeLFNC", locations=set(["T2_CH_CERN"]))
        testFile.create()
        testFileset.addFile(testFile)
        testFileset.commit()

        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow)
        testSubscription.create()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 1, \
            "Error: Splitting should have created one job."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Processing"

        change.propagate([testJobA], 'created', 'new')

        myReport = Report()
        reportPath = os.path.join(getTestBase(),
                                  "WMCore_t/JobStateMachine_t/Report.pkl")
        myReport.unpersist(reportPath)
        testJobA["fwjr"] = myReport

        change.propagate([testJobA], 'executing', 'created')

        changeStateDB = self.couchServer.connectDatabase(dbname="changestate_t/fwjrs")
        allDocs = changeStateDB.document("_all_docs")

        # One fwjr document plus the design document.
        self.assertEqual(len(allDocs["rows"]), 2,
                         "Error: Wrong number of documents")

        result = changeStateDB.loadView("FWJRDump", "fwjrsByWorkflowName")
        self.assertEqual(len(result["rows"]), 1,
                         "Error: Wrong number of rows.")
        for row in result["rows"]:
            couchJobDoc = changeStateDB.document(row["value"]["id"])
            self.assertEqual(couchJobDoc["_rev"], row["value"]["rev"],
                             "Error: Rev is wrong.")

        for resultRow in allDocs["rows"]:
            if resultRow["id"] != "_design/FWJRDump":
                fwjrDoc = changeStateDB.document(resultRow["id"])
                break

        assert fwjrDoc["retrycount"] == 0, \
            "Error: Retry count is wrong."
        assert len(fwjrDoc["fwjr"]["steps"].keys()) == 2, \
            "Error: Wrong number of steps in FWJR."
        assert "cmsRun1" in fwjrDoc["fwjr"]["steps"].keys(), \
            "Error: cmsRun1 step is missing from FWJR."
        assert "stageOut1" in fwjrDoc["fwjr"]["steps"].keys(), \
            "Error: stageOut1 step is missing from FWJR."
        return

    def testDuplicateJobReports(self):
        """
        _testDuplicateJobReports_

        Verify that everything works correctly if a job report is added to the
        database more than once.
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()
        testFile = File(lfn="SomeLFNC", locations=set(["T2_CH_CERN"]))
        testFile.create()
        testFileset.addFile(testFile)
        testFileset.commit()

        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow)
        testSubscription.create()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 1, \
            "Error: Splitting should have created one job."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Processing"

        change.propagate([testJobA], 'created', 'new')

        myReport = Report()
        reportPath = os.path.join(getTestBase(),
                                  "WMCore_t/JobStateMachine_t/Report.pkl")
        myReport.unpersist(reportPath)
        testJobA["fwjr"] = myReport

        # Propagate the identical transition twice; must not duplicate docs.
        change.propagate([testJobA], 'executing', 'created')
        change.propagate([testJobA], 'executing', 'created')

        changeStateDB = self.couchServer.connectDatabase(dbname="changestate_t/fwjrs")
        allDocs = changeStateDB.document("_all_docs")

        # Still only one fwjr document plus the design document.
        self.assertEqual(len(allDocs["rows"]), 2,
                         "Error: Wrong number of documents")

        for resultRow in allDocs["rows"]:
            if resultRow["id"] != "_design/FWJRDump":
                changeStateDB.document(resultRow["id"])
                break
        return

    def testJobKilling(self):
        """
        _testJobKilling_

        Test that we can successfully set jobs to the killed state
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()

        for i in range(4):
            newFile = File(lfn="File%s" % i, locations=set(["T2_CH_CERN"]))
            newFile.create()
            testFileset.addFile(newFile)

        testFileset.commit()
        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow,
                                        split_algo="FileBased")
        testSubscription.create()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 4, \
            "Error: Splitting should have created four jobs."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Processing"
        testJobB = jobGroup.jobs[1]
        testJobB["user"] = "******"
        testJobB["group"] = "DMWM"
        testJobB["taskType"] = "Processing"
        testJobC = jobGroup.jobs[2]
        testJobC["user"] = "******"
        testJobC["group"] = "DMWM"
        testJobC["taskType"] = "Processing"
        testJobD = jobGroup.jobs[3]
        testJobD["user"] = "******"
        testJobD["group"] = "DMWM"
        testJobD["taskType"] = "Processing"

        change.persist([testJobA], "created", "new")
        change.persist([testJobB], "jobfailed", "executing")
        change.persist([testJobC, testJobD], "executing", "created")

        change.persist([testJobA], "killed", "created")
        change.persist([testJobB], "killed", "jobfailed")
        change.persist([testJobC, testJobD], "killed", "executing")

        # Killed jobs get the sentinel retry count so they are never retried.
        for job in [testJobA, testJobB, testJobC, testJobD]:
            job.load()
            self.assertEqual(job['retry_count'], 99999)
            self.assertEqual(job['state'], 'killed')
        return

    def testFWJRInputFileTruncation(self):
        """
        _testFWJRInputFileTruncation_

        Test and see whether the ChangeState code can be used to automatically
        truncate the number of input files in a FWJR.

        Code stolen from the serialization test
        """
        # Force truncation of every input file in the stored FWJR.
        self.config.JobStateMachine.maxFWJRInputFiles = 0
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()
        testFile = File(lfn="SomeLFNC", locations=set(["T2_CH_CERN"]))
        testFile.create()
        testFileset.addFile(testFile)
        testFileset.commit()

        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow)
        testSubscription.create()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        self.assertEqual(len(jobGroup.jobs), 1,
                         "Error: Splitting should have created one job.")

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Processing"

        change.propagate([testJobA], 'created', 'new')

        myReport = Report()
        reportPath = os.path.join(getTestBase(),
                                  "WMCore_t/JobStateMachine_t/Report.pkl")
        myReport.unpersist(reportPath)
        testJobA["fwjr"] = myReport

        change.propagate([testJobA], 'executing', 'created')

        changeStateDB = self.couchServer.connectDatabase(dbname="changestate_t/fwjrs")
        allDocs = changeStateDB.document("_all_docs")

        self.assertEqual(len(allDocs["rows"]), 2,
                         "Error: Wrong number of documents")

        result = changeStateDB.loadView("FWJRDump", "fwjrsByWorkflowName")
        self.assertEqual(len(result["rows"]), 1,
                         "Error: Wrong number of rows.")
        for row in result["rows"]:
            couchJobDoc = changeStateDB.document(row["value"]["id"])
            self.assertEqual(couchJobDoc["_rev"], row["value"]["rev"],
                             "Error: Rev is wrong.")

        for resultRow in allDocs["rows"]:
            if resultRow["id"] != "_design/FWJRDump":
                fwjrDoc = changeStateDB.document(resultRow["id"])
                break

        # With maxFWJRInputFiles == 0 the stored input-file list is empty.
        self.assertEqual(fwjrDoc["fwjr"]["steps"]['cmsRun1']['input']['source'], [])
        return

    def testJobSummary(self):
        """
        _testJobSummary_

        verify that job summary for jobs with fwjr are correctly created
        and that status is updated when updatesummary flag is enabled
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()
        testFile = File(lfn="SomeLFNC", locations=set(["T2_CH_CERN"]))
        testFile.create()
        testFileset.addFile(testFile)
        testFileset.commit()

        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow)
        testSubscription.create()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 1, \
            "Error: Splitting should have created one job."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Production"

        change.propagate([testJobA], 'created', 'new')

        myReport = Report()
        reportPath = os.path.join(getTestBase(),
                                  "WMCore_t/JobStateMachine_t/Report.pkl")
        myReport.unpersist(reportPath)

        change.propagate([testJobA], 'executing', 'created')
        testJobA["fwjr"] = myReport
        change.propagate([testJobA], 'jobfailed', 'executing')

        changeStateDB = self.couchServer.connectDatabase(
            dbname=self.config.JobStateMachine.jobSummaryDBName)
        allDocs = changeStateDB.document("_all_docs")

        self.assertEqual(len(allDocs["rows"]), 2,
                         "Error: Wrong number of documents")

        fwjrDoc = {'state': None}
        for resultRow in allDocs["rows"]:
            if resultRow["id"] != "_design/WMStats":
                fwjrDoc = changeStateDB.document(resultRow["id"])
                break

        self.assertEqual(fwjrDoc['state'], 'jobfailed',
                         "Error: summary doesn't have the expected job state")

        del testJobA["fwjr"]

        change.propagate([testJobA], 'jobcooloff', 'jobfailed',
                         updatesummary=True)
        return

    def testIndexConflict(self):
        """
        _testIndexConflict_

        Verify that in case of conflict in the job index
        we discard the old document and replace with a new one
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()
        testFile = File(lfn="SomeLFNC", locations=set(["T2_CH_CERN"]))
        testFile.create()
        testFileset.addFile(testFile)
        testFileset.commit()

        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow)
        testSubscription.create()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 1, \
            "Error: Splitting should have created one job."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "CompOps"
        testJobA["taskType"] = "Processing"

        myReport = Report()
        reportPath = os.path.join(getTestBase(),
                                  "WMCore_t/JobStateMachine_t/Report.pkl")
        myReport.unpersist(reportPath)
        testJobA["fwjr"] = myReport

        change.propagate([testJobA], 'created', 'new')

        jobdatabase = self.couchServer.connectDatabase('changestate_t/jobs', False)
        fwjrdatabase = self.couchServer.connectDatabase('changestate_t/fwjrs', False)
        jobDoc = jobdatabase.document("1")
        fwjrDoc = fwjrdatabase.document("1-0")
        self.assertEqual(jobDoc["workflow"], "wf001",
                         "Wrong workflow in couch job document")
        self.assertEqual(fwjrDoc["fwjr"]["task"], self.taskName,
                         "Wrong task in fwjr couch document")

        # Delete the job and reset the auto-increment so the next job reuses
        # id 1 and collides with the existing couch documents.
        testJobA.delete()

        myThread = threading.currentThread()
        myThread.dbi.processData("ALTER TABLE wmbs_job AUTO_INCREMENT = 1")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf002", task="/TestWorkflow/Test2")
        testWorkflow.create()
        testFileset = Fileset(name="TestFilesetB")
        testFileset.create()
        testFile = File(lfn="SomeLFNB", locations=set(["T2_CH_CERN"]))
        testFile.create()
        testFileset.addFile(testFile)
        testFileset.commit()

        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow)
        testSubscription.create()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        testJobB = jobGroup.jobs[0]
        testJobB["user"] = "******"
        testJobB["group"] = "CompOps"
        testJobB["taskType"] = "Processing"
        testJobB["fwjr"] = myReport

        change.propagate([testJobB], 'created', 'new')

        jobDoc = jobdatabase.document("1")
        fwjrDoc = fwjrdatabase.document("1-0")
        self.assertEqual(jobDoc["workflow"], "wf002",
                         "Job document was not overwritten")
        self.assertEqual(fwjrDoc["fwjr"]["task"], "/TestWorkflow/Test2",
                         "FWJR document was not overwritten")
        return

    def testUpdateLocation(self):
        """
        _testUpdateLocation_

        Check that we can update the location of a job through
        the state machine.
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")
        locationAction.execute("site2", pnn="T1_US_FNAL_Disk")

        testWorkflow = Workflow(spec=self.specUrl, owner="Steve",
                                name="wf001", task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()
        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow,
                                        split_algo="FileBased")
        testSubscription.create()

        testFileA = File(lfn="SomeLFNA", events=1024, size=2048,
                         locations=set(["T2_CH_CERN", "T1_US_FNAL_Disk"]))
        testFileB = File(lfn="SomeLFNB", events=1025, size=2049,
                         locations=set(["T2_CH_CERN", "T1_US_FNAL_Disk"]))
        testFileA.create()
        testFileB.create()

        testFileset.addFile(testFileA)
        testFileset.addFile(testFileB)
        testFileset.commit()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 2, \
            "Error: Splitting should have created two jobs."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Merge"
        testJobA["site_cms_name"] = "site1"
        testJobB = jobGroup.jobs[1]
        testJobB["user"] = "******"
        testJobB["group"] = "DMWM"
        testJobB["taskType"] = "Processing"
        testJobB["site_cms_name"] = "site2"

        change.propagate([testJobA, testJobB], "new", "none")
        change.propagate([testJobA, testJobB], "created", "new")
        change.propagate([testJobA, testJobB], "executing", "created")

        # The most recent transition carries the current location.
        testJobADoc = change.jobsdatabase.document(testJobA["couch_record"])
        maxKey = max(testJobADoc["states"].keys())
        transition = testJobADoc["states"][maxKey]
        self.assertEqual(transition["location"], "site1")

        testJobBDoc = change.jobsdatabase.document(testJobB["couch_record"])
        maxKey = max(testJobBDoc["states"].keys())
        transition = testJobBDoc["states"][maxKey]
        self.assertEqual(transition["location"], "site2")

        jobs = [{'jobid': 1, 'location': 'site2'}]
        change.recordLocationChange(jobs)

        testJobADoc = change.jobsdatabase.document(testJobA["couch_record"])
        maxKey = max(testJobADoc["states"].keys())
        transition = testJobADoc["states"][maxKey]
        self.assertEqual(transition["location"], "site2")

        listJobsDAO = self.daoFactory(classname="Jobs.GetLocation")
        jobid = [{'jobid': 1}, {'jobid': 2}]
        jobsLocation = listJobsDAO.execute(jobid)
        for job in jobsLocation:
            self.assertEqual(job['site_name'], 'site2')
        return
def __init__(self, *args, **kwargs): print "Using RequestManager Emulator ..." self.specGenerator = WMSpecGenerator() self.count = 0 self.maxWmSpec = 1
class LocalWorkQueueProfileTest(WorkQueueTestCase):
    """
    _WorkQueueTest_

    Profiles WorkQueue.getWork on a local queue populated from emulator
    generated ReReco specs.
    """

    def setUp(self):
        """
        If we dont have a wmspec file create one
        """
        EmulatorHelper.setEmulators(phedex=True, dbs=True,
                                    siteDB=True, requestMgr=True)
        WorkQueueTestCase.setUp(self)
        self.cacheDir = tempfile.mkdtemp()
        self.specGenerator = WMSpecGenerator(self.cacheDir)
        self.specs = self.createReRecoSpec(1, "file")
        # Create queues
        self.localQueue = localQueue(DbName=self.queueDB,
                                     InboxDbName=self.queueInboxDB,
                                     NegotiationTimeout=0,
                                     QueueURL='global.example.com',
                                     CacheDir=self.cacheDir)

    def tearDown(self):
        """tearDown"""
        WorkQueueTestCase.tearDown(self)
        # FIX: was one bare `except:` around both cleanup steps, which (a)
        # swallowed SystemExit/KeyboardInterrupt and (b) skipped removeSpecs
        # whenever rmtree failed. Each step is now independent best-effort.
        try:
            shutil.rmtree(self.cacheDir)
        except OSError:
            pass
        try:
            self.specGenerator.removeSpecs()
        except Exception:
            pass
        EmulatorHelper.resetEmulators()

    def createReRecoSpec(self, numOfSpec, type="spec"):
        """Create `numOfSpec` ReReco specs; return the list of spec urls/files."""
        specs = []
        for i in range(numOfSpec):
            specName = "MinBiasProcessingSpec_Test_%s" % (i + 1)
            specs.append(self.specGenerator.createReRecoSpec(specName, type))
        return specs

    def createProfile(self, name, function):
        """Profile `function`, dump stats to file `name` and print summaries."""
        statsFile = name  # renamed local: don't shadow the `file` builtin
        prof = cProfile.Profile()
        prof.runcall(function)
        prof.dump_stats(statsFile)
        p = pstats.Stats(statsFile)
        p.strip_dirs().sort_stats('cumulative').print_stats(0.1)
        p.strip_dirs().sort_stats('time').print_stats(0.1)
        p.strip_dirs().sort_stats('calls').print_stats(0.1)
        #p.strip_dirs().sort_stats('name').print_stats(10)

    def testGetWorkLocalQueue(self):
        """Queue all specs into the local queue, then profile getWork."""
        i = 0
        for spec in self.specs:
            i += 1
            specName = "MinBiasProcessingSpec_Test_%s" % i
            self.localQueue.queueWork(spec, specName, team="A-team")
        self.localQueue.updateLocationInfo()
        self.createProfile('getWorkProfile.prof', self.localQueueGetWork)

    def localQueueGetWork(self):
        """The profiled call: request a huge job budget at every emulated site."""
        siteJobs = {}
        for site in Globals.SITES:
            siteJobs[site] = 100000
        self.localQueue.getWork(siteJobs, {})
class ReqMgr(dict):
    # Emulated RequestManager used by WorkQueue tests: hands out generated
    # wmspecs via getAssignment() and tracks per-request status/progress
    # in plain dicts.

    def __init__(self, *args, **kwargs):
        """
        Set up internal test state; all behaviour is driven by kwargs:
        numOfSpecs, type ('ReReco' or 'MonteCarlo'), splitter, inputDataset,
        dbsUrl, openRunningTimeout.
        """
        self.specGenerator = WMSpecGenerator()
        self.count = 0  # how many specs have been handed out so far
        self.maxWmSpec = kwargs.setdefault('numOfSpecs', 1)
        self.type = kwargs.setdefault("type", 'ReReco')
        if self.type not in ['ReReco', 'MonteCarlo']:
            raise TypeError('unknown request type %s' % self.type)
        self.splitter = kwargs.setdefault('splitter', 'DatasetBlock')
        self.inputDataset = kwargs.setdefault('inputDataset', None)
        self.dbsUrl = kwargs.setdefault('dbsUrl', None)
        self.status = {}    # request name -> status string
        self.progress = {}  # request name -> progress kwargs
        self.msg = {}       # request name -> last message sent
        self.names = []     # all request names handed out
        self.openRunningTimeout = kwargs.setdefault('openRunningTimeout', 0)
        import logging
        self['logger'] = logging

    def getAssignment(self, teamName=None, request=None):
        # Hand out one freshly generated spec per call until maxWmSpec is
        # reached; afterwards return an empty assignment list.
        if self.count < self.maxWmSpec:
            if self.type == 'ReReco':
                specName = "ReRecoTest_v%sEmulator" % self.count
                specUrl = self.specGenerator.createReRecoSpec(
                    specName, "file", self.splitter,
                    InputDataset=self.inputDataset,
                    DbsUrl=self.dbsUrl,
                    OpenRunningTimeout=self.openRunningTimeout)
            elif self.type == 'MonteCarlo':
                specName = "MCTest_v%sEmulator" % self.count
                specUrl = self.specGenerator.createMCSpec(
                    specName, "file", self.splitter)
            self.names.append(specName)
            self.status[specName] = 'assigned'
            #specName = "FakeProductionSpec_%s" % self.count
            #specUrl =self.specGenerator.createProductionSpec(specName, "file")
            #specName = "FakeProcessingSpec_%s" % self.count
            #specUrl =self.specGenerator.createProcessingSpec(specName, "file")
            self.count += 1
            # returns list of list(request name, spec url)
            return [[specName, specUrl], ]
        else:
            return []

    def getRunningOpen(self, teamName):
        """Returns a list of request names of requests in running-open state"""
        result = []
        for request in self.status:
            if self.status[request] == 'running-open':
                result.append(request)
        return result

    def getRequest(self, requestName):
        """Get request info"""
        if requestName not in self.names:
            raise RuntimeError("unknown request %s" % requestName)
        request = {'RequestName': requestName,
                   'RequestStatus': self.status[requestName],
                   'RequestPriority': 100}
        # Merge in any recorded progress, defaulting the completion counters.
        if requestName in self.progress:
            request.update(self.progress[requestName])
        request.setdefault('percent_complete', 0)
        request.setdefault('percent_success', 0)
        return request

    def putWorkQueue(self, reqName, prodAgentUrl=None):
        # A request handed to a workqueue moves to 'acquired'.
        self.status[reqName] = 'acquired'

    def reportRequestStatus(self, name, status):
        # Enforce the same status transition rules as the real ReqMgr.
        if not check_allowed_transition(self.status[name], status):
            raise RuntimeError("Invalid status move: %s" % status)
        self.status[name] = status

    def reportRequestProgress(self, name, **args):
        # Accumulate progress key/values per request.
        self.progress.setdefault(name, {})
        self.progress[name].update(args)

    def sendMessage(self, request, msg):
        # Record only the latest message per request.
        self.msg[request] = msg

    def _removeSpecs(self):
        """
        This is just for clean up not part of emulated function
        """
        self.specGenerator.removeSpecs()

    def getTeam(self):
        """Give a fake team name"""
        return ["The A-Team", "some other bloke"]
class WorkQueueTest(unittest.TestCase):
    """
    Test WorkQueue Service client

    It will start WorkQueue RESTService.
    Server DB sets from environment variable.
    Client DB sets from environment variable.

    Only checks that the service calls complete without error and return
    the expected shapes; functional correctness is covered elsewhere.
    """

    def setUp(self):
        """
        _setUp_

        Bring up the emulators, couch applications and the (empty) schema.
        """
        EmulatorHelper.setEmulators(phedex=True, dbs=True, siteDB=True,
                                    requestMgr=True)
        self.specGenerator = WMSpecGenerator("WMSpecs")
        self.schema = []
        self.couchApps = ["WorkQueue"]
        self.testInit = TestInitCouchApp('WorkQueueServiceTest')
        self.testInit.setLogging()
        self.testInit.setDatabaseConnection()
        self.testInit.setSchema(customModules=self.schema, useDefault=False)
        # one couch database per queue plus its inbox
        for dbName in ('workqueue_t', 'workqueue_t_inbox'):
            self.testInit.setupCouch(dbName, *self.couchApps)

    def tearDown(self):
        """
        _tearDown_

        Drop all the WMBS tables.
        """
        self.testInit.tearDownCouch()
        EmulatorHelper.resetEmulators()
        self.specGenerator.removeSpecs()

    def testWorkQueueService(self):
        # queue some work, then exercise the read-only service API
        requestName = "RerecoSpec"
        specUrl = self.specGenerator.createReRecoSpec(requestName, "file")
        globalQ = globalQueue(DbName='workqueue_t',
                              QueueURL=self.testInit.couchUrl)
        self.assertTrue(globalQ.queueWork(specUrl, "RerecoSpec", "teamA") > 0)

        wqApi = WorkQueueDS(self.testInit.couchUrl, 'workqueue_t')
        # minimal client-call checks, not exhaustive correctness checks
        self.assertEqual(wqApi.getTopLevelJobsByRequest(),
                         [{'total_jobs': 2, 'request_name': requestName}])
        self.assertEqual(wqApi.getChildQueues(), [])
        self.assertEqual(wqApi.getJobStatusByRequest(),
                         [{'status': 'Available', 'jobs': 2,
                           'request_name': requestName}])
        self.assertEqual(wqApi.getChildQueuesByRequest(), [])
        self.assertEqual(wqApi.getWMBSUrl(), [])
        self.assertEqual(wqApi.getWMBSUrlByRequest(), [])
class WorkQueueProfileTest(WorkQueueTestCase):
    """
    _WorkQueueTest_

    Profiling test for queueing work into a global WorkQueue.
    """

    def setUp(self):
        """
        If we dont have a wmspec file create one

        Warning: for a real profiling test including spec generation,
        use a real spec instead of the emulator-generated one, which
        skips couchDB and cmssw access.
        """
        EmulatorHelper.setEmulators(phedex=True, dbs=True, siteDB=True,
                                    requestMgr=True)
        WorkQueueTestCase.setUp(self)
        self.cacheDir = tempfile.mkdtemp()
        self.specGenerator = WMSpecGenerator(self.cacheDir)
        self.specNamePrefix = "TestReReco_"
        self.specs = self.createReRecoSpec(5, "file")
        # Create the global queue under test
        self.globalQueue = globalQueue(DbName=self.globalQDB,
                                       InboxDbName=self.globalQInboxDB,
                                       NegotiationTimeout=0)

    def tearDown(self):
        """tearDown"""
        WorkQueueTestCase.tearDown(self)
        try:
            self.specGenerator.removeSpecs()
        except Exception:
            # best-effort cleanup: a failure here must not mask the test
            # outcome.  (Was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.)
            pass
        EmulatorHelper.resetEmulators()

    def createReRecoSpec(self, numOfSpec, type="spec"):
        """Create `numOfSpec` ReReco specs; returns the list of spec urls."""
        # NOTE(review): `type` shadows the builtin but is kept for
        # backward compatibility with keyword callers.
        return [self.specGenerator.createReRecoSpec(
                    "%s%s" % (self.specNamePrefix, i + 1), type)
                for i in range(numOfSpec)]

    def createProfile(self, name, function):
        """Profile `function`, dump stats to file `name` and print summaries."""
        statsFile = name  # renamed local: `file` shadowed the builtin
        prof = cProfile.Profile()
        prof.runcall(function)
        prof.dump_stats(statsFile)
        p = pstats.Stats(statsFile)
        # top 10% of entries by three different orderings
        p.strip_dirs().sort_stats('cumulative').print_stats(0.1)
        p.strip_dirs().sort_stats('time').print_stats(0.1)
        p.strip_dirs().sort_stats('calls').print_stats(0.1)

    def testQueueElementProfile(self):
        """Profile queueing all generated specs."""
        self.createProfile('queueElementProfile.prof',
                           self.multipleQueueWorkCall)

    def multipleQueueWorkCall(self):
        """Queue every generated spec into the global queue."""
        for i, wmspec in enumerate(self.specs, start=1):
            self.globalQueue.queueWork(wmspec, self.specNamePrefix + str(i),
                                       'test_team')
class LocalWorkQueueProfileTest(WorkQueueTestCase):
    """
    _WorkQueueTest_

    Profiling test for pulling work out of a local WorkQueue.
    """

    def setUp(self):
        """
        If we dont have a wmspec file create one
        """
        EmulatorHelper.setEmulators(phedex=True, dbs=True, siteDB=True,
                                    requestMgr=True)
        WorkQueueTestCase.setUp(self)
        self.cacheDir = tempfile.mkdtemp()
        self.specGenerator = WMSpecGenerator(self.cacheDir)
        self.specs = self.createReRecoSpec(1, "file")
        # Create the local queue under test
        self.localQueue = localQueue(DbName=self.queueDB,
                                     InboxDbName=self.queueInboxDB,
                                     NegotiationTimeout=0,
                                     QueueURL="global.example.com",
                                     CacheDir=self.cacheDir)

    def tearDown(self):
        """tearDown"""
        WorkQueueTestCase.tearDown(self)
        try:
            shutil.rmtree(self.cacheDir)
            self.specGenerator.removeSpecs()
        except Exception:
            # best-effort cleanup: a failure here must not mask the test
            # outcome.  (Was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.)
            pass
        EmulatorHelper.resetEmulators()

    def createReRecoSpec(self, numOfSpec, type="spec"):
        """Create `numOfSpec` ReReco specs; returns the list of spec urls."""
        # NOTE(review): `type` shadows the builtin but is kept for
        # backward compatibility with keyword callers.
        return [self.specGenerator.createReRecoSpec(
                    "MinBiasProcessingSpec_Test_%s" % (i + 1), type)
                for i in range(numOfSpec)]

    def createProfile(self, name, function):
        """Profile `function`, dump stats to file `name` and print summaries."""
        statsFile = name  # renamed local: `file` shadowed the builtin
        prof = cProfile.Profile()
        prof.runcall(function)
        prof.dump_stats(statsFile)
        p = pstats.Stats(statsFile)
        # top 10% of entries by three different orderings
        p.strip_dirs().sort_stats("cumulative").print_stats(0.1)
        p.strip_dirs().sort_stats("time").print_stats(0.1)
        p.strip_dirs().sort_stats("calls").print_stats(0.1)

    def testGetWorkLocalQueue(self):
        """Queue the specs locally, then profile getWork."""
        for i, spec in enumerate(self.specs, start=1):
            specName = "MinBiasProcessingSpec_Test_%s" % i
            self.localQueue.queueWork(spec, specName, team="A-team")
        self.localQueue.updateLocationInfo()
        self.createProfile("getWorkProfile.prof", self.localQueueGetWork)

    def localQueueGetWork(self):
        """Ask the local queue for work on behalf of every emulated site."""
        siteJobs = {site: 100000 for site in Globals.SITES}
        self.localQueue.getWork(siteJobs, {})