def insertAllSEs(self, siteName, pendingSlots=0, runningSlots=0, ceName=None, plugin=None, taskList=None):
    """
    _insertAllSEs_

    Insert every PhEDEx node (PNN) known to SiteDB into WMBS ResourceControl,
    one pseudo-site per PNN named '<siteName>_<PNN>'.  Meant to be used with
    WMS submission.

    taskList entries must be dicts of the form:
        {'taskType': taskType, 'priority': priority,
         'maxSlots': maxSlots, 'pendingSlots': pendingSlots}
    For each entry, a threshold is inserted into the database for EVERY PNN.

    :param siteName: prefix used to build each per-PNN site name
    :param pendingSlots: pending-slot count handed to insertSite
    :param runningSlots: running-slot count handed to insertSite
    :param ceName: CE name shared by all inserted sites (may be None)
    :param plugin: submission plugin recorded for the inserted sites
    :param taskList: list of threshold dicts (see above); default: no thresholds
    :raises ResourceControlException: if a taskList entry lacks a required key
    """
    # Local import so callers that never use this method do not need SiteDB.
    from WMCore.Services.SiteDB.SiteDB import SiteDBJSON

    # Fix: the original used a mutable default argument (taskList=[]), which
    # is shared across calls; use the None sentinel instead.
    if taskList is None:
        taskList = []

    siteDB = SiteDBJSON()
    for cmsName in siteDB.getAllCMSNames():
        for pnn in siteDB.cmsNametoPhEDExNode(cmsName):
            sName = '%s_%s' % (siteName, pnn)
            self.insertSite(siteName=sName, pendingSlots=pendingSlots,
                            pnn=pnn, runningSlots=runningSlots,
                            ceName=ceName, cmsName=cmsName, plugin=plugin)
            for task in taskList:
                # Validate every key read below so a malformed entry raises a
                # clear ResourceControlException rather than a bare KeyError
                # ('pendingSlots' was previously unchecked).
                if not all(key in task for key in ('taskType', 'maxSlots', 'pendingSlots')):
                    msg = "Incomplete task in taskList for ResourceControl.insertAllSEs\n"
                    # Fix: task is a dict; stringify before concatenating
                    # (the original 'msg += task' raised TypeError).
                    msg += str(task)
                    raise ResourceControlException(msg)
                self.insertThreshold(siteName=sName, taskType=task['taskType'],
                                     maxSlots=task['maxSlots'],
                                     pendingSlots=task['pendingSlots'])
    return
def phedexIt():
    """
    Compare two SE -> PhEDEx-node mappings and print their differences.

    The "old" mapping is built directly from the PhEDEx node map (each node
    advertises its SE); the "new" mapping goes SE -> CMS site name(s) via
    SiteDB, then CMS name -> PhEDEx node(s), keeping only nodes PhEDEx
    actually knows about.  Discrepancies are printed to stdout.

    NOTE(review): relies on module-level PhEDEx and SiteDBJSON imports and
    on live service responses; Python 2 print statements throughout.
    """
    # Old mapping: straight from the PhEDEx node map.
    x = PhEDEx(responseType = "json")
    phedexNodes = x.getNodeMap()['phedex']['node']
    phedexMap = {}           # node name -> node kind (informational)
    sePhedexMap = {}         # SE name -> set of PhEDEx node names ("old" mapping)
    knownPhedexNodes = set() # every node name PhEDEx reports
    for node in phedexNodes:
        phedexMap[node['name']] = node['kind']
        #print '%s -> %s, %s' % (node['name'], node['kind'], node['se'])
        if node['se'] not in sePhedexMap:
            sePhedexMap[node['se']] = set()
        sePhedexMap[node['se']].add(node['name'])
        knownPhedexNodes.add(node['name'])

    # New mapping: SE -> CMS name(s) -> PhEDEx node(s), via SiteDB.
    y = SiteDBJSON()
    seNames = y.getAllSENames()
    cmsNamesMap = {}  # SE name -> CMS site name(s)
    for se in seNames:
        cmsNames = y.seToCMSName(se)
        cmsNamesMap[se] = cmsNames
    seToNodeMap = {}  # SE name -> set of known PhEDEx node names ("new" mapping)
    for se in cmsNamesMap:
        candidates = set()
        for cmsName in cmsNamesMap[se]:
            phedexNodes = y.cmsNametoPhEDExNode(cmsName)
            candidates.update(set(phedexNodes))
        # Keep only candidates PhEDEx itself knows about.
        validCandidates = set()
        for candidate in candidates:
            if candidate in knownPhedexNodes:
                validCandidates.add(candidate)
        seToNodeMap[se] = validCandidates
        #print '%s to %s' % (se, candidates)

    # Report SEs present in one mapping but not the other.
    for se in sePhedexMap:
        if se not in seToNodeMap:
            print "SE: %s is not in new mapping for sites %s" % (se, list(sePhedexMap[se]))
    for se in seToNodeMap:
        if se not in sePhedexMap:
            print "SE: %s is not in old mapping for sites %s" % (se, list(seToNodeMap[se]))
            continue
    # For SEs in both mappings, report the per-direction node differences.
    for se in set(seToNodeMap.keys()).intersection(set(sePhedexMap.keys())):
        diff = sePhedexMap[se] - seToNodeMap[se]
        if diff:
            print "%s are in old mapping but not in new for %s" %(str(list(diff)), se)
        diff = seToNodeMap[se] - sePhedexMap[se]
        if diff:
            print "%s are in new mapping but not in old for %s" %(str(list(diff)), se)
def insertAllSEs(self, siteName, pendingSlots = 0, runningSlots = 0,
                 ceName = None, plugin = None, taskList = None):
    """
    _insertAllSEs_

    Insert every PhEDEx node (PNN) known to SiteDB into WMBS ResourceControl,
    one pseudo-site per PNN named '<siteName>_<PNN>'.  Meant to be used with
    WMS submission.

    taskList entries must be dicts of the form:
        {'taskType': taskType, 'priority': priority,
         'maxSlots': maxSlots, 'pendingSlots': pendingSlots}
    For each entry, a threshold is inserted into the database for EVERY PNN.

    :param siteName: prefix used to build each per-PNN site name
    :param pendingSlots: pending-slot count handed to insertSite
    :param runningSlots: running-slot count handed to insertSite
    :param ceName: CE name shared by all inserted sites (may be None)
    :param plugin: submission plugin recorded for the inserted sites
    :param taskList: list of threshold dicts (see above); default: no thresholds
    :raises ResourceControlException: if a taskList entry lacks a required key
    """
    # Local import so callers that never use this method do not need SiteDB.
    from WMCore.Services.SiteDB.SiteDB import SiteDBJSON

    # Fix: the original used a mutable default argument (taskList = []),
    # which is shared across calls; use the None sentinel instead.
    if taskList is None:
        taskList = []

    siteDB = SiteDBJSON()
    for cmsName in siteDB.getAllCMSNames():
        for pnn in siteDB.cmsNametoPhEDExNode(cmsName):
            sName = '%s_%s' % (siteName, pnn)
            self.insertSite(siteName = sName, pendingSlots = pendingSlots,
                            pnn = pnn, runningSlots = runningSlots,
                            ceName = ceName, cmsName = cmsName, plugin = plugin)
            for task in taskList:
                # Validate every key read below so a malformed entry raises a
                # clear ResourceControlException rather than a bare KeyError
                # ('pendingSlots' was previously unchecked).
                if not all(key in task for key in ('taskType', 'maxSlots', 'pendingSlots')):
                    msg = "Incomplete task in taskList for ResourceControl.insertAllSEs\n"
                    # Fix: task is a dict; stringify before concatenating
                    # (the original 'msg += task' raised TypeError).
                    msg += str(task)
                    raise ResourceControlException(msg)
                self.insertThreshold(siteName = sName,
                                     taskType = task['taskType'],
                                     maxSlots = task['maxSlots'],
                                     pendingSlots = task['pendingSlots'])
    return
class SiteDBTest(unittest.TestCase):
    """
    Unit tests for the SiteDB service wrapper (SiteScreening module).

    Tests marked @attr("integration") talk to the live SiteDB service.
    Deprecated TestCase aliases (failUnless, assertEquals) have been replaced
    with their modern equivalents; assertEqual is preferred over
    assertTrue(a == b) so failures report both values.
    """

    def setUp(self):
        """
        Set up a SiteDB client shared by all tests.
        """
        self.mySiteDB = SiteDBJSON()

    @attr("integration")
    def testCmsNametoPhEDExNode(self):
        """
        Tests cmsNametoPhEDExNode
        """
        target = ['T1_US_FNAL_MSS', 'T1_US_FNAL_Buffer']
        results = self.mySiteDB.cmsNametoPhEDExNode("T1_US_FNAL")
        self.assertEqual(sorted(results), sorted(target))

    @attr("integration")
    def testPhEDExNodetocmsName(self):
        """
        Tests phEDExNodetocmsName
        """
        result = self.mySiteDB.phEDExNodetocmsName('T1_US_FNAL_MSS')
        self.assertEqual(result, 'T1_US_FNAL')
        result = self.mySiteDB.phEDExNodetocmsName('T1_US_FNAL_Buffer')
        self.assertEqual(result, 'T1_US_FNAL')
        result = self.mySiteDB.phEDExNodetocmsName('T2_UK_London_IC')
        self.assertEqual(result, 'T2_UK_London_IC')
        # don't check this anymore, see comment in phEDExNodetocmsName function
        #self.assertRaises(ValueError, self.mySiteDB.phEDExNodetocmsName,
        #                  'T9_DOESNT_EXIST_Buffer')

    @attr("integration")
    def testCmsNametoSE(self):
        """
        Tests cmsNametoSE
        """
        target = ['srm-cms.gridpp.rl.ac.uk']
        results = self.mySiteDB.cmsNametoSE("T1_UK_RAL")
        self.assertEqual(sorted(results), sorted(target))

    @attr("integration")
    def testSEtoCmsName(self):
        """
        Tests seToCMSName
        """
        target = 'T1_US_FNAL'
        results = self.mySiteDB.seToCMSName("cmssrm.fnal.gov")
        self.assertEqual(results, target)

    @attr("integration")
    def testCmsNametoCE(self):
        """
        Tests cmsNametoCE
        """
        target = ['lcgce06.gridpp.rl.ac.uk', 'lcgce07.gridpp.rl.ac.uk',
                  'lcgce09.gridpp.rl.ac.uk']
        results = self.mySiteDB.cmsNametoCE("T1_UK_RAL")
        self.assertEqual(sorted(results), target)

    @attr("integration")
    def testJSONParser(self):
        """
        Tests the JSON parser directly
        """
        cmsName = "cmsgrid02.hep.wisc.edu"
        results = self.mySiteDB.getJSON("CEtoCMSName",
                                        file="CEtoCMSName",
                                        name=cmsName)
        self.assertEqual(results['0']['name'], "T2_US_Wisconsin")

    @attr("integration")
    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        testDn = "/C=UK/O=eScience/OU=Bristol/L=IS/CN=simon metson"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertEqual(testUserName, userName)

    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertEqual(testUserName, userName)

    @attr("integration")
    def testParsingJsonWithApostrophe(self):
        """
        Tests parsing a DN json with an apostrophe in
        """
        json = """{"dn": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'", "user": "******"}"""
        d = self.mySiteDB.parser.dictParser(json)
        self.assertEqual("/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'", d['dn'])

    @attr("integration")
    def testParsingInvalidJsonWithApostrophe(self):
        """
        Tests parsing a DN invalid json (from sitedb v1) with an apostrophe in
        """
        json = """{'dn': '/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio' Fano', 'user': '******'}"""
        d = self.mySiteDB.parser.dictParser(json)
        self.assertEqual("/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio' Fano", d['dn'])
        json = """{'dn': '/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'', 'user': '******'}"""
        d = self.mySiteDB.parser.dictParser(json)
        self.assertEqual("/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'", d['dn'])
class WMBSHelperTest(EmulatedUnitTestCase): def setUp(self): """ _setUp_ """ super(WMBSHelperTest, self).setUp() self.testInit = TestInitCouchApp(__file__) self.testInit.setLogging() self.testInit.setDatabaseConnection(destroyAllDatabase=True) self.testInit.setupCouch("wmbshelper_t/jobs", "JobDump") self.testInit.setupCouch("wmbshelper_t/fwjrs", "FWJRDump") self.testInit.setupCouch("config_test", "GroupUser", "ConfigCache") os.environ["COUCHDB"] = "wmbshelper_t" self.testInit.setSchema(customModules=["WMCore.WMBS", "WMComponent.DBS3Buffer", "WMCore.BossAir", "WMCore.ResourceControl"], useDefault=False) self.workDir = self.testInit.generateWorkDir() self.wmspec = self.createWMSpec() self.topLevelTask = getFirstTask(self.wmspec) self.inputDataset = self.topLevelTask.inputDataset() self.dataset = self.topLevelTask.getInputDatasetPath() self.dbs = DBSReader(self.inputDataset.dbsurl) self.daoFactory = DAOFactory(package="WMCore.WMBS", logger=threading.currentThread().logger, dbinterface=threading.currentThread().dbi) self.configFile = EmulatorSetup.setupWMAgentConfig() self.config = loadConfigurationFile(self.configFile) self.config.component_("JobSubmitter") self.config.JobSubmitter.submitDir = self.workDir self.config.JobSubmitter.submitScript = os.path.join(getTestBase(), 'WMComponent_t/JobSubmitter_t', 'submit.sh') return def tearDown(self): """ _tearDown_ Clear out the database. """ self.testInit.clearDatabase() self.testInit.tearDownCouch() self.testInit.delWorkDir() EmulatorSetup.deleteConfig(self.configFile) super(WMBSHelperTest, self).tearDown() return def setupForKillTest(self, baAPI=None): """ _setupForKillTest_ Inject a workflow into WMBS that has a processing task, a merge task and a cleanup task. Inject files into the various tasks at various processing states (acquired, complete, available...). Also create jobs for each subscription in various states. 
""" myThread = threading.currentThread() daoFactory = DAOFactory(package="WMCore.WMBS", logger=myThread.logger, dbinterface=myThread.dbi) dummyLocationAction = daoFactory(classname="Locations.New") changeStateAction = daoFactory(classname="Jobs.ChangeState") resourceControl = ResourceControl() resourceControl.insertSite(siteName='site1', pnn='goodse.cern.ch', ceName='site1', plugin="TestPlugin") resourceControl.insertThreshold(siteName='site1', taskType='Processing', \ maxSlots=10000, pendingSlots=10000) userDN = 'someDN' userAction = daoFactory(classname="Users.New") userAction.execute(dn=userDN, group_name='DEFAULT', role_name='DEFAULT') inputFileset = Fileset("input") inputFileset.create() inputFileA = File("lfnA", locations="goodse.cern.ch") inputFileB = File("lfnB", locations="goodse.cern.ch") inputFileC = File("lfnC", locations="goodse.cern.ch") inputFileA.create() inputFileB.create() inputFileC.create() inputFileset.addFile(inputFileA) inputFileset.addFile(inputFileB) inputFileset.addFile(inputFileC) inputFileset.commit() unmergedOutputFileset = Fileset("unmerged") unmergedOutputFileset.create() unmergedFileA = File("ulfnA", locations="goodse.cern.ch") unmergedFileB = File("ulfnB", locations="goodse.cern.ch") unmergedFileC = File("ulfnC", locations="goodse.cern.ch") unmergedFileA.create() unmergedFileB.create() unmergedFileC.create() unmergedOutputFileset.addFile(unmergedFileA) unmergedOutputFileset.addFile(unmergedFileB) unmergedOutputFileset.addFile(unmergedFileC) unmergedOutputFileset.commit() mainProcWorkflow = Workflow(spec="spec1", owner="Steve", name="Main", task="Proc") mainProcWorkflow.create() mainProcMergeWorkflow = Workflow(spec="spec1", owner="Steve", name="Main", task="ProcMerge") mainProcMergeWorkflow.create() mainCleanupWorkflow = Workflow(spec="spec1", owner="Steve", name="Main", task="Cleanup") mainCleanupWorkflow.create() self.mainProcSub = Subscription(fileset=inputFileset, workflow=mainProcWorkflow, type="Processing") 
self.mainProcSub.create() self.mainProcSub.acquireFiles(inputFileA) self.mainProcSub.completeFiles(inputFileB) procJobGroup = JobGroup(subscription=self.mainProcSub) procJobGroup.create() self.procJobA = Job(name="ProcJobA") self.procJobA["state"] = "new" self.procJobA["location"] = "site1" self.procJobB = Job(name="ProcJobB") self.procJobB["state"] = "executing" self.procJobB["location"] = "site1" self.procJobC = Job(name="ProcJobC") self.procJobC["state"] = "complete" self.procJobC["location"] = "site1" self.procJobA.create(procJobGroup) self.procJobB.create(procJobGroup) self.procJobC.create(procJobGroup) self.mainMergeSub = Subscription(fileset=unmergedOutputFileset, workflow=mainProcMergeWorkflow, type="Merge") self.mainMergeSub.create() self.mainMergeSub.acquireFiles(unmergedFileA) self.mainMergeSub.failFiles(unmergedFileB) mergeJobGroup = JobGroup(subscription=self.mainMergeSub) mergeJobGroup.create() self.mergeJobA = Job(name="MergeJobA") self.mergeJobA["state"] = "exhausted" self.mergeJobA["location"] = "site1" self.mergeJobB = Job(name="MergeJobB") self.mergeJobB["state"] = "cleanout" self.mergeJobB["location"] = "site1" self.mergeJobC = Job(name="MergeJobC") self.mergeJobC["state"] = "new" self.mergeJobC["location"] = "site1" self.mergeJobA.create(mergeJobGroup) self.mergeJobB.create(mergeJobGroup) self.mergeJobC.create(mergeJobGroup) self.mainCleanupSub = Subscription(fileset=unmergedOutputFileset, workflow=mainCleanupWorkflow, type="Cleanup") self.mainCleanupSub.create() self.mainCleanupSub.acquireFiles(unmergedFileA) self.mainCleanupSub.completeFiles(unmergedFileB) cleanupJobGroup = JobGroup(subscription=self.mainCleanupSub) cleanupJobGroup.create() self.cleanupJobA = Job(name="CleanupJobA") self.cleanupJobA["state"] = "new" self.cleanupJobA["location"] = "site1" self.cleanupJobB = Job(name="CleanupJobB") self.cleanupJobB["state"] = "executing" self.cleanupJobB["location"] = "site1" self.cleanupJobC = Job(name="CleanupJobC") self.cleanupJobC["state"] 
= "complete" self.cleanupJobC["location"] = "site1" self.cleanupJobA.create(cleanupJobGroup) self.cleanupJobB.create(cleanupJobGroup) self.cleanupJobC.create(cleanupJobGroup) jobList = [self.procJobA, self.procJobB, self.procJobC, self.mergeJobA, self.mergeJobB, self.mergeJobC, self.cleanupJobA, self.cleanupJobB, self.cleanupJobC] changeStateAction.execute(jobList) if baAPI: for job in jobList: job['plugin'] = 'TestPlugin' job['userdn'] = userDN job['usergroup'] = 'DEFAULT' job['userrole'] = 'DEFAULT' job['custom']['location'] = 'site1' baAPI.createNewJobs(wmbsJobs=jobList) # We'll create an unrelated workflow to verify that it isn't affected # by the killing code. bogusFileset = Fileset("dontkillme") bogusFileset.create() bogusFileA = File("bogus/lfnA", locations="goodse.cern.ch") bogusFileA.create() bogusFileset.addFile(bogusFileA) bogusFileset.commit() bogusWorkflow = Workflow(spec="spec2", owner="Steve", name="Bogus", task="Proc") bogusWorkflow.create() self.bogusSub = Subscription(fileset=bogusFileset, workflow=bogusWorkflow, type="Processing") self.bogusSub.create() self.bogusSub.acquireFiles(bogusFileA) return def verifyFileKillStatus(self): """ _verifyFileKillStatus_ Verify that all files were killed correctly. The status of files in Cleanup and LogCollect subscriptions isn't modified. Status of already completed and failed files is not modified. Also verify that the bogus subscription is untouched. 
""" failedFiles = self.mainProcSub.filesOfStatus("Failed") acquiredFiles = self.mainProcSub.filesOfStatus("Acquired") completedFiles = self.mainProcSub.filesOfStatus("Completed") availableFiles = self.mainProcSub.filesOfStatus("Available") bogusAcquiredFiles = self.bogusSub.filesOfStatus("Acquired") self.assertEqual(len(availableFiles), 0, \ "Error: There should be no available files.") self.assertEqual(len(acquiredFiles), 0, \ "Error: There should be no acquired files.") self.assertEqual(len(bogusAcquiredFiles), 1, \ "Error: There should be one acquired file.") self.assertEqual(len(completedFiles), 3, \ "Error: There should be only one completed file.") goldenLFNs = ["lfnA", "lfnB", "lfnC"] for completedFile in completedFiles: self.assertTrue(completedFile["lfn"] in goldenLFNs, \ "Error: Extra completed file.") goldenLFNs.remove(completedFile["lfn"]) self.assertEqual(len(failedFiles), 0, \ "Error: There should be no failed files.") self.assertEqual(len(goldenLFNs), 0, \ "Error: Missing LFN") failedFiles = self.mainMergeSub.filesOfStatus("Failed") acquiredFiles = self.mainMergeSub.filesOfStatus("Acquired") completedFiles = self.mainMergeSub.filesOfStatus("Completed") availableFiles = self.mainMergeSub.filesOfStatus("Available") self.assertEqual(len(acquiredFiles), 0, \ "Error: Merge subscription should have 0 acq files.") self.assertEqual(len(availableFiles), 0, \ "Error: Merge subscription should have 0 avail files.") self.assertEqual(len(failedFiles), 1, \ "Error: Merge subscription should have 1 failed files.") self.assertEqual(list(failedFiles)[0]["lfn"], "ulfnB", "Error: Wrong failed file.") self.assertEqual(len(completedFiles), 2, \ "Error: Merge subscription should have 2 compl files.") goldenLFNs = ["ulfnA", "ulfnC"] for completedFile in completedFiles: self.assertTrue(completedFile["lfn"] in goldenLFNs, \ "Error: Extra complete file.") goldenLFNs.remove(completedFile["lfn"]) self.assertEqual(len(goldenLFNs), 0, \ "Error: Missing LFN") failedFiles = 
self.mainCleanupSub.filesOfStatus("Failed") acquiredFiles = self.mainCleanupSub.filesOfStatus("Acquired") completedFiles = self.mainCleanupSub.filesOfStatus("Completed") availableFiles = self.mainCleanupSub.filesOfStatus("Available") self.assertEqual(len(failedFiles), 0, \ "Error: Cleanup subscription should have 0 fai files.") self.assertEqual(len(acquiredFiles), 1, \ "Error: There should be only one acquired file.") self.assertEqual(list(acquiredFiles)[0]["lfn"], "ulfnA", \ "Error: Wrong acquired LFN.") self.assertEqual(len(completedFiles), 1, \ "Error: There should be only one completed file.") self.assertEqual(list(completedFiles)[0]["lfn"], "ulfnB", \ "Error: Wrong completed LFN.") self.assertEqual(len(availableFiles), 1, \ "Error: There should be only one available file.") self.assertEqual(list(availableFiles)[0]["lfn"], "ulfnC", \ "Error: Wrong completed LFN.") return def verifyJobKillStatus(self): """ _verifyJobKillStatus_ Verify that jobs are killed correctly. Jobs belonging to Cleanup and LogCollect subscriptions are not killed. The status of jobs that have already finished running is not changed. 
""" self.procJobA.load() self.procJobB.load() self.procJobC.load() self.assertEqual(self.procJobA["state"], "killed", \ "Error: Proc job A should be killed.") self.assertEqual(self.procJobB["state"], "killed", \ "Error: Proc job B should be killed.") self.assertEqual(self.procJobC["state"], "complete", \ "Error: Proc job C should be complete.") self.mergeJobA.load() self.mergeJobB.load() self.mergeJobC.load() self.assertEqual(self.mergeJobA["state"], "exhausted", \ "Error: Merge job A should be exhausted.") self.assertEqual(self.mergeJobB["state"], "cleanout", \ "Error: Merge job B should be cleanout.") self.assertEqual(self.mergeJobC["state"], "killed", \ "Error: Merge job C should be killed.") self.cleanupJobA.load() self.cleanupJobB.load() self.cleanupJobC.load() self.assertEqual(self.cleanupJobA["state"], "new", \ "Error: Cleanup job A should be new.") self.assertEqual(self.cleanupJobB["state"], "executing", \ "Error: Cleanup job B should be executing.") self.assertEqual(self.cleanupJobC["state"], "complete", \ "Error: Cleanup job C should be complete.") return def createTestWMSpec(self): """ _createTestWMSpec_ Create a WMSpec that has a processing, merge, cleanup and skims tasks that can be used by the subscription creation test. 
""" testWorkload = WMWorkloadHelper(WMWorkload("TestWorkload")) testWorkload.setDashboardActivity("TestReReco") testWorkload.setSpecUrl("/path/to/workload") testWorkload.setOwnerDetails("sfoulkes", "DMWM", {'dn': 'MyDN'}) procTask = testWorkload.newTask("ProcessingTask") procTask.setTaskType("Processing") procTask.setSplittingAlgorithm("FileBased", files_per_job=1) procTaskCMSSW = procTask.makeStep("cmsRun1") procTaskCMSSW.setStepType("CMSSW") procTaskCMSSWHelper = procTaskCMSSW.getTypeHelper() procTask.setTaskType("Processing") procTask.setSiteWhitelist(["site1"]) procTask.setSiteBlacklist(["site2"]) procTask.applyTemplates() procTaskCMSSWHelper.addOutputModule("OutputA", primaryDataset="bogusPrimary", processedDataset="bogusProcessed", dataTier="DataTierA", lfnBase="bogusUnmerged", mergedLFNBase="bogusMerged", filterName=None) mergeTask = procTask.addTask("MergeTask") mergeTask.setInputReference(procTaskCMSSW, outputModule="OutputA", dataTier='DataTierA') mergeTask.setTaskType("Merge") mergeTask.setSplittingAlgorithm("WMBSMergeBySize", min_merge_size=1, max_merge_size=2, max_merge_events=3) mergeTaskCMSSW = mergeTask.makeStep("cmsRun1") mergeTaskCMSSW.setStepType("CMSSW") mergeTaskCMSSWHelper = mergeTaskCMSSW.getTypeHelper() mergeTask.setTaskType("Merge") mergeTask.applyTemplates() mergeTaskCMSSWHelper.addOutputModule("Merged", primaryDataset="bogusPrimary", processedDataset="bogusProcessed", dataTier="DataTierA", lfnBase="bogusUnmerged", mergedLFNBase="bogusMerged", filterName=None) cleanupTask = procTask.addTask("CleanupTask") cleanupTask.setInputReference(procTaskCMSSW, outputModule="OutputA", dataTier="DataTierA") cleanupTask.setTaskType("Merge") cleanupTask.setSplittingAlgorithm("SiblingProcessingBased", files_per_job=50) cleanupTaskCMSSW = cleanupTask.makeStep("cmsRun1") cleanupTaskCMSSW.setStepType("CMSSW") cleanupTask.setTaskType("Cleanup") cleanupTask.applyTemplates() skimTask = mergeTask.addTask("SkimTask") skimTask.setTaskType("Skim") 
skimTask.setInputReference(mergeTaskCMSSW, outputModule="Merged", dataTier="DataTierA") skimTask.setSplittingAlgorithm("FileBased", files_per_job=1, include_parents=True) skimTaskCMSSW = skimTask.makeStep("cmsRun1") skimTaskCMSSW.setStepType("CMSSW") skimTaskCMSSWHelper = skimTaskCMSSW.getTypeHelper() skimTask.setTaskType("Skim") skimTask.applyTemplates() skimTaskCMSSWHelper.addOutputModule("SkimOutputA", primaryDataset="bogusPrimary", processedDataset="bogusProcessed", dataTier="DataTierA", lfnBase="bogusUnmerged", mergedLFNBase="bogusMerged", filterName=None) skimTaskCMSSWHelper.addOutputModule("SkimOutputB", primaryDataset="bogusPrimary", processedDataset="bogusProcessed", dataTier="DataTierB", lfnBase="bogusUnmerged", mergedLFNBase="bogusMerged", filterName=None) return testWorkload def setupMCWMSpec(self): """Setup MC workflow""" self.wmspec = self.createMCWMSpec() self.topLevelTask = getFirstTask(self.wmspec) self.inputDataset = self.topLevelTask.inputDataset() self.dataset = self.topLevelTask.getInputDatasetPath() self.dbs = None self.siteDB = SiteDBJSON() # add sites that would normally be added by operator via resource_control locationDAO = self.daoFactory(classname="Locations.New") self.pnns = [] for site in ['T2_XX_SiteA', 'T2_XX_SiteB']: locationDAO.execute(siteName=site, pnn=self.siteDB.cmsNametoPhEDExNode(site)[0]) self.pnns.append(self.siteDB.cmsNametoPhEDExNode(site)[0]) def createWMSpec(self, name='ReRecoWorkload'): factory = ReRecoWorkloadFactory() rerecoArgs["ConfigCacheID"] = createConfig(rerecoArgs["CouchDBName"]) wmspec = factory.factoryWorkloadConstruction(name, rerecoArgs) wmspec.setSpecUrl("/path/to/workload") wmspec.setSubscriptionInformation(custodialSites=[], nonCustodialSites=[], autoApproveSites=[], priority="Low", custodialSubType="Move") return wmspec def createMCWMSpec(self, name='MonteCarloWorkload'): mcArgs['CouchDBName'] = rerecoArgs["CouchDBName"] mcArgs["ConfigCacheID"] = createConfig(mcArgs["CouchDBName"]) wmspec = 
monteCarloWorkload(name, mcArgs) wmspec.setSpecUrl("/path/to/workload") getFirstTask(wmspec).addProduction(totalevents=10000) return wmspec def getDBS(self, wmspec): topLevelTask = getFirstTask(wmspec) inputDataset = topLevelTask.inputDataset() dbs = DBSReader(inputDataset.dbsurl) # dbsDict = {self.inputDataset.dbsurl : self.dbs} return dbs def createWMBSHelperWithTopTask(self, wmspec, block, mask=None, parentFlag=False, detail=False): topLevelTask = getFirstTask(wmspec) wmbs = WMBSHelper(wmspec, topLevelTask.name(), block, mask, cachepath=self.workDir) if block: if parentFlag: block = self.dbs.getFileBlockWithParents(block)[block] else: block = self.dbs.getFileBlock(block)[block] sub, files = wmbs.createSubscriptionAndAddFiles(block=block) if detail: return wmbs, sub, files else: return wmbs def testKillWorkflow(self): """ _testKillWorkflow_ Verify that workflow killing works correctly. """ baAPI = BossAirAPI(config=self.config, insertStates=True) # Create nine jobs self.setupForKillTest(baAPI=baAPI) self.assertEqual(len(baAPI._listRunJobs()), 9) killWorkflow("Main", self.config, self.config) self.verifyFileKillStatus() self.verifyJobKillStatus() self.assertEqual(len(baAPI._listRunJobs()), 8) return def testCreateSubscription(self): """ _testCreateSubscription_ Verify that the subscription creation code works correctly. 
""" resourceControl = ResourceControl() resourceControl.insertSite(siteName='site1', pnn='goodse.cern.ch', ceName='site1', plugin="TestPlugin") resourceControl.insertSite(siteName='site2', pnn='goodse2.cern.ch', ceName='site2', plugin="TestPlugin") testWorkload = self.createTestWMSpec() testTopLevelTask = getFirstTask(testWorkload) testWMBSHelper = WMBSHelper(testWorkload, testTopLevelTask.name(), "SomeBlock", cachepath=self.workDir) testWMBSHelper.createTopLevelFileset() testWMBSHelper._createSubscriptionsInWMBS(testTopLevelTask, testWMBSHelper.topLevelFileset) procWorkflow = Workflow(name="TestWorkload", task="/TestWorkload/ProcessingTask") procWorkflow.load() self.assertEqual(procWorkflow.owner, "sfoulkes", "Error: Wrong owner: %s" % procWorkflow.owner) self.assertEqual(procWorkflow.group, "DMWM", "Error: Wrong group: %s" % procWorkflow.group) self.assertEqual(procWorkflow.wfType, "TestReReco", "Error: Wrong type.") self.assertEqual(procWorkflow.spec, os.path.join(self.workDir, procWorkflow.name, "WMSandbox", "WMWorkload.pkl"), "Error: Wrong spec URL") self.assertEqual(len(procWorkflow.outputMap.keys()), 1, "Error: Wrong number of WF outputs.") mergedProcOutput = procWorkflow.outputMap["OutputADataTierA"][0]["merged_output_fileset"] unmergedProcOutput = procWorkflow.outputMap["OutputADataTierA"][0]["output_fileset"] mergedProcOutput.loadData() unmergedProcOutput.loadData() self.assertEqual(mergedProcOutput.name, "/TestWorkload/ProcessingTask/MergeTask/merged-MergedDataTierA", "Error: Merged output fileset is wrong.") self.assertEqual(unmergedProcOutput.name, "/TestWorkload/ProcessingTask/unmerged-OutputADataTierA", "Error: Unmerged output fileset is wrong.") mergeWorkflow = Workflow(name="TestWorkload", task="/TestWorkload/ProcessingTask/MergeTask") mergeWorkflow.load() self.assertEqual(mergeWorkflow.owner, "sfoulkes", "Error: Wrong owner.") self.assertEqual(mergeWorkflow.spec, os.path.join(self.workDir, mergeWorkflow.name, "WMSandbox", "WMWorkload.pkl"), 
"Error: Wrong spec URL") self.assertEqual(len(mergeWorkflow.outputMap.keys()), 1, "Error: Wrong number of WF outputs.") cleanupWorkflow = Workflow(name="TestWorkload", task="/TestWorkload/ProcessingTask/CleanupTask") cleanupWorkflow.load() self.assertEqual(cleanupWorkflow.owner, "sfoulkes", "Error: Wrong owner.") self.assertEqual(cleanupWorkflow.spec, os.path.join(self.workDir, cleanupWorkflow.name, "WMSandbox", "WMWorkload.pkl"), "Error: Wrong spec URL") self.assertEqual(len(cleanupWorkflow.outputMap.keys()), 0, "Error: Wrong number of WF outputs.") unmergedMergeOutput = mergeWorkflow.outputMap["MergedDataTierA"][0]["output_fileset"] unmergedMergeOutput.loadData() self.assertEqual(unmergedMergeOutput.name, "/TestWorkload/ProcessingTask/MergeTask/merged-MergedDataTierA", "Error: Unmerged output fileset is wrong.") skimWorkflow = Workflow(name="TestWorkload", task="/TestWorkload/ProcessingTask/MergeTask/SkimTask") skimWorkflow.load() self.assertEqual(skimWorkflow.owner, "sfoulkes", "Error: Wrong owner.") self.assertEqual(skimWorkflow.spec, os.path.join(self.workDir, skimWorkflow.name, "WMSandbox", "WMWorkload.pkl"), "Error: Wrong spec URL") self.assertEqual(len(skimWorkflow.outputMap.keys()), 2, "Error: Wrong number of WF outputs.") mergedSkimOutputA = skimWorkflow.outputMap["SkimOutputADataTierA"][0]["merged_output_fileset"] unmergedSkimOutputA = skimWorkflow.outputMap["SkimOutputADataTierA"][0]["output_fileset"] mergedSkimOutputB = skimWorkflow.outputMap["SkimOutputBDataTierB"][0]["merged_output_fileset"] unmergedSkimOutputB = skimWorkflow.outputMap["SkimOutputBDataTierB"][0]["output_fileset"] mergedSkimOutputA.loadData() mergedSkimOutputB.loadData() unmergedSkimOutputA.loadData() unmergedSkimOutputB.loadData() self.assertEqual(mergedSkimOutputA.name, "/TestWorkload/ProcessingTask/MergeTask/SkimTask/unmerged-SkimOutputADataTierA", "Error: Merged output fileset is wrong: %s" % mergedSkimOutputA.name) self.assertEqual(unmergedSkimOutputA.name, 
"/TestWorkload/ProcessingTask/MergeTask/SkimTask/unmerged-SkimOutputADataTierA", "Error: Unmerged output fileset is wrong.") self.assertEqual(mergedSkimOutputB.name, "/TestWorkload/ProcessingTask/MergeTask/SkimTask/unmerged-SkimOutputBDataTierB", "Error: Merged output fileset is wrong.") self.assertEqual(unmergedSkimOutputB.name, "/TestWorkload/ProcessingTask/MergeTask/SkimTask/unmerged-SkimOutputBDataTierB", "Error: Unmerged output fileset is wrong.") topLevelFileset = Fileset(name="TestWorkload-ProcessingTask-SomeBlock") topLevelFileset.loadData() procSubscription = Subscription(fileset=topLevelFileset, workflow=procWorkflow) procSubscription.loadData() self.assertEqual(len(procSubscription.getWhiteBlackList()), 2, "Error: Wrong site white/black list for proc sub.") for site in procSubscription.getWhiteBlackList(): if site["site_name"] == "site1": self.assertEqual(site["valid"], 1, "Error: Site should be white listed.") else: self.assertEqual(site["valid"], 0, "Error: Site should be black listed.") self.assertEqual(procSubscription["type"], "Processing", "Error: Wrong subscription type.") self.assertEqual(procSubscription["split_algo"], "FileBased", "Error: Wrong split algo.") mergeSubscription = Subscription(fileset=unmergedProcOutput, workflow=mergeWorkflow) mergeSubscription.loadData() self.assertEqual(len(mergeSubscription.getWhiteBlackList()), 0, "Error: Wrong white/black list for merge sub.") self.assertEqual(mergeSubscription["type"], "Merge", "Error: Wrong subscription type.") self.assertEqual(mergeSubscription["split_algo"], "WMBSMergeBySize", "Error: Wrong split algo.") skimSubscription = Subscription(fileset=unmergedMergeOutput, workflow=skimWorkflow) skimSubscription.loadData() self.assertEqual(skimSubscription["type"], "Skim", "Error: Wrong subscription type.") self.assertEqual(skimSubscription["split_algo"], "FileBased", "Error: Wrong split algo.") return def testTruncatedWFInsertion(self): """ _testTruncatedWFInsertion_ """ resourceControl = 
ResourceControl() resourceControl.insertSite(siteName='site1', pnn='goodse.cern.ch', ceName='site1', plugin="TestPlugin") resourceControl.insertSite(siteName='site2', pnn='goodse2.cern.ch', ceName='site2', plugin="TestPlugin") testWorkload = self.createTestWMSpec() testTopLevelTask = getFirstTask(testWorkload) testWMBSHelper = WMBSHelper(testWorkload, testTopLevelTask.name(), "SomeBlock", cachepath=self.workDir) testWMBSHelper.createTopLevelFileset() testWMBSHelper._createSubscriptionsInWMBS(testTopLevelTask, testWMBSHelper.topLevelFileset) testWorkload.truncate("ResubmitTestWorkload", "/TestWorkload/ProcessingTask/MergeTask", "someserver", "somedatabase") # create the subscription for multiple top task (MergeTask and CleanupTask for the same block) for task in testWorkload.getTopLevelTask(): testResubmitWMBSHelper = WMBSHelper(testWorkload, task.name(), "SomeBlock2", cachepath=self.workDir) testResubmitWMBSHelper.createTopLevelFileset() testResubmitWMBSHelper._createSubscriptionsInWMBS(task, testResubmitWMBSHelper.topLevelFileset) mergeWorkflow = Workflow(name="ResubmitTestWorkload", task="/ResubmitTestWorkload/MergeTask") mergeWorkflow.load() self.assertEqual(mergeWorkflow.owner, "sfoulkes", "Error: Wrong owner.") self.assertEqual(mergeWorkflow.spec, os.path.join(self.workDir, mergeWorkflow.name, "WMSandbox", "WMWorkload.pkl"), "Error: Wrong spec URL") self.assertEqual(len(mergeWorkflow.outputMap.keys()), 1, "Error: Wrong number of WF outputs.") unmergedMergeOutput = mergeWorkflow.outputMap["MergedDataTierA"][0]["output_fileset"] unmergedMergeOutput.loadData() self.assertEqual(unmergedMergeOutput.name, "/ResubmitTestWorkload/MergeTask/merged-MergedDataTierA", "Error: Unmerged output fileset is wrong.") skimWorkflow = Workflow(name="ResubmitTestWorkload", task="/ResubmitTestWorkload/MergeTask/SkimTask") skimWorkflow.load() self.assertEqual(skimWorkflow.owner, "sfoulkes", "Error: Wrong owner.") self.assertEqual(skimWorkflow.spec, os.path.join(self.workDir, 
skimWorkflow.name, "WMSandbox", "WMWorkload.pkl"), "Error: Wrong spec URL") self.assertEqual(len(skimWorkflow.outputMap.keys()), 2, "Error: Wrong number of WF outputs.") mergedSkimOutputA = skimWorkflow.outputMap["SkimOutputADataTierA"][0]["merged_output_fileset"] unmergedSkimOutputA = skimWorkflow.outputMap["SkimOutputADataTierA"][0]["output_fileset"] mergedSkimOutputB = skimWorkflow.outputMap["SkimOutputBDataTierB"][0]["merged_output_fileset"] unmergedSkimOutputB = skimWorkflow.outputMap["SkimOutputBDataTierB"][0]["output_fileset"] mergedSkimOutputA.loadData() mergedSkimOutputB.loadData() unmergedSkimOutputA.loadData() unmergedSkimOutputB.loadData() self.assertEqual(mergedSkimOutputA.name, "/ResubmitTestWorkload/MergeTask/SkimTask/unmerged-SkimOutputADataTierA", "Error: Merged output fileset is wrong: %s" % mergedSkimOutputA.name) self.assertEqual(unmergedSkimOutputA.name, "/ResubmitTestWorkload/MergeTask/SkimTask/unmerged-SkimOutputADataTierA", "Error: Unmerged output fileset is wrong.") self.assertEqual(mergedSkimOutputB.name, "/ResubmitTestWorkload/MergeTask/SkimTask/unmerged-SkimOutputBDataTierB", "Error: Merged output fileset is wrong.") self.assertEqual(unmergedSkimOutputB.name, "/ResubmitTestWorkload/MergeTask/SkimTask/unmerged-SkimOutputBDataTierB", "Error: Unmerged output fileset is wrong.") topLevelFileset = Fileset(name="ResubmitTestWorkload-MergeTask-SomeBlock2") topLevelFileset.loadData() mergeSubscription = Subscription(fileset=topLevelFileset, workflow=mergeWorkflow) mergeSubscription.loadData() self.assertEqual(len(mergeSubscription.getWhiteBlackList()), 0, "Error: Wrong white/black list for merge sub.") self.assertEqual(mergeSubscription["type"], "Merge", "Error: Wrong subscription type.") self.assertEqual(mergeSubscription["split_algo"], "WMBSMergeBySize", "Error: Wrong split algo.") skimSubscription = Subscription(fileset=unmergedMergeOutput, workflow=skimWorkflow) skimSubscription.loadData() self.assertEqual(skimSubscription["type"], "Skim", 
"Error: Wrong subscription type.") self.assertEqual(skimSubscription["split_algo"], "FileBased", "Error: Wrong split algo.") return def testReReco(self): """ReReco workflow""" # create workflow block = self.dataset + "#" + BLOCK1 wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block) files = wmbs.validFiles(self.dbs.getFileBlock(block)) self.assertEqual(len(files), 1) def testReRecoBlackRunRestriction(self): """ReReco workflow with Run restrictions""" block = self.dataset + "#" + BLOCK2 self.topLevelTask.setInputRunBlacklist([181183]) # Set run blacklist to only run in the block wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block) files = wmbs.validFiles(self.dbs.getFileBlock(block)[block]['Files']) self.assertEqual(len(files), 0) def testReRecoWhiteRunRestriction(self): block = self.dataset + "#" + BLOCK2 self.topLevelTask.setInputRunWhitelist([181183]) # Set run whitelist to only run in the block wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block) files = wmbs.validFiles(self.dbs.getFileBlock(block)[block]['Files']) self.assertEqual(len(files), 1) def testLumiMaskRestrictionsOK(self): block = self.dataset + "#" + BLOCK1 self.wmspec.getTopLevelTask()[0].data.input.splitting.runs = ['181367'] self.wmspec.getTopLevelTask()[0].data.input.splitting.lumis = ['57,80'] wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block) files = wmbs.validFiles(self.dbs.getFileBlock(block)[block]['Files']) self.assertEqual(len(files), 1) def testLumiMaskRestrictionsKO(self): block = self.dataset + "#" + BLOCK1 self.wmspec.getTopLevelTask()[0].data.input.splitting.runs = ['123454321'] self.wmspec.getTopLevelTask()[0].data.input.splitting.lumis = ['123,123'] wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block) files = wmbs.validFiles(self.dbs.getFileBlock(block)[block]['Files']) self.assertEqual(len(files), 0) def testDuplicateFileInsert(self): # using default wmspec block = self.dataset + "#" + BLOCK1 wmbs = self.createWMBSHelperWithTopTask(self.wmspec, 
block) wmbs.topLevelFileset.loadData() numOfFiles = len(wmbs.topLevelFileset.files) # check initially inserted files. dbsFiles = self.dbs.getFileBlock(block)[block]['Files'] self.assertEqual(numOfFiles, len(dbsFiles)) firstFileset = wmbs.topLevelFileset wmbsDao = wmbs.daofactory(classname="Files.InFileset") numOfFiles = len(wmbsDao.execute(firstFileset.id)) self.assertEqual(numOfFiles, len(dbsFiles)) # use the new spec with same inputdataset block = self.dataset + "#" + BLOCK1 wmspec = self.createWMSpec("TestSpec1") dbs = self.getDBS(wmspec) wmbs = self.createWMBSHelperWithTopTask(wmspec, block) # check duplicate insert dbsFiles = dbs.getFileBlock(block)[block]['Files'] numOfFiles = wmbs.addFiles(dbs.getFileBlock(block)[block]) self.assertEqual(numOfFiles, 0) secondFileset = wmbs.topLevelFileset wmbsDao = wmbs.daofactory(classname="Files.InFileset") numOfFiles = len(wmbsDao.execute(secondFileset.id)) self.assertEqual(numOfFiles, len(dbsFiles)) self.assertNotEqual(firstFileset.id, secondFileset.id) def testDuplicateSubscription(self): """Can't duplicate subscriptions""" # using default wmspec block = self.dataset + "#" + BLOCK1 wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block) wmbs.topLevelFileset.loadData() numOfFiles = len(wmbs.topLevelFileset.files) filesetId = wmbs.topLevelFileset.id subId = wmbs.topLevelSubscription['id'] # check initially inserted files. 
dbsFiles = self.dbs.getFileBlock(block)[block]['Files'] self.assertEqual(numOfFiles, len(dbsFiles)) # Not clear what's supposed to happen here, 2nd test is completely redundant dummyFirstFileset = wmbs.topLevelFileset self.assertEqual(numOfFiles, len(dbsFiles)) # reinsert subscription - shouldn't create anything new wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block) wmbs.topLevelFileset.loadData() self.assertEqual(numOfFiles, len(wmbs.topLevelFileset.files)) self.assertEqual(filesetId, wmbs.topLevelFileset.id) self.assertEqual(subId, wmbs.topLevelSubscription['id']) # now do a montecarlo workflow self.setupMCWMSpec() mask = Mask(FirstRun=12, FirstLumi=1234, FirstEvent=12345, LastEvent=999995, LastLumi=12345, LastRun=12) wmbs = self.createWMBSHelperWithTopTask(self.wmspec, None, mask) wmbs.topLevelFileset.loadData() numOfFiles = len(wmbs.topLevelFileset.files) filesetId = wmbs.topLevelFileset.id subId = wmbs.topLevelSubscription['id'] # check initially inserted files. # Not clear what's supposed to happen here, 2nd test is completely redundant numDbsFiles = 1 self.assertEqual(numOfFiles, numDbsFiles) dummyFirstFileset = wmbs.topLevelFileset self.assertEqual(numOfFiles, numDbsFiles) # reinsert subscription - shouldn't create anything new wmbs = self.createWMBSHelperWithTopTask(self.wmspec, None, mask) wmbs.topLevelFileset.loadData() self.assertEqual(numOfFiles, len(wmbs.topLevelFileset.files)) self.assertEqual(filesetId, wmbs.topLevelFileset.id) self.assertEqual(subId, wmbs.topLevelSubscription['id']) def testParentage(self): """ 1. check whether parent files are created in wmbs. 2. check parent files are associated to child. 3. 
When 2 specs with the same input data (one with parent processing, one without it) is inserted, if one without parent processing inserted first then the other with parent processing insert, it still needs to create parent files although child files are duplicate """ # Swap out the dataset for one that has parents task = next(self.wmspec.taskIterator()) oldDS = task.inputDataset() # Copy the old dataset, only will use DBS URL from it task.addInputDataset(name="/Cosmics/ComissioningHI-PromptReco-v1/RECO", primary='Cosmics', processed='ComissioningHI-PromptReco-v1', tier='RECO', dbsurl=oldDS.dbsurl) block = '/Cosmics/ComissioningHI-PromptReco-v1/RECO' + '#5b89ba9c-0dbf-11e1-9b6c-003048caaace' # File creation without parents wmbs, _, numFiles = self.createWMBSHelperWithTopTask(self.wmspec, block, parentFlag=False, detail=True) self.assertEqual(8, numFiles) wmbs.topLevelFileset.loadData() for child in wmbs.topLevelFileset.files: self.assertEqual(len(child["parents"]), 0) # no parents per child # File creation with parents wmbs, _, numFiles = self.createWMBSHelperWithTopTask(self.wmspec, block, parentFlag=True, detail=True) self.assertEqual(8, numFiles) wmbs.topLevelFileset.loadData() for child in wmbs.topLevelFileset.files: self.assertEqual(len(child["parents"]), 1) # one parent per child def testMCFakeFileInjection(self): """Inject fake Monte Carlo files into WMBS""" # This test is failing because the name of the couch DB is set to None # in TestMonteCarloWorkloadFactory.getMCArgs() but changing it to # "reqmgr_config_cache_t" from StdBase test arguments does not fix the # situation. 
testDuplicateSubscription probably has the same issue self.setupMCWMSpec() mask = Mask(FirstRun=12, FirstLumi=1234, FirstEvent=12345, LastEvent=999995, LastLumi=12345, LastRun=12) wmbs = self.createWMBSHelperWithTopTask(self.wmspec, None, mask) subscription = wmbs.topLevelSubscription self.assertEqual(1, subscription.exists()) fileset = subscription['fileset'] self.assertEqual(1, fileset.exists()) fileset.loadData() # need to refresh from database self.assertEqual(len(fileset.files), 1) self.assertEqual(len(fileset.parents), 0) self.assertFalse(fileset.open) firstFile = list(fileset.files)[0] self.assertEqual(firstFile['events'], mask['LastEvent'] - mask['FirstEvent'] + 1) # inclusive range self.assertEqual(firstFile['merged'], False) # merged files get added to dbs self.assertEqual(len(firstFile['parents']), 0) # firstFile.loadData() self.assertEqual(sorted(firstFile['locations']), sorted(self.pnns)) self.assertEqual(len(firstFile.getParentLFNs()), 0) self.assertEqual(len(firstFile.getRuns()), 1) run = firstFile.getRuns()[0] self.assertEqual(run.run, mask['FirstRun']) self.assertEqual(run.lumis[0], mask['FirstLumi']) self.assertEqual(run.lumis[-1], mask['LastLumi']) self.assertEqual(len(run.lumis), mask['LastLumi'] - mask['FirstLumi'] + 1)
class SiteDBTest(unittest.TestCase):
    """
    Unit tests for SiteScreening module

    These tests talk to the live SiteDB service via SiteDBJSON.
    NOTE: ``failUnless`` is a deprecated unittest alias (since Python 2.7);
    it is replaced by ``assertTrue`` throughout this class.
    """

    def setUp(self):
        """
        Setup for unit tests
        """
        self.mySiteDB = SiteDBJSON()

    def testCmsNametoPhEDExNode(self):
        """
        Tests CmsNametoSE
        """
        # A CMS site name may map to several PhEDEx nodes; compare order-insensitively.
        target = ['T1_US_FNAL_MSS', 'T1_US_FNAL_Buffer']
        results = self.mySiteDB.cmsNametoPhEDExNode("T1_US_FNAL")
        self.assertTrue(sorted(results) == sorted(target))
        target = ['T1_US_FNAL_Disk']
        results = self.mySiteDB.cmsNametoPhEDExNode("T1_US_FNAL_Disk")
        self.assertTrue(sorted(results) == sorted(target))

    def testCmsNametoSE(self):
        """
        Tests CmsNametoSE
        """
        target = [u'srm-cms-disk.gridpp.rl.ac.uk', u'srm-cms.gridpp.rl.ac.uk']
        results = self.mySiteDB.cmsNametoSE("T1_UK_RAL")
        self.assertTrue(sorted(results) == sorted(target))

    def testCmsNamePatterntoSE(self):
        """
        Tests CmsNamePatterntoSE
        """
        # '%' acts as a wildcard in the site-name pattern.
        target = [u'srm-eoscms.cern.ch', u'srm-eoscms.cern.ch', u'storage01.lcg.cscs.ch', u'eoscmsftp.cern.ch']
        results = self.mySiteDB.cmsNametoSE("%T2_CH")
        self.assertTrue(sorted(results) == sorted(target))

    def testSEtoCmsName(self):
        """
        Tests CmsNametoSE
        """
        target = [u'T1_US_FNAL']
        results = self.mySiteDB.seToCMSName("cmssrm.fnal.gov")
        self.assertTrue(results == target)
        target = sorted([u'T2_CH_CERN', u'T2_CH_CERN_HLT'])
        results = sorted(self.mySiteDB.seToCMSName("srm-eoscms.cern.ch"))
        self.assertTrue(sorted(results) == sorted(target))
        target = sorted([u'T0_CH_CERN', u'T1_CH_CERN'])
        results = sorted(self.mySiteDB.seToCMSName("srm-cms.cern.ch"))
        self.assertTrue(sorted(results) == sorted(target))
        target = sorted([u'T2_CH_CERN_AI'])
        results = sorted(self.mySiteDB.seToCMSName("eoscmsftp.cern.ch"))
        self.assertTrue(sorted(results) == sorted(target))

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=gutsche/CN=582680/CN=Oliver Gutsche"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertTrue(testUserName == userName)

    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertTrue(testUserName == userName)

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        seNames = self.mySiteDB.getAllSENames()
        self.assertTrue(len(seNames) > 1)
        self.assertTrue('cmssrm.fnal.gov' in seNames)
        return

    def testPNNtoPSN(self):
        """
        _testPNNtoPSN_

        Test converting PhEDEx Node Name to Processing Site Name
        """
        result = self.mySiteDB.PNNtoPSN('T1_US_FNAL_Disk')
        self.assertTrue(result == ['T1_US_FNAL'])
        # Tape endpoints have no processing site.
        result = self.mySiteDB.PNNtoPSN('T1_US_FNAL_Tape')
        self.assertTrue(result == [])
        result = self.mySiteDB.PNNtoPSN('T2_UK_London_IC')
        self.assertTrue(result == ['T2_UK_London_IC'])
        return

    def testCMSNametoList(self):
        """
        Test mapping a CMS-name pattern to the matching SE list.
        """
        result = self.mySiteDB.cmsNametoList("T1_US*", "SE")
        self.assertTrue(result == [u'cmssrm.fnal.gov', u'cmssrmdisk.fnal.gov'])
class SiteDBTest(EmulatedUnitTestCase):
    """
    Unit tests for SiteScreening module
    """

    def __init__(self, methodName='runTest'):
        super(SiteDBTest, self).__init__(methodName=methodName)

    def setUp(self):
        """
        Setup for unit tests
        """
        super(SiteDBTest, self).setUp()
        self.mySiteDB = SiteDBJSON()

    def testCmsNametoPhEDExNode(self):
        """
        Tests CMS Name to PhEDEx Node Name
        """
        expected = ['T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        observed = self.mySiteDB.cmsNametoPhEDExNode('T1_US_FNAL')
        self.assertItemsEqual(observed, expected)

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=jha/CN=618566/CN=Manoj Jha"
        expectedUser = "******"
        mappedUser = self.mySiteDB.dnUserName(dn=dn)
        self.assertTrue(expectedUser == mappedUser)

    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        expectedUser = "******"
        mappedUser = self.mySiteDB.dnUserName(dn=dn)
        self.assertTrue(expectedUser == mappedUser)

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        allSEs = self.mySiteDB.getAllSENames()
        self.assertTrue(len(allSEs) > 1)
        self.assertTrue('cmsdcadisk01.fnal.gov' in allSEs)
        return

    def testPNNtoPSN(self):
        """
        _testPNNtoPSN_

        Test converting PhEDEx Node Name to Processing Site Name
        """
        # Each pair is (PhEDEx node, expected processing site names).
        for node, psns in (('T1_US_FNAL_Disk', ['T1_US_FNAL']),
                           ('T1_US_FNAL_Tape', []),
                           ('T2_UK_London_IC', ['T2_UK_London_IC'])):
            self.assertTrue(self.mySiteDB.PNNtoPSN(node) == psns)
        return

    def testCMSNametoList(self):
        """
        Test PNN to storage list
        """
        storageList = self.mySiteDB.cmsNametoList("T1_US*", "SE")
        self.assertItemsEqual(storageList, [u'cmsdcadisk01.fnal.gov'])

    def testPNNstoPSNs(self):
        """
        _testPNNstoPSNs_

        Test converting PhEDEx Node Names to Processing Site Names
        """
        fnalNodes = ['T1_US_FNAL_Disk', 'T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        self.assertTrue(self.mySiteDB.PNNstoPSNs(fnalNodes) == ['T1_US_FNAL'])
        t2Nodes = ['T2_UK_London_IC', 'T2_US_Purdue']
        self.assertItemsEqual(self.mySiteDB.PNNstoPSNs(t2Nodes), ['T2_UK_London_IC', 'T2_US_Purdue'])
        return

    def testPSNtoPNNMap(self):
        """
        _PSNtoPNNMap_

        Test API to get a map of PSNs and PNNs
        """
        fullMap = self.mySiteDB.PSNtoPNNMap()
        for tier in ('T1_', 'T2_', 'T3_'):
            self.assertTrue(any(psn.startswith(tier) for psn in fullMap.keys()))
        self.assertTrue(len(fullMap) > 50)

        t1Map = self.mySiteDB.PSNtoPNNMap(psnPattern='T1.*')
        self.assertTrue(all(psn.startswith('T1_') for psn in t1Map.keys()))
        self.assertTrue(len(t1Map) < 10)

        t2Map = self.mySiteDB.PSNtoPNNMap(psnPattern='T2.*')
        self.assertTrue(all(psn.startswith('T2_') for psn in t2Map.keys()))
        self.assertTrue(len(t2Map) > 10)

        t3Map = self.mySiteDB.PSNtoPNNMap(psnPattern='T3.*')
        self.assertTrue(all(psn.startswith('T3_') for psn in t3Map.keys()))
        self.assertTrue(len(t3Map) > 10)
        return

    def testGetAllPhEDExNodeNames(self):
        """
        _testGetAllPhEDExNodeNames_

        Test API to get all PhEDEx Node Names
        """
        noBuffers = self.mySiteDB.getAllPhEDExNodeNames(excludeBuffer=True)
        self.assertFalse([pnn for pnn in noBuffers if pnn.endswith('_Buffer')])

        withBuffers = self.mySiteDB.getAllPhEDExNodeNames(excludeBuffer=False)
        self.assertTrue(len([pnn for pnn in withBuffers if pnn.endswith('_Buffer')]) > 5)

        t1Nodes = self.mySiteDB.getAllPhEDExNodeNames(pattern='T1.*', excludeBuffer=True)
        self.assertFalse([pnn for pnn in t1Nodes if not pnn.startswith('T1_')])
        self.assertTrue(len(t1Nodes) > 10)

        allNodes = self.mySiteDB.getAllPhEDExNodeNames(pattern='.*', excludeBuffer=True)
        for tier in ('T1_', 'T2_', 'T3_'):
            self.assertTrue(any(pnn.startswith(tier) for pnn in allNodes))
        self.assertTrue(len(allNodes) > 60)
        return
class SiteDBTest(EmulatedUnitTestCase):
    """
    Unit tests for SiteScreening module
    """

    def __init__(self, methodName='runTest'):
        super(SiteDBTest, self).__init__(methodName=methodName)

    def setUp(self):
        """
        Setup for unit tests
        """
        super(SiteDBTest, self).setUp()
        # Only the request manager is emulated; the other services are real.
        EmulatorHelper.setEmulators(phedex=False, dbs=False, siteDB=False, requestMgr=True)
        self.mySiteDB = SiteDBJSON()

    def tearDown(self):
        """
        _tearDown_
        """
        super(SiteDBTest, self).tearDown()
        EmulatorHelper.resetEmulators()
        return

    def testCmsNametoPhEDExNode(self):
        """
        #Tests CmsNametoSE
        """
        expected = ['T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        observed = self.mySiteDB.cmsNametoPhEDExNode('T1_US_FNAL')
        self.assertItemsEqual(observed, expected)

    def testSEtoCmsName(self):
        """
        Tests CmsNametoSE
        """
        # The FNAL disk SE maps to an ordered pair of CMS names.
        observed = self.mySiteDB.seToCMSName("cmsdcadisk01.fnal.gov")
        self.assertTrue(observed == [u'T1_US_FNAL', u'T1_US_FNAL_Disk'])
        # The remaining SEs may come back in any order.
        for se, expected in (("srm-eoscms.cern.ch", [u'T2_CH_CERN', u'T2_CH_CERN_HLT']),
                             ("srm-cms.cern.ch", [u'T0_CH_CERN', u'T1_CH_CERN']),
                             ("eoscmsftp.cern.ch", [u'T2_CH_CERN_AI'])):
            self.assertItemsEqual(self.mySiteDB.seToCMSName(se), expected)

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=jha/CN=618566/CN=Manoj Jha"
        expectedUser = "******"
        mappedUser = self.mySiteDB.dnUserName(dn=dn)
        self.assertTrue(expectedUser == mappedUser)

    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        expectedUser = "******"
        mappedUser = self.mySiteDB.dnUserName(dn=dn)
        self.assertTrue(expectedUser == mappedUser)

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        allSEs = self.mySiteDB.getAllSENames()
        self.assertTrue(len(allSEs) > 1)
        self.assertTrue('cmsdcadisk01.fnal.gov' in allSEs)
        return

    def testPNNtoPSN(self):
        """
        _testPNNtoPSN_

        Test converting PhEDEx Node Name to Processing Site Name
        """
        for node, psns in (('T1_US_FNAL_Disk', ['T1_US_FNAL']),
                           ('T1_US_FNAL_Tape', []),
                           ('T2_UK_London_IC', ['T2_UK_London_IC'])):
            self.assertTrue(self.mySiteDB.PNNtoPSN(node) == psns)
        return

    def testCMSNametoList(self):
        """
        Test PNN to storage list
        """
        storageList = self.mySiteDB.cmsNametoList("T1_US*", "SE")
        self.assertItemsEqual(storageList, [u'cmsdcadisk01.fnal.gov'])

    def testCheckAndConvertSENameToPNN(self):
        """
        Test the conversion of SE name to PNN for single and multiple
        sites/PNNs using checkAndConvertSENameToPNN
        """
        fnalSE = u'cmsdcadisk01.fnal.gov'
        purdueSE = u'srm.rcac.purdue.edu'
        fnalPNNs = [u'T1_US_FNAL_Buffer', u'T1_US_FNAL_MSS', u'T1_US_FNAL_Disk']
        purduePNN = [u'T2_US_Purdue']
        # Scalars and single-element lists behave the same way.
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN(fnalSE), fnalPNNs)
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN([fnalSE]), fnalPNNs)
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN(purdueSE), purduePNN)
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN([purdueSE]), purduePNN)
        # Mixed lists of SEs are expanded; PNN inputs pass through unchanged.
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN([fnalSE, purdueSE]), purduePNN + fnalPNNs)
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN(fnalPNNs + purduePNN), fnalPNNs + purduePNN)
        return
class SiteDBTest(unittest.TestCase):
    """
    Unit tests for SiteScreening module

    Runs against the SiteDB emulator (EmulatorHelper with siteDB=True).
    NOTE: ``failUnless`` is a deprecated unittest alias (since Python 2.7);
    it is replaced by ``assertTrue`` throughout this class.
    """

    def setUp(self):
        """
        Setup for unit tests
        """
        EmulatorHelper.setEmulators(siteDB = True)
        self.mySiteDB = SiteDBJSON()

    def tearDown(self):
        """Restore the real services after each test."""
        EmulatorHelper.resetEmulators()

    def testCmsNametoPhEDExNode(self):
        """
        Tests CmsNametoSE
        """
        target = ['T1_US_FNAL_MSS', 'T1_US_FNAL_Buffer']
        results = self.mySiteDB.cmsNametoPhEDExNode("T1_US_FNAL")
        self.assertTrue(sorted(results) == sorted(target))

    def testPhEDExNodetocmsName(self):
        """
        Tests PhEDExNodetocmsName
        """
        result = self.mySiteDB.phEDExNodetocmsName('T1_US_FNAL_MSS')
        self.assertTrue(result == 'T1_US_FNAL')
        result = self.mySiteDB.phEDExNodetocmsName('T1_US_FNAL_Buffer')
        self.assertTrue(result == 'T1_US_FNAL')
        result = self.mySiteDB.phEDExNodetocmsName('T2_UK_London_IC')
        self.assertTrue(result == 'T2_UK_London_IC')
        # don't check this anymore, see comment in phEDExNodetocmsName function
        #self.assertRaises(ValueError, self.mySiteDB.phEDExNodetocmsName,
        #                  'T9_DOESNT_EXIST_Buffer')

    def testCmsNametoSE(self):
        """
        Tests CmsNametoSE
        """
        target = ['srm-cms.gridpp.rl.ac.uk']
        results = self.mySiteDB.cmsNametoSE("T1_UK_RAL")
        self.assertTrue(sorted(results) == sorted(target))

    def testSEtoCmsName(self):
        """
        Tests CmsNametoSE
        """
        target = ['T1_US_FNAL']
        results = self.mySiteDB.seToCMSName("cmssrm.fnal.gov")
        self.assertTrue(results == target)
        target = sorted(['T2_CH_CERN', 'T2_CH_CERN_HLT'])
        results = sorted(self.mySiteDB.seToCMSName("srm-eoscms.cern.ch"))
        self.assertTrue(results == target)

    def testCmsNametoCE(self):
        """
        Tests CmsNametoCE
        """
        target = ['lcgce11.gridpp.rl.ac.uk', 'lcgce10.gridpp.rl.ac.uk', 'lcgce02.gridpp.rl.ac.uk']
        results = self.mySiteDB.cmsNametoCE("T1_UK_RAL")
        self.assertTrue(sorted(results) == sorted(target))

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=gutsche/CN=582680/CN=Oliver Gutsche"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertTrue(testUserName == userName)

    @attr("integration")
    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertTrue(testUserName == userName)

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        seNames = self.mySiteDB.getAllSENames()
        self.assertTrue(len(seNames) > 1)
        self.assertTrue('cmssrm.fnal.gov' in seNames)
        return
class SiteDBTest(EmulatedUnitTestCase):
    """
    Unit tests for SiteScreening module
    """

    def __init__(self, methodName='runTest'):
        super(SiteDBTest, self).__init__(methodName=methodName)

    def setUp(self):
        """
        Setup for unit tests
        """
        super(SiteDBTest, self).setUp()
        self.mySiteDB = SiteDBJSON()

    def testCmsNametoPhEDExNode(self):
        """
        #Tests CmsNametoSE
        """
        expected = ['T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        observed = self.mySiteDB.cmsNametoPhEDExNode('T1_US_FNAL')
        self.assertItemsEqual(observed, expected)

    def testSEtoCmsName(self):
        """
        Tests CmsNametoSE
        """
        # The FNAL disk SE maps to an ordered pair of CMS names.
        observed = self.mySiteDB.seToCMSName("cmsdcadisk01.fnal.gov")
        self.assertTrue(observed == [u'T1_US_FNAL', u'T1_US_FNAL_Disk'])
        # The remaining SEs may come back in any order.
        for se, expected in (("srm-eoscms.cern.ch", [u'T2_CH_CERN', u'T2_CH_CERN_HLT']),
                             ("srm-cms.cern.ch", [u'T0_CH_CERN', u'T1_CH_CERN']),
                             ("eoscmsftp.cern.ch", [u'T2_CH_CERN_AI'])):
            self.assertItemsEqual(self.mySiteDB.seToCMSName(se), expected)

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=jha/CN=618566/CN=Manoj Jha"
        expectedUser = "******"
        mappedUser = self.mySiteDB.dnUserName(dn=dn)
        self.assertTrue(expectedUser == mappedUser)

    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        expectedUser = "******"
        mappedUser = self.mySiteDB.dnUserName(dn=dn)
        self.assertTrue(expectedUser == mappedUser)

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        allSEs = self.mySiteDB.getAllSENames()
        self.assertTrue(len(allSEs) > 1)
        self.assertTrue('cmsdcadisk01.fnal.gov' in allSEs)
        return

    def testPNNtoPSN(self):
        """
        _testPNNtoPSN_

        Test converting PhEDEx Node Name to Processing Site Name
        """
        for node, psns in (('T1_US_FNAL_Disk', ['T1_US_FNAL']),
                           ('T1_US_FNAL_Tape', []),
                           ('T2_UK_London_IC', ['T2_UK_London_IC'])):
            self.assertTrue(self.mySiteDB.PNNtoPSN(node) == psns)
        return

    def testCMSNametoList(self):
        """
        Test PNN to storage list
        """
        storageList = self.mySiteDB.cmsNametoList("T1_US*", "SE")
        self.assertItemsEqual(storageList, [u'cmsdcadisk01.fnal.gov'])

    def testPNNstoPSNs(self):
        """
        _testPNNstoPSNs_

        Test converting PhEDEx Node Names to Processing Site Names
        """
        fnalNodes = ['T1_US_FNAL_Disk', 'T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        self.assertTrue(self.mySiteDB.PNNstoPSNs(fnalNodes) == ['T1_US_FNAL'])
        t2Nodes = ['T2_UK_London_IC', 'T2_US_Purdue']
        self.assertItemsEqual(self.mySiteDB.PNNstoPSNs(t2Nodes), ['T2_UK_London_IC', 'T2_US_Purdue'])
        return
class SiteDBTest(unittest.TestCase):
    """
    Unit tests for SiteScreening module

    NOTE: ``failUnless``/``assertEquals`` are deprecated unittest aliases
    (since Python 2.7); they are replaced here with assertTrue/assertEqual.
    """

    def setUp(self):
        """
        Setup for unit tests
        """
        self.mySiteDB = SiteDBJSON()

    def testCmsNametoPhEDExNode(self):
        """
        Tests CmsNametoSE
        """
        target = ['T1_US_FNAL_MSS', 'T1_US_FNAL_Buffer']
        results = self.mySiteDB.cmsNametoPhEDExNode("T1_US_FNAL")
        self.assertTrue(sorted(results) == sorted(target))

    def testPhEDExNodetocmsName(self):
        """
        Tests PhEDExNodetocmsName
        """
        result = self.mySiteDB.phEDExNodetocmsName('T1_US_FNAL_MSS')
        self.assertTrue(result == 'T1_US_FNAL')
        result = self.mySiteDB.phEDExNodetocmsName('T1_US_FNAL_Buffer')
        self.assertTrue(result == 'T1_US_FNAL')
        result = self.mySiteDB.phEDExNodetocmsName('T2_UK_London_IC')
        self.assertTrue(result == 'T2_UK_London_IC')
        # don't check this anymore, see comment in phEDExNodetocmsName function
        #self.assertRaises(ValueError, self.mySiteDB.phEDExNodetocmsName,
        #                  'T9_DOESNT_EXIST_Buffer')

    def testCmsNametoSE(self):
        """
        Tests CmsNametoSE
        """
        target = ['srm-cms.gridpp.rl.ac.uk', 'srm-cms-disk.gridpp.rl.ac.uk']
        results = self.mySiteDB.cmsNametoSE("T1_UK_RAL")
        self.assertTrue(sorted(results) == sorted(target))

    def testSEtoCmsName(self):
        """
        Tests CmsNametoSE
        """
        # This SiteDB version returns a bare string, not a list.
        target = 'T1_US_FNAL'
        results = self.mySiteDB.seToCMSName("cmssrm.fnal.gov")
        self.assertTrue(results == target)

    def testCmsNametoCE(self):
        """
        Tests CmsNametoCE
        """
        target = ['lcgce09.gridpp.rl.ac.uk', 'lcgce06.gridpp.rl.ac.uk',
                  'lcgce07.gridpp.rl.ac.uk', 'lcgce07.gridpp.rl.ac.uk']
        results = self.mySiteDB.cmsNametoCE("T1_UK_RAL")
        self.assertTrue(sorted(results) == sorted(target))

    def testJSONParser(self):
        """
        Tests the JSON parser directly
        """
        cmsName = "cmsgrid02.hep.wisc.edu"
        results = self.mySiteDB.getJSON("CEtoCMSName", file="CEtoCMSName", name=cmsName)
        self.assertTrue(results['0']['name'] == "T2_US_Wisconsin")

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        testDn = "/C=UK/O=eScience/OU=Bristol/L=IS/CN=simon metson"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertTrue(testUserName == userName)

    @attr("integration")
    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertTrue(testUserName == userName)

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        # Local renamed from 'ceNames': the call returns storage-element names.
        seNames = self.mySiteDB.getAllSENames()
        self.assertTrue(len(seNames) > 1)
        self.assertTrue('cmssrm.fnal.gov' in seNames)
        return

    @attr("integration")
    def testParsingJsonWithApostrophe(self):
        """
        Tests parsing a DN json with an apostrophe in
        """
        # Local renamed from 'json' to avoid shadowing the stdlib module name.
        rawJson = """{"dn": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'", "user": "******"}"""
        d = self.mySiteDB.parser.dictParser(rawJson)
        self.assertEqual("/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'", d['dn'])

    @attr("integration")
    def testParsingInvalidJsonWithApostrophe(self):
        """
        Tests parsing a DN invalid json (from sitedb v1) with an apostrophe in
        """
        rawJson = """{'dn': '/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio' Fano', 'user': '******'}"""
        d = self.mySiteDB.parser.dictParser(rawJson)
        self.assertEqual("/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio' Fano", d['dn'])
        rawJson = """{'dn': '/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'', 'user': '******'}"""
        d = self.mySiteDB.parser.dictParser(rawJson)
        self.assertEqual("/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'", d['dn'])
class SiteDBTest(EmulatedUnitTestCase):
    """
    Unit tests for SiteScreening module
    """

    def __init__(self, methodName='runTest'):
        super(SiteDBTest, self).__init__(methodName=methodName)

    def setUp(self):
        """
        Setup for unit tests
        """
        super(SiteDBTest, self).setUp()
        # Only the request manager is emulated; the other services are real.
        EmulatorHelper.setEmulators(phedex=False, dbs=False, siteDB=False, requestMgr=True)
        self.mySiteDB = SiteDBJSON()

    def tearDown(self):
        """
        _tearDown_
        """
        super(SiteDBTest, self).tearDown()
        EmulatorHelper.resetEmulators()
        return

    def testCmsNametoPhEDExNode(self):
        """
        #Tests CmsNametoSE
        """
        expected = ['T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        observed = self.mySiteDB.cmsNametoPhEDExNode('T1_US_FNAL')
        self.assertItemsEqual(observed, expected)

    def testSEtoCmsName(self):
        """
        Tests CmsNametoSE
        """
        # The FNAL disk SE maps to an ordered pair of CMS names.
        observed = self.mySiteDB.seToCMSName("cmsdcadisk01.fnal.gov")
        self.assertTrue(observed == [u'T1_US_FNAL', u'T1_US_FNAL_Disk'])
        # The remaining SEs may come back in any order.
        for se, expected in (("srm-eoscms.cern.ch", [u'T2_CH_CERN', u'T2_CH_CERN_HLT']),
                             ("srm-cms.cern.ch", [u'T0_CH_CERN', u'T1_CH_CERN']),
                             ("eoscmsftp.cern.ch", [u'T2_CH_CERN_AI'])):
            self.assertItemsEqual(self.mySiteDB.seToCMSName(se), expected)

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=jha/CN=618566/CN=Manoj Jha"
        expectedUser = "******"
        mappedUser = self.mySiteDB.dnUserName(dn=dn)
        self.assertTrue(expectedUser == mappedUser)

    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        expectedUser = "******"
        mappedUser = self.mySiteDB.dnUserName(dn=dn)
        self.assertTrue(expectedUser == mappedUser)

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        allSEs = self.mySiteDB.getAllSENames()
        self.assertTrue(len(allSEs) > 1)
        self.assertTrue('cmsdcadisk01.fnal.gov' in allSEs)
        return

    def testPNNtoPSN(self):
        """
        _testPNNtoPSN_

        Test converting PhEDEx Node Name to Processing Site Name
        """
        for node, psns in (('T1_US_FNAL_Disk', ['T1_US_FNAL']),
                           ('T1_US_FNAL_Tape', []),
                           ('T2_UK_London_IC', ['T2_UK_London_IC'])):
            self.assertTrue(self.mySiteDB.PNNtoPSN(node) == psns)
        return

    def testCMSNametoList(self):
        """
        Test PNN to storage list
        """
        storageList = self.mySiteDB.cmsNametoList("T1_US*", "SE")
        self.assertItemsEqual(storageList, [u'cmsdcadisk01.fnal.gov'])

    def testCheckAndConvertSENameToPNN(self):
        """
        Test the conversion of SE name to PNN for single and multiple
        sites/PNNs using checkAndConvertSENameToPNN
        """
        fnalSE = u'cmsdcadisk01.fnal.gov'
        purdueSE = u'srm.rcac.purdue.edu'
        fnalPNNs = [u'T1_US_FNAL_Buffer', u'T1_US_FNAL_MSS', u'T1_US_FNAL_Disk']
        purduePNN = [u'T2_US_Purdue']
        # Scalars and single-element lists behave the same way.
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN(fnalSE), fnalPNNs)
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN([fnalSE]), fnalPNNs)
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN(purdueSE), purduePNN)
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN([purdueSE]), purduePNN)
        # Mixed lists of SEs are expanded; PNN inputs pass through unchanged.
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN([fnalSE, purdueSE]), purduePNN + fnalPNNs)
        self.assertItemsEqual(self.mySiteDB.checkAndConvertSENameToPNN(fnalPNNs + purduePNN), fnalPNNs + purduePNN)
        return

    def testPNNstoPSNs(self):
        """
        _testPNNstoPSNs_

        Test converting PhEDEx Node Names to Processing Site Names
        """
        fnalNodes = ['T1_US_FNAL_Disk', 'T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        self.assertTrue(self.mySiteDB.PNNstoPSNs(fnalNodes) == ['T1_US_FNAL'])
        t2Nodes = ['T2_UK_London_IC', 'T2_US_Purdue']
        self.assertItemsEqual(self.mySiteDB.PNNstoPSNs(t2Nodes), ['T2_UK_London_IC', 'T2_US_Purdue'])
        return
class SiteDBTest(EmulatedUnitTestCase):
    """
    Unit tests for SiteScreening module
    """

    def __init__(self, methodName='runTest'):
        super(SiteDBTest, self).__init__(methodName=methodName)

    def setUp(self):
        """
        Setup for unit tests
        """
        super(SiteDBTest, self).setUp()
        self.mySiteDB = SiteDBJSON()

    def testCmsNametoPhEDExNode(self):
        """
        Tests cmsNametoPhEDExNode: CMS site name to PhEDEx node names.
        """
        target = ['T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        results = self.mySiteDB.cmsNametoPhEDExNode('T1_US_FNAL')
        self.assertItemsEqual(results, target)

    def testSEtoCmsName(self):
        """
        Tests seToCMSName: SE hostname to CMS site name(s).
        """
        # assertItemsEqual is order-insensitive; the service does not
        # guarantee the ordering of the returned site list.
        target = [u'T1_US_FNAL', u'T1_US_FNAL_Disk']
        results = self.mySiteDB.seToCMSName("cmsdcadisk01.fnal.gov")
        self.assertItemsEqual(results, target)
        target = [u'T2_CH_CERN', u'T2_CH_CERN_HLT']
        results = self.mySiteDB.seToCMSName("srm-eoscms.cern.ch")
        self.assertItemsEqual(results, target)
        target = [u'T0_CH_CERN', u'T1_CH_CERN']
        results = self.mySiteDB.seToCMSName("srm-cms.cern.ch")
        self.assertItemsEqual(results, target)
        target = [u'T2_CH_CERN_AI']
        results = self.mySiteDB.seToCMSName("eoscmsftp.cern.ch")
        self.assertItemsEqual(results, target)

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=jha/CN=618566/CN=Manoj Jha"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertEqual(testUserName, userName)

    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertEqual(testUserName, userName)

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        seNames = self.mySiteDB.getAllSENames()
        self.assertTrue(len(seNames) > 1)
        self.assertTrue('cmsdcadisk01.fnal.gov' in seNames)
        return

    def testPNNtoPSN(self):
        """
        _testPNNtoPSN_

        Test converting PhEDEx Node Name to Processing Site Name
        """
        result = self.mySiteDB.PNNtoPSN('T1_US_FNAL_Disk')
        self.assertEqual(result, ['T1_US_FNAL'])
        # Tape endpoints have no processing site.
        result = self.mySiteDB.PNNtoPSN('T1_US_FNAL_Tape')
        self.assertEqual(result, [])
        result = self.mySiteDB.PNNtoPSN('T2_UK_London_IC')
        self.assertEqual(result, ['T2_UK_London_IC'])
        return

    def testCMSNametoList(self):
        """
        Test CMS name pattern to storage element list.
        """
        result = self.mySiteDB.cmsNametoList("T1_US*", "SE")
        self.assertItemsEqual(result, [u'cmsdcadisk01.fnal.gov'])

    def testPNNstoPSNs(self):
        """
        _testPNNstoPSNs_

        Test converting PhEDEx Node Names to Processing Site Names
        """
        result = self.mySiteDB.PNNstoPSNs(
            ['T1_US_FNAL_Disk', 'T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS'])
        self.assertEqual(result, ['T1_US_FNAL'])
        result = self.mySiteDB.PNNstoPSNs(['T2_UK_London_IC', 'T2_US_Purdue'])
        self.assertItemsEqual(result, ['T2_UK_London_IC', 'T2_US_Purdue'])
        return
class SiteDBTest(EmulatedUnitTestCase):
    """
    Unit tests for SiteScreening module
    """

    def __init__(self, methodName='runTest'):
        super(SiteDBTest, self).__init__(methodName=methodName)

    def setUp(self):
        """
        Setup for unit tests
        """
        super(SiteDBTest, self).setUp()
        self.mySiteDB = SiteDBJSON()

    def testCmsNametoPhEDExNode(self):
        """
        Tests CMS Name to PhEDEx Node Name
        """
        expected = ['T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        observed = self.mySiteDB.cmsNametoPhEDExNode('T1_US_FNAL')
        self.assertItemsEqual(observed, expected)

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=jha/CN=618566/CN=Manoj Jha"
        expected = "******"
        self.assertTrue(expected == self.mySiteDB.dnUserName(dn=dn))

    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        dn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        expected = "******"
        self.assertTrue(expected == self.mySiteDB.dnUserName(dn=dn))

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        allSENames = self.mySiteDB.getAllSENames()
        self.assertTrue(len(allSENames) > 1)
        self.assertTrue('cmsdcadisk01.fnal.gov' in allSENames)
        return

    def testPNNtoPSN(self):
        """
        _testPNNtoPSN_

        Test converting PhEDEx Node Name to Processing Site Name
        """
        self.assertTrue(['T1_US_FNAL'] == self.mySiteDB.PNNtoPSN('T1_US_FNAL_Disk'))
        self.assertTrue([] == self.mySiteDB.PNNtoPSN('T1_US_FNAL_Tape'))
        self.assertTrue(['T2_UK_London_IC'] == self.mySiteDB.PNNtoPSN('T2_UK_London_IC'))
        return

    def testCMSNametoList(self):
        """
        Test PNN to storage list
        """
        seList = self.mySiteDB.cmsNametoList("T1_US*", "SE")
        self.assertItemsEqual(seList, [u'cmsdcadisk01.fnal.gov'])

    def testPNNstoPSNs(self):
        """
        _testPNNstoPSNs_

        Test converting PhEDEx Node Names to Processing Site Names
        """
        fnalNodes = ['T1_US_FNAL_Disk', 'T1_US_FNAL_Buffer', 'T1_US_FNAL_MSS']
        self.assertTrue(self.mySiteDB.PNNstoPSNs(fnalNodes) == ['T1_US_FNAL'])
        t2Nodes = ['T2_UK_London_IC', 'T2_US_Purdue']
        self.assertItemsEqual(self.mySiteDB.PNNstoPSNs(t2Nodes), t2Nodes)
        return

    def testPSNtoPNNMap(self):
        """
        _PSNtoPNNMap_

        Test API to get a map of PSNs and PNNs
        """
        # Unfiltered map must cover every tier and be reasonably large.
        mapping = self.mySiteDB.PSNtoPNNMap()
        for tier in ('T1_', 'T2_', 'T3_'):
            self.assertTrue(any(psn.startswith(tier) for psn in mapping.keys()))
        self.assertTrue(len(mapping) > 50)

        # A tier pattern must restrict the keys to that tier only.
        mapping = self.mySiteDB.PSNtoPNNMap(psnPattern='T1.*')
        self.assertFalse([psn for psn in mapping.keys() if not psn.startswith('T1_')])
        self.assertTrue(len(mapping) < 10)

        mapping = self.mySiteDB.PSNtoPNNMap(psnPattern='T2.*')
        self.assertFalse([psn for psn in mapping.keys() if not psn.startswith('T2_')])
        self.assertTrue(len(mapping) > 10)

        mapping = self.mySiteDB.PSNtoPNNMap(psnPattern='T3.*')
        self.assertFalse([psn for psn in mapping.keys() if not psn.startswith('T3_')])
        self.assertTrue(len(mapping) > 10)
        return

    def testGetAllPhEDExNodeNames(self):
        """
        _testGetAllPhEDExNodeNames_

        Test API to get all PhEDEx Node Names
        """
        # excludeBuffer drops all *_Buffer nodes; without it several remain.
        nodes = self.mySiteDB.getAllPhEDExNodeNames(excludeBuffer=True)
        self.assertFalse([pnn for pnn in nodes if pnn.endswith('_Buffer')])

        nodes = self.mySiteDB.getAllPhEDExNodeNames(excludeBuffer=False)
        self.assertTrue(len([pnn for pnn in nodes if pnn.endswith('_Buffer')]) > 5)

        # Pattern filtering restricts results to the matching tier.
        nodes = self.mySiteDB.getAllPhEDExNodeNames(pattern='T1.*', excludeBuffer=True)
        self.assertFalse([pnn for pnn in nodes if not pnn.startswith('T1_')])
        self.assertTrue(len(nodes) > 10)

        nodes = self.mySiteDB.getAllPhEDExNodeNames(pattern='.*', excludeBuffer=True)
        for tier in ('T1_', 'T2_', 'T3_'):
            self.assertTrue(any(pnn.startswith(tier) for pnn in nodes))
        self.assertTrue(len(nodes) > 60)
        return
class SiteDBTest(unittest.TestCase):
    """
    Unit tests for SiteScreening module
    """

    def setUp(self):
        """
        Setup for unit tests
        """
        EmulatorHelper.setEmulators(siteDB=True)
        self.mySiteDB = SiteDBJSON()

    def tearDown(self):
        EmulatorHelper.resetEmulators()

    def testCmsNametoPhEDExNode(self):
        """
        Tests cmsNametoPhEDExNode: CMS site name to PhEDEx node names.
        """
        target = ['T1_US_FNAL_MSS', 'T1_US_FNAL_Buffer']
        results = self.mySiteDB.cmsNametoPhEDExNode("T1_US_FNAL")
        self.assertItemsEqual(results, target)

    def testPhEDExNodetocmsName(self):
        """
        Tests PhEDExNodetocmsName
        """
        result = self.mySiteDB.phEDExNodetocmsName('T1_US_FNAL_MSS')
        self.assertEqual(result, 'T1_US_FNAL')
        result = self.mySiteDB.phEDExNodetocmsName('T1_US_FNAL_Buffer')
        self.assertEqual(result, 'T1_US_FNAL')
        result = self.mySiteDB.phEDExNodetocmsName('T2_UK_London_IC')
        self.assertEqual(result, 'T2_UK_London_IC')
        # don't check this anymore, see comment in phEDExNodetocmsName function
        #self.assertRaises(ValueError, self.mySiteDB.phEDExNodetocmsName,
        #                  'T9_DOESNT_EXIST_Buffer')

    def testCmsNametoSE(self):
        """
        Tests cmsNametoSE: CMS site name to SE hostnames.
        """
        target = ['srm-cms.gridpp.rl.ac.uk']
        results = self.mySiteDB.cmsNametoSE("T1_UK_RAL")
        self.assertItemsEqual(results, target)

    def testCmsNamePatterntoSE(self):
        """
        Tests CmsNamePatterntoSE
        """
        target = ['T2_XX_SiteA', 'T2_XX_SiteB', 'T2_XX_SiteC']
        results = self.mySiteDB.cmsNametoSE("%T2_XX")
        self.assertItemsEqual(results, target)

    def testSEtoCmsName(self):
        """
        Tests seToCMSName: SE hostname to CMS site name(s).
        """
        target = ['T1_US_FNAL']
        results = self.mySiteDB.seToCMSName("cmssrm.fnal.gov")
        self.assertEqual(results, target)
        # Order-insensitive: the service does not guarantee ordering.
        target = ['T2_CH_CERN', 'T2_CH_CERN_HLT']
        results = self.mySiteDB.seToCMSName("srm-eoscms.cern.ch")
        self.assertItemsEqual(results, target)

    def testCmsNametoCE(self):
        """
        Tests CmsNametoCE
        """
        target = ['lcgce11.gridpp.rl.ac.uk', 'lcgce10.gridpp.rl.ac.uk',
                  'lcgce02.gridpp.rl.ac.uk']
        results = self.mySiteDB.cmsNametoCE("T1_UK_RAL")
        self.assertItemsEqual(results, target)

    def testCmsNamePatterntoCE(self):
        """
        Tests CmsNamePatterntoCE
        """
        target = ['T2_XX_SiteA', 'T2_XX_SiteB', 'T2_XX_SiteC']
        results = self.mySiteDB.cmsNametoCE("%T2_XX")
        self.assertItemsEqual(results, target)

    def testDNUserName(self):
        """
        Tests DN to Username lookup
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=gutsche/CN=582680/CN=Oliver Gutsche"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertEqual(testUserName, userName)

    @attr("integration")
    def testDNWithApostrophe(self):
        """
        Tests a DN with an apostrophy in - will fail till SiteDB2 appears
        """
        testDn = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=liviof/CN=472739/CN=Livio Fano'"
        testUserName = "******"
        userName = self.mySiteDB.dnUserName(dn=testDn)
        self.assertEqual(testUserName, userName)

    def testSEFinder(self):
        """
        _testSEFinder_

        See if we can retrieve seNames from all sites
        """
        seNames = self.mySiteDB.getAllSENames()
        self.assertTrue(len(seNames) > 1)
        self.assertTrue('cmssrm.fnal.gov' in seNames)
        return
class WMBSHelperTest(EmulatedUnitTestCase): def setUp(self): """ _setUp_ """ super(WMBSHelperTest, self).setUp() self.testInit = TestInitCouchApp(__file__) self.testInit.setLogging() self.testInit.setDatabaseConnection(destroyAllDatabase=True) self.testInit.setupCouch("wmbshelper_t/jobs", "JobDump") self.testInit.setupCouch("wmbshelper_t/fwjrs", "FWJRDump") self.testInit.setupCouch("config_test", "GroupUser", "ConfigCache") os.environ["COUCHDB"] = "wmbshelper_t" self.testInit.setSchema(customModules = ["WMCore.WMBS", "WMComponent.DBS3Buffer", "WMCore.BossAir", "WMCore.ResourceControl"], useDefault = False) self.workDir = self.testInit.generateWorkDir() self.wmspec = self.createWMSpec() self.topLevelTask = getFirstTask(self.wmspec) self.inputDataset = self.topLevelTask.inputDataset() self.dataset = self.topLevelTask.getInputDatasetPath() self.dbs = DBSReader(self.inputDataset.dbsurl) self.daoFactory = DAOFactory(package = "WMCore.WMBS", logger = threading.currentThread().logger, dbinterface = threading.currentThread().dbi) return def tearDown(self): """ _tearDown_ Clear out the database. """ self.testInit.clearDatabase() self.testInit.tearDownCouch() self.testInit.delWorkDir() super(WMBSHelperTest, self).tearDown() return def setupForKillTest(self, baAPI = None): """ _setupForKillTest_ Inject a workflow into WMBS that has a processing task, a merge task and a cleanup task. Inject files into the various tasks at various processing states (acquired, complete, available...). Also create jobs for each subscription in various states. 
""" myThread = threading.currentThread() daoFactory = DAOFactory(package = "WMCore.WMBS", logger = myThread.logger, dbinterface = myThread.dbi) dummyLocationAction = daoFactory(classname = "Locations.New") changeStateAction = daoFactory(classname = "Jobs.ChangeState") resourceControl = ResourceControl() resourceControl.insertSite(siteName = 'site1', pnn = 'goodse.cern.ch', ceName = 'site1', plugin = "TestPlugin") resourceControl.insertThreshold(siteName = 'site1', taskType = 'Processing', \ maxSlots = 10000, pendingSlots = 10000) userDN = 'someDN' userAction = daoFactory(classname = "Users.New") userAction.execute(dn = userDN, group_name = 'DEFAULT', role_name = 'DEFAULT') inputFileset = Fileset("input") inputFileset.create() inputFileA = File("lfnA", locations = "goodse.cern.ch") inputFileB = File("lfnB", locations = "goodse.cern.ch") inputFileC = File("lfnC", locations = "goodse.cern.ch") inputFileA.create() inputFileB.create() inputFileC.create() inputFileset.addFile(inputFileA) inputFileset.addFile(inputFileB) inputFileset.addFile(inputFileC) inputFileset.commit() unmergedOutputFileset = Fileset("unmerged") unmergedOutputFileset.create() unmergedFileA = File("ulfnA", locations = "goodse.cern.ch") unmergedFileB = File("ulfnB", locations = "goodse.cern.ch") unmergedFileC = File("ulfnC", locations = "goodse.cern.ch") unmergedFileA.create() unmergedFileB.create() unmergedFileC.create() unmergedOutputFileset.addFile(unmergedFileA) unmergedOutputFileset.addFile(unmergedFileB) unmergedOutputFileset.addFile(unmergedFileC) unmergedOutputFileset.commit() mainProcWorkflow = Workflow(spec = "spec1", owner = "Steve", name = "Main", task = "Proc") mainProcWorkflow.create() mainProcMergeWorkflow = Workflow(spec = "spec1", owner = "Steve", name = "Main", task = "ProcMerge") mainProcMergeWorkflow.create() mainCleanupWorkflow = Workflow(spec = "spec1", owner = "Steve", name = "Main", task = "Cleanup") mainCleanupWorkflow.create() self.mainProcSub = Subscription(fileset = 
inputFileset, workflow = mainProcWorkflow, type = "Processing") self.mainProcSub.create() self.mainProcSub.acquireFiles(inputFileA) self.mainProcSub.completeFiles(inputFileB) procJobGroup = JobGroup(subscription = self.mainProcSub) procJobGroup.create() self.procJobA = Job(name = "ProcJobA") self.procJobA["state"] = "new" self.procJobA["location"] = "site1" self.procJobB = Job(name = "ProcJobB") self.procJobB["state"] = "executing" self.procJobB["location"] = "site1" self.procJobC = Job(name = "ProcJobC") self.procJobC["state"] = "complete" self.procJobC["location"] = "site1" self.procJobA.create(procJobGroup) self.procJobB.create(procJobGroup) self.procJobC.create(procJobGroup) self.mainMergeSub = Subscription(fileset = unmergedOutputFileset, workflow = mainProcMergeWorkflow, type = "Merge") self.mainMergeSub.create() self.mainMergeSub.acquireFiles(unmergedFileA) self.mainMergeSub.failFiles(unmergedFileB) mergeJobGroup = JobGroup(subscription = self.mainMergeSub) mergeJobGroup.create() self.mergeJobA = Job(name = "MergeJobA") self.mergeJobA["state"] = "exhausted" self.mergeJobA["location"] = "site1" self.mergeJobB = Job(name = "MergeJobB") self.mergeJobB["state"] = "cleanout" self.mergeJobB["location"] = "site1" self.mergeJobC = Job(name = "MergeJobC") self.mergeJobC["state"] = "new" self.mergeJobC["location"] = "site1" self.mergeJobA.create(mergeJobGroup) self.mergeJobB.create(mergeJobGroup) self.mergeJobC.create(mergeJobGroup) self.mainCleanupSub = Subscription(fileset = unmergedOutputFileset, workflow = mainCleanupWorkflow, type = "Cleanup") self.mainCleanupSub.create() self.mainCleanupSub.acquireFiles(unmergedFileA) self.mainCleanupSub.completeFiles(unmergedFileB) cleanupJobGroup = JobGroup(subscription = self.mainCleanupSub) cleanupJobGroup.create() self.cleanupJobA = Job(name = "CleanupJobA") self.cleanupJobA["state"] = "new" self.cleanupJobA["location"] = "site1" self.cleanupJobB = Job(name = "CleanupJobB") self.cleanupJobB["state"] = "executing" 
self.cleanupJobB["location"] = "site1" self.cleanupJobC = Job(name = "CleanupJobC") self.cleanupJobC["state"] = "complete" self.cleanupJobC["location"] = "site1" self.cleanupJobA.create(cleanupJobGroup) self.cleanupJobB.create(cleanupJobGroup) self.cleanupJobC.create(cleanupJobGroup) jobList = [self.procJobA, self.procJobB, self.procJobC, self.mergeJobA, self.mergeJobB, self.mergeJobC, self.cleanupJobA, self.cleanupJobB, self.cleanupJobC] changeStateAction.execute(jobList) if baAPI: for job in jobList: job['plugin'] = 'TestPlugin' job['userdn'] = userDN job['usergroup'] = 'DEFAULT' job['userrole'] = 'DEFAULT' job['custom']['location'] = 'site1' baAPI.createNewJobs(wmbsJobs = jobList) # We'll create an unrelated workflow to verify that it isn't affected # by the killing code. bogusFileset = Fileset("dontkillme") bogusFileset.create() bogusFileA = File("bogus/lfnA", locations = "goodse.cern.ch") bogusFileA.create() bogusFileset.addFile(bogusFileA) bogusFileset.commit() bogusWorkflow = Workflow(spec = "spec2", owner = "Steve", name = "Bogus", task = "Proc") bogusWorkflow.create() self.bogusSub = Subscription(fileset = bogusFileset, workflow = bogusWorkflow, type = "Processing") self.bogusSub.create() self.bogusSub.acquireFiles(bogusFileA) return def verifyFileKillStatus(self): """ _verifyFileKillStatus_ Verify that all files were killed correctly. The status of files in Cleanup and LogCollect subscriptions isn't modified. Status of already completed and failed files is not modified. Also verify that the bogus subscription is untouched. 
""" failedFiles = self.mainProcSub.filesOfStatus("Failed") acquiredFiles = self.mainProcSub.filesOfStatus("Acquired") completedFiles = self.mainProcSub.filesOfStatus("Completed") availableFiles = self.mainProcSub.filesOfStatus("Available") bogusAcquiredFiles = self.bogusSub.filesOfStatus("Acquired") self.assertEqual(len(availableFiles), 0, \ "Error: There should be no available files.") self.assertEqual(len(acquiredFiles), 0, \ "Error: There should be no acquired files.") self.assertEqual(len(bogusAcquiredFiles), 1, \ "Error: There should be one acquired file.") self.assertEqual(len(completedFiles), 3, \ "Error: There should be only one completed file.") goldenLFNs = ["lfnA", "lfnB", "lfnC"] for completedFile in completedFiles: self.assertTrue(completedFile["lfn"] in goldenLFNs, \ "Error: Extra completed file.") goldenLFNs.remove(completedFile["lfn"]) self.assertEqual(len(failedFiles), 0, \ "Error: There should be no failed files.") self.assertEqual(len(goldenLFNs), 0, \ "Error: Missing LFN") failedFiles = self.mainMergeSub.filesOfStatus("Failed") acquiredFiles = self.mainMergeSub.filesOfStatus("Acquired") completedFiles = self.mainMergeSub.filesOfStatus("Completed") availableFiles = self.mainMergeSub.filesOfStatus("Available") self.assertEqual(len(acquiredFiles), 0, \ "Error: Merge subscription should have 0 acq files.") self.assertEqual(len(availableFiles), 0, \ "Error: Merge subscription should have 0 avail files.") self.assertEqual(len(failedFiles), 1, \ "Error: Merge subscription should have 1 failed files.") self.assertEqual(list(failedFiles)[0]["lfn"], "ulfnB", "Error: Wrong failed file.") self.assertEqual(len(completedFiles), 2, \ "Error: Merge subscription should have 2 compl files.") goldenLFNs = ["ulfnA", "ulfnC"] for completedFile in completedFiles: self.assertTrue(completedFile["lfn"] in goldenLFNs, \ "Error: Extra complete file.") goldenLFNs.remove(completedFile["lfn"]) self.assertEqual(len(goldenLFNs), 0, \ "Error: Missing LFN") failedFiles = 
self.mainCleanupSub.filesOfStatus("Failed") acquiredFiles = self.mainCleanupSub.filesOfStatus("Acquired") completedFiles = self.mainCleanupSub.filesOfStatus("Completed") availableFiles = self.mainCleanupSub.filesOfStatus("Available") self.assertEqual(len(failedFiles), 0, \ "Error: Cleanup subscription should have 0 fai files.") self.assertEqual(len(acquiredFiles), 1, \ "Error: There should be only one acquired file.") self.assertEqual(list(acquiredFiles)[0]["lfn"], "ulfnA", \ "Error: Wrong acquired LFN.") self.assertEqual(len(completedFiles), 1, \ "Error: There should be only one completed file.") self.assertEqual(list(completedFiles)[0]["lfn"], "ulfnB", \ "Error: Wrong completed LFN.") self.assertEqual(len(availableFiles), 1, \ "Error: There should be only one available file.") self.assertEqual(list(availableFiles)[0]["lfn"], "ulfnC", \ "Error: Wrong completed LFN.") return def verifyJobKillStatus(self): """ _verifyJobKillStatus_ Verify that jobs are killed correctly. Jobs belonging to Cleanup and LogCollect subscriptions are not killed. The status of jobs that have already finished running is not changed. 
""" self.procJobA.load() self.procJobB.load() self.procJobC.load() self.assertEqual(self.procJobA["state"], "killed", \ "Error: Proc job A should be killed.") self.assertEqual(self.procJobB["state"], "killed", \ "Error: Proc job B should be killed.") self.assertEqual(self.procJobC["state"], "complete", \ "Error: Proc job C should be complete.") self.mergeJobA.load() self.mergeJobB.load() self.mergeJobC.load() self.assertEqual(self.mergeJobA["state"], "exhausted", \ "Error: Merge job A should be exhausted.") self.assertEqual(self.mergeJobB["state"], "cleanout", \ "Error: Merge job B should be cleanout.") self.assertEqual(self.mergeJobC["state"], "killed", \ "Error: Merge job C should be killed.") self.cleanupJobA.load() self.cleanupJobB.load() self.cleanupJobC.load() self.assertEqual(self.cleanupJobA["state"], "new", \ "Error: Cleanup job A should be new.") self.assertEqual(self.cleanupJobB["state"], "executing", \ "Error: Cleanup job B should be executing.") self.assertEqual(self.cleanupJobC["state"], "complete", \ "Error: Cleanup job C should be complete.") return def createTestWMSpec(self): """ _createTestWMSpec_ Create a WMSpec that has a processing, merge, cleanup and skims tasks that can be used by the subscription creation test. 
""" testWorkload = WMWorkloadHelper(WMWorkload("TestWorkload")) testWorkload.setDashboardActivity("TestReReco") testWorkload.setSpecUrl("/path/to/workload") testWorkload.setOwnerDetails("sfoulkes", "DMWM", {'dn': 'MyDN'}) procTask = testWorkload.newTask("ProcessingTask") procTask.setTaskType("Processing") procTask.setSplittingAlgorithm("FileBased", files_per_job = 1) procTaskCMSSW = procTask.makeStep("cmsRun1") procTaskCMSSW.setStepType("CMSSW") procTaskCMSSWHelper = procTaskCMSSW.getTypeHelper() procTask.setTaskType("Processing") procTask.setSiteWhitelist(["site1"]) procTask.setSiteBlacklist(["site2"]) procTask.applyTemplates() procTaskCMSSWHelper.addOutputModule("OutputA", primaryDataset = "bogusPrimary", processedDataset = "bogusProcessed", dataTier = "DataTierA", lfnBase = "bogusUnmerged", mergedLFNBase = "bogusMerged", filterName = None) mergeTask = procTask.addTask("MergeTask") mergeTask.setInputReference(procTaskCMSSW, outputModule = "OutputA") mergeTask.setTaskType("Merge") mergeTask.setSplittingAlgorithm("WMBSMergeBySize", min_merge_size = 1, max_merge_size = 2, max_merge_events = 3) mergeTaskCMSSW = mergeTask.makeStep("cmsRun1") mergeTaskCMSSW.setStepType("CMSSW") mergeTaskCMSSWHelper = mergeTaskCMSSW.getTypeHelper() mergeTask.setTaskType("Merge") mergeTask.applyTemplates() mergeTaskCMSSWHelper.addOutputModule("Merged", primaryDataset = "bogusPrimary", processedDataset = "bogusProcessed", dataTier = "DataTierA", lfnBase = "bogusUnmerged", mergedLFNBase = "bogusMerged", filterName = None) cleanupTask = procTask.addTask("CleanupTask") cleanupTask.setInputReference(procTaskCMSSW, outputModule = "OutputA") cleanupTask.setTaskType("Merge") cleanupTask.setSplittingAlgorithm("SiblingProcessingBased", files_per_job = 50) cleanupTaskCMSSW = cleanupTask.makeStep("cmsRun1") cleanupTaskCMSSW.setStepType("CMSSW") dummyCleanupTaskCMSSWHelper = cleanupTaskCMSSW.getTypeHelper() cleanupTask.setTaskType("Cleanup") cleanupTask.applyTemplates() skimTask = 
mergeTask.addTask("SkimTask") skimTask.setTaskType("Skim") skimTask.setInputReference(mergeTaskCMSSW, outputModule = "Merged") skimTask.setSplittingAlgorithm("FileBased", files_per_job = 1, include_parents = True) skimTaskCMSSW = skimTask.makeStep("cmsRun1") skimTaskCMSSW.setStepType("CMSSW") skimTaskCMSSWHelper = skimTaskCMSSW.getTypeHelper() skimTask.setTaskType("Skim") skimTask.applyTemplates() skimTaskCMSSWHelper.addOutputModule("SkimOutputA", primaryDataset = "bogusPrimary", processedDataset = "bogusProcessed", dataTier = "DataTierA", lfnBase = "bogusUnmerged", mergedLFNBase = "bogusMerged", filterName = None) skimTaskCMSSWHelper.addOutputModule("SkimOutputB", primaryDataset = "bogusPrimary", processedDataset = "bogusProcessed", dataTier = "DataTierA", lfnBase = "bogusUnmerged", mergedLFNBase = "bogusMerged", filterName = None) return testWorkload def setupMCWMSpec(self): """Setup MC workflow""" self.wmspec = self.createMCWMSpec() self.topLevelTask = getFirstTask(self.wmspec) self.inputDataset = self.topLevelTask.inputDataset() self.dataset = self.topLevelTask.getInputDatasetPath() self.dbs = None self.siteDB = SiteDBJSON() # add sites that would normally be added by operator via resource_control locationDAO = self.daoFactory(classname = "Locations.New") self.pnns = [] for site in ['T2_XX_SiteA', 'T2_XX_SiteB']: locationDAO.execute(siteName = site, pnn = self.siteDB.cmsNametoPhEDExNode(site)[0]) self.pnns.append(self.siteDB.cmsNametoPhEDExNode(site)[0]) def createWMSpec(self, name = 'ReRecoWorkload'): factory = ReRecoWorkloadFactory() rerecoArgs["ConfigCacheID"] = createConfig(rerecoArgs["CouchDBName"]) wmspec = factory.factoryWorkloadConstruction(name, rerecoArgs) wmspec.setSpecUrl("/path/to/workload") wmspec.setSubscriptionInformation(custodialSites = [], nonCustodialSites = [], autoApproveSites = [], priority = "Low", custodialSubType = "Move") return wmspec def createMCWMSpec(self, name='MonteCarloWorkload'): mcArgs['CouchDBName'] = 
rerecoArgs["CouchDBName"] mcArgs["ConfigCacheID"] = createConfig(mcArgs["CouchDBName"]) wmspec = monteCarloWorkload(name, mcArgs) wmspec.setSpecUrl("/path/to/workload") getFirstTask(wmspec).addProduction(totalevents=10000) return wmspec def getDBS(self, wmspec): topLevelTask = getFirstTask(wmspec) inputDataset = topLevelTask.inputDataset() dbs = DBSReader(inputDataset.dbsurl) #dbsDict = {self.inputDataset.dbsurl : self.dbs} return dbs def createWMBSHelperWithTopTask(self, wmspec, block, mask = None, parentFlag = False, detail = False): topLevelTask = getFirstTask(wmspec) wmbs = WMBSHelper(wmspec, topLevelTask.name(), block, mask, cachepath = self.workDir) if block: if parentFlag: block = self.dbs.getFileBlockWithParents(block)[block] else: block = self.dbs.getFileBlock(block)[block] sub, files = wmbs.createSubscriptionAndAddFiles(block = block) if detail: return wmbs, sub, files else: return wmbs def testKillWorkflow(self): """ _testKillWorkflow_ Verify that workflow killing works correctly. """ configFile = EmulatorSetup.setupWMAgentConfig() config = loadConfigurationFile(configFile) baAPI = BossAirAPI(config = config) # Create nine jobs self.setupForKillTest(baAPI = baAPI) self.assertEqual(len(baAPI._listRunJobs()), 9) killWorkflow("Main", config, config) self.verifyFileKillStatus() self.verifyJobKillStatus() self.assertEqual(len(baAPI._listRunJobs()), 8) EmulatorSetup.deleteConfig(configFile) return def testCreateSubscription(self): """ _testCreateSubscription_ Verify that the subscription creation code works correctly. 
""" resourceControl = ResourceControl() resourceControl.insertSite(siteName = 'site1', pnn = 'goodse.cern.ch', ceName = 'site1', plugin = "TestPlugin") resourceControl.insertSite(siteName = 'site2', pnn = 'goodse2.cern.ch', ceName = 'site2', plugin = "TestPlugin") testWorkload = self.createTestWMSpec() testTopLevelTask = getFirstTask(testWorkload) testWMBSHelper = WMBSHelper(testWorkload, testTopLevelTask.name(), "SomeBlock", cachepath = self.workDir) testWMBSHelper.createTopLevelFileset() testWMBSHelper._createSubscriptionsInWMBS(testTopLevelTask, testWMBSHelper.topLevelFileset) procWorkflow = Workflow(name = "TestWorkload", task = "/TestWorkload/ProcessingTask") procWorkflow.load() self.assertEqual(procWorkflow.owner, "sfoulkes", "Error: Wrong owner: %s" % procWorkflow.owner) self.assertEqual(procWorkflow.group, "DMWM", "Error: Wrong group: %s" % procWorkflow.group) self.assertEqual(procWorkflow.wfType, "TestReReco", "Error: Wrong type.") self.assertEqual(procWorkflow.spec, os.path.join(self.workDir, procWorkflow.name, "WMSandbox", "WMWorkload.pkl"), "Error: Wrong spec URL") self.assertEqual(len(procWorkflow.outputMap.keys()), 1, "Error: Wrong number of WF outputs.") mergedProcOutput = procWorkflow.outputMap["OutputA"][0]["merged_output_fileset"] unmergedProcOutput = procWorkflow.outputMap["OutputA"][0]["output_fileset"] mergedProcOutput.loadData() unmergedProcOutput.loadData() self.assertEqual(mergedProcOutput.name, "/TestWorkload/ProcessingTask/MergeTask/merged-Merged", "Error: Merged output fileset is wrong.") self.assertEqual(unmergedProcOutput.name, "/TestWorkload/ProcessingTask/unmerged-OutputA", "Error: Unmerged output fileset is wrong.") mergeWorkflow = Workflow(name = "TestWorkload", task = "/TestWorkload/ProcessingTask/MergeTask") mergeWorkflow.load() self.assertEqual(mergeWorkflow.owner, "sfoulkes", "Error: Wrong owner.") self.assertEqual(mergeWorkflow.spec, os.path.join(self.workDir, mergeWorkflow.name, "WMSandbox", "WMWorkload.pkl"), "Error: Wrong 
spec URL")
        self.assertEqual(len(mergeWorkflow.outputMap.keys()), 1, "Error: Wrong number of WF outputs.")

        # The cleanup workflow produces no output at all.
        cleanupWorkflow = Workflow(name = "TestWorkload", task = "/TestWorkload/ProcessingTask/CleanupTask")
        cleanupWorkflow.load()
        self.assertEqual(cleanupWorkflow.owner, "sfoulkes", "Error: Wrong owner.")
        self.assertEqual(cleanupWorkflow.spec, os.path.join(self.workDir, cleanupWorkflow.name, "WMSandbox", "WMWorkload.pkl"),
                         "Error: Wrong spec URL")
        self.assertEqual(len(cleanupWorkflow.outputMap.keys()), 0, "Error: Wrong number of WF outputs.")

        # The merge task's output fileset feeds the skim task below.
        unmergedMergeOutput = mergeWorkflow.outputMap["Merged"][0]["output_fileset"]
        unmergedMergeOutput.loadData()
        self.assertEqual(unmergedMergeOutput.name, "/TestWorkload/ProcessingTask/MergeTask/merged-Merged",
                         "Error: Unmerged output fileset is wrong.")

        # The skim workflow has two output modules (SkimOutputA/B); check both
        # the merged and unmerged fileset registered for each.
        skimWorkflow = Workflow(name = "TestWorkload", task = "/TestWorkload/ProcessingTask/MergeTask/SkimTask")
        skimWorkflow.load()
        self.assertEqual(skimWorkflow.owner, "sfoulkes", "Error: Wrong owner.")
        self.assertEqual(skimWorkflow.spec, os.path.join(self.workDir, skimWorkflow.name, "WMSandbox", "WMWorkload.pkl"),
                         "Error: Wrong spec URL")
        self.assertEqual(len(skimWorkflow.outputMap.keys()), 2, "Error: Wrong number of WF outputs.")
        mergedSkimOutputA = skimWorkflow.outputMap["SkimOutputA"][0]["merged_output_fileset"]
        unmergedSkimOutputA = skimWorkflow.outputMap["SkimOutputA"][0]["output_fileset"]
        mergedSkimOutputB = skimWorkflow.outputMap["SkimOutputB"][0]["merged_output_fileset"]
        unmergedSkimOutputB = skimWorkflow.outputMap["SkimOutputB"][0]["output_fileset"]
        mergedSkimOutputA.loadData()
        mergedSkimOutputB.loadData()
        unmergedSkimOutputA.loadData()
        unmergedSkimOutputB.loadData()
        # NOTE(review): merged and unmerged map to the same "unmerged-" fileset
        # name here — presumably intentional for unmergeable skim output; confirm.
        self.assertEqual(mergedSkimOutputA.name, "/TestWorkload/ProcessingTask/MergeTask/SkimTask/unmerged-SkimOutputA",
                         "Error: Merged output fileset is wrong: %s" % mergedSkimOutputA.name)
        self.assertEqual(unmergedSkimOutputA.name, "/TestWorkload/ProcessingTask/MergeTask/SkimTask/unmerged-SkimOutputA",
                         "Error: Unmerged output fileset is wrong.")
        self.assertEqual(mergedSkimOutputB.name, "/TestWorkload/ProcessingTask/MergeTask/SkimTask/unmerged-SkimOutputB",
                         "Error: Merged output fileset is wrong.")
        self.assertEqual(unmergedSkimOutputB.name, "/TestWorkload/ProcessingTask/MergeTask/SkimTask/unmerged-SkimOutputB",
                         "Error: Unmerged output fileset is wrong.")

        # Top-level fileset and the processing subscription created from it.
        topLevelFileset = Fileset(name = "TestWorkload-ProcessingTask-SomeBlock")
        topLevelFileset.loadData()
        procSubscription = Subscription(fileset = topLevelFileset, workflow = procWorkflow)
        procSubscription.loadData()
        # Two sites were configured: site1 white-listed, everything else black-listed.
        self.assertEqual(len(procSubscription.getWhiteBlackList()), 2,
                         "Error: Wrong site white/black list for proc sub.")
        for site in procSubscription.getWhiteBlackList():
            if site["site_name"] == "site1":
                self.assertEqual(site["valid"], 1, "Error: Site should be white listed.")
            else:
                self.assertEqual(site["valid"], 0, "Error: Site should be black listed.")
        self.assertEqual(procSubscription["type"], "Processing", "Error: Wrong subscription type.")
        self.assertEqual(procSubscription["split_algo"], "FileBased", "Error: Wrong split algo.")

        # Merge subscription hangs off the processing task's unmerged output.
        mergeSubscription = Subscription(fileset = unmergedProcOutput, workflow = mergeWorkflow)
        mergeSubscription.loadData()
        self.assertEqual(len(mergeSubscription.getWhiteBlackList()), 0,
                         "Error: Wrong white/black list for merge sub.")
        self.assertEqual(mergeSubscription["type"], "Merge", "Error: Wrong subscription type.")
        self.assertEqual(mergeSubscription["split_algo"], "WMBSMergeBySize", "Error: Wrong split algo.")

        # Skim subscription hangs off the merge task's output fileset.
        skimSubscription = Subscription(fileset = unmergedMergeOutput, workflow = skimWorkflow)
        skimSubscription.loadData()
        self.assertEqual(skimSubscription["type"], "Skim", "Error: Wrong subscription type.")
        self.assertEqual(skimSubscription["split_algo"], "FileBased", "Error: Wrong split algo.")
        return

    def testTruncatedWFInsertion(self):
        """
        _testTruncatedWFInsertion_

        Insert a workload, truncate it at the merge task into a resubmission
        workload, insert the truncated workload for every resulting top-level
        task, and verify the workflows/filesets/subscriptions WMBS ends up with.
        """
        # Register the two CEs/PNNs the subscriptions will reference.
        resourceControl = ResourceControl()
        resourceControl.insertSite(siteName = 'site1', pnn = 'goodse.cern.ch', ceName = 'site1', plugin = "TestPlugin")
        resourceControl.insertSite(siteName = 'site2', pnn = 'goodse2.cern.ch', ceName = 'site2', plugin = "TestPlugin")

        # Insert the full (untruncated) workload first.
        testWorkload = self.createTestWMSpec()
        testTopLevelTask = getFirstTask(testWorkload)
        testWMBSHelper = WMBSHelper(testWorkload, testTopLevelTask.name(), "SomeBlock", cachepath = self.workDir)
        testWMBSHelper.createTopLevelFileset()
        testWMBSHelper._createSubscriptionsInWMBS(testTopLevelTask, testWMBSHelper.topLevelFileset)

        # Truncate at the merge task: MergeTask (and siblings) become top level.
        testWorkload.truncate("ResubmitTestWorkload", "/TestWorkload/ProcessingTask/MergeTask",
                              "someserver", "somedatabase")

        # create the subscription for multiple top task (MergeTask and CleanupTask for the same block)
        for task in testWorkload.getTopLevelTask():
            testResubmitWMBSHelper = WMBSHelper(testWorkload, task.name(), "SomeBlock2", cachepath = self.workDir)
            testResubmitWMBSHelper.createTopLevelFileset()
            testResubmitWMBSHelper._createSubscriptionsInWMBS(task, testResubmitWMBSHelper.topLevelFileset)

        # The merge workflow now lives under the resubmission workload name.
        mergeWorkflow = Workflow(name = "ResubmitTestWorkload", task = "/ResubmitTestWorkload/MergeTask")
        mergeWorkflow.load()
        self.assertEqual(mergeWorkflow.owner, "sfoulkes", "Error: Wrong owner.")
        self.assertEqual(mergeWorkflow.spec, os.path.join(self.workDir, mergeWorkflow.name, "WMSandbox", "WMWorkload.pkl"),
                         "Error: Wrong spec URL")
        self.assertEqual(len(mergeWorkflow.outputMap.keys()), 1, "Error: Wrong number of WF outputs.")
        unmergedMergeOutput = mergeWorkflow.outputMap["Merged"][0]["output_fileset"]
        unmergedMergeOutput.loadData()
        self.assertEqual(unmergedMergeOutput.name, "/ResubmitTestWorkload/MergeTask/merged-Merged",
                         "Error: Unmerged output fileset is wrong.")

        # Skim workflow: two output modules, merged/unmerged fileset for each.
        skimWorkflow = Workflow(name = "ResubmitTestWorkload", task = "/ResubmitTestWorkload/MergeTask/SkimTask")
        skimWorkflow.load()
        self.assertEqual(skimWorkflow.owner, "sfoulkes", "Error: Wrong owner.")
        self.assertEqual(skimWorkflow.spec, os.path.join(self.workDir, skimWorkflow.name, "WMSandbox", "WMWorkload.pkl"),
                         "Error: Wrong spec URL")
        self.assertEqual(len(skimWorkflow.outputMap.keys()), 2, "Error: Wrong number of WF outputs.")
        mergedSkimOutputA = skimWorkflow.outputMap["SkimOutputA"][0]["merged_output_fileset"]
        unmergedSkimOutputA = skimWorkflow.outputMap["SkimOutputA"][0]["output_fileset"]
        mergedSkimOutputB = skimWorkflow.outputMap["SkimOutputB"][0]["merged_output_fileset"]
        unmergedSkimOutputB = skimWorkflow.outputMap["SkimOutputB"][0]["output_fileset"]
        mergedSkimOutputA.loadData()
        mergedSkimOutputB.loadData()
        unmergedSkimOutputA.loadData()
        unmergedSkimOutputB.loadData()
        self.assertEqual(mergedSkimOutputA.name, "/ResubmitTestWorkload/MergeTask/SkimTask/unmerged-SkimOutputA",
                         "Error: Merged output fileset is wrong: %s" % mergedSkimOutputA.name)
        self.assertEqual(unmergedSkimOutputA.name, "/ResubmitTestWorkload/MergeTask/SkimTask/unmerged-SkimOutputA",
                         "Error: Unmerged output fileset is wrong.")
        self.assertEqual(mergedSkimOutputB.name, "/ResubmitTestWorkload/MergeTask/SkimTask/unmerged-SkimOutputB",
                         "Error: Merged output fileset is wrong.")
        self.assertEqual(unmergedSkimOutputB.name, "/ResubmitTestWorkload/MergeTask/SkimTask/unmerged-SkimOutputB",
                         "Error: Unmerged output fileset is wrong.")

        # The resubmitted merge task is now subscribed directly to the block fileset.
        topLevelFileset = Fileset(name = "ResubmitTestWorkload-MergeTask-SomeBlock2")
        topLevelFileset.loadData()
        mergeSubscription = Subscription(fileset = topLevelFileset, workflow = mergeWorkflow)
        mergeSubscription.loadData()
        self.assertEqual(len(mergeSubscription.getWhiteBlackList()), 0,
                         "Error: Wrong white/black list for merge sub.")
        self.assertEqual(mergeSubscription["type"], "Merge", "Error: Wrong subscription type.")
        self.assertEqual(mergeSubscription["split_algo"], "WMBSMergeBySize", "Error: Wrong split algo.")

        skimSubscription = Subscription(fileset = unmergedMergeOutput, workflow = skimWorkflow)
        skimSubscription.loadData()
        self.assertEqual(skimSubscription["type"], "Skim", "Error: Wrong subscription type.")
        self.assertEqual(skimSubscription["split_algo"], "FileBased", "Error: Wrong split algo.")
        return
    def testReReco(self):
        """ReReco workflow"""
        # create workflow
        block = self.dataset + "#" + BLOCK1
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block)
        # NOTE(review): unlike the sibling tests below, the whole block dict is
        # passed to validFiles here rather than [block]['Files'] — confirm intended.
        files = wmbs.validFiles(self.dbs.getFileBlock(block))
        self.assertEqual(len(files), 1)

    def testReRecoBlackRunRestriction(self):
        """ReReco workflow with Run restrictions"""
        block = self.dataset + "#" + BLOCK2
        # Set run blacklist to only run in the block
        self.topLevelTask.setInputRunBlacklist([181183])
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block)
        # Blacklisting the block's only run leaves no valid files.
        files = wmbs.validFiles(self.dbs.getFileBlock(block)[block]['Files'])
        self.assertEqual(len(files), 0)

    def testReRecoWhiteRunRestriction(self):
        """ReReco workflow with a run whitelist matching the block's run."""
        block = self.dataset + "#" + BLOCK2
        # Set run whitelist to only run in the block
        self.topLevelTask.setInputRunWhitelist([181183])
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block)
        files = wmbs.validFiles(self.dbs.getFileBlock(block)[block]['Files'])
        self.assertEqual(len(files), 1)

    def testLumiMaskRestrictionsOK(self):
        """A lumi mask matching the block's run/lumis keeps its file valid."""
        block = self.dataset + "#" + BLOCK1
        self.wmspec.getTopLevelTask()[0].data.input.splitting.runs = ['181367']
        self.wmspec.getTopLevelTask()[0].data.input.splitting.lumis = ['57,80']
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block)
        files = wmbs.validFiles(self.dbs.getFileBlock(block)[block]['Files'])
        self.assertEqual(len(files), 1)

    def testLumiMaskRestrictionsKO(self):
        """A lumi mask matching nothing in the block invalidates all files."""
        block = self.dataset + "#" + BLOCK1
        self.wmspec.getTopLevelTask()[0].data.input.splitting.runs = ['123454321']
        self.wmspec.getTopLevelTask()[0].data.input.splitting.lumis = ['123,123']
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block)
        files = wmbs.validFiles(self.dbs.getFileBlock(block)[block]['Files'])
        self.assertEqual(len(files), 0)

    def testDuplicateFileInsert(self):
        """
        Inserting the same block under a second spec must not duplicate files,
        while each spec still gets its own top-level fileset.
        """
        # using default wmspec
        block = self.dataset + "#" + BLOCK1
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block)
        wmbs.topLevelFileset.loadData()
        numOfFiles = len(wmbs.topLevelFileset.files)
        # check initially inserted files.
        dbsFiles = self.dbs.getFileBlock(block)[block]['Files']
        self.assertEqual(numOfFiles, len(dbsFiles))
        firstFileset = wmbs.topLevelFileset
        # Cross-check the count straight from the database via the DAO.
        wmbsDao = wmbs.daofactory(classname = "Files.InFileset")
        numOfFiles = len(wmbsDao.execute(firstFileset.id))
        self.assertEqual(numOfFiles, len(dbsFiles))

        # use the new spec with same inputdataset
        block = self.dataset + "#" + BLOCK1
        wmspec = self.createWMSpec("TestSpec1")
        dbs = self.getDBS(wmspec)
        wmbs = self.createWMBSHelperWithTopTask(wmspec, block)
        # check duplicate insert
        dbsFiles = dbs.getFileBlock(block)[block]['Files']
        # addFiles returns the number of newly inserted files; a re-insert adds none.
        numOfFiles = wmbs.addFiles(dbs.getFileBlock(block)[block])
        self.assertEqual(numOfFiles, 0)
        secondFileset = wmbs.topLevelFileset
        wmbsDao = wmbs.daofactory(classname = "Files.InFileset")
        numOfFiles = len(wmbsDao.execute(secondFileset.id))
        self.assertEqual(numOfFiles, len(dbsFiles))
        # The two specs still own distinct top-level filesets.
        self.assertNotEqual(firstFileset.id, secondFileset.id)

    def testDuplicateSubscription(self):
        """Can't duplicate subscriptions"""
        # using default wmspec
        block = self.dataset + "#" + BLOCK1
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block)
        wmbs.topLevelFileset.loadData()
        # Remember the first insertion's identifiers to compare after re-insert.
        numOfFiles = len(wmbs.topLevelFileset.files)
        filesetId = wmbs.topLevelFileset.id
        subId = wmbs.topLevelSubscription['id']
        # check initially inserted files.
        dbsFiles = self.dbs.getFileBlock(block)[block]['Files']
        self.assertEqual(numOfFiles, len(dbsFiles))
        # Not clear what's supposed to happen here, 2nd test is completely redundant
        dummyFirstFileset = wmbs.topLevelFileset
        self.assertEqual(numOfFiles, len(dbsFiles))

        # reinsert subscription - shouldn't create anything new
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, block)
        wmbs.topLevelFileset.loadData()
        self.assertEqual(numOfFiles, len(wmbs.topLevelFileset.files))
        self.assertEqual(filesetId, wmbs.topLevelFileset.id)
        self.assertEqual(subId, wmbs.topLevelSubscription['id'])

        # now do a montecarlo workflow
        self.setupMCWMSpec()
        mask = Mask(FirstRun = 12, FirstLumi = 1234, FirstEvent = 12345,
                    LastEvent = 999995, LastLumi = 12345, LastRun = 12)
        # MC workflows take a mask instead of an input block (block=None).
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, None, mask)
        wmbs.topLevelFileset.loadData()
        numOfFiles = len(wmbs.topLevelFileset.files)
        filesetId = wmbs.topLevelFileset.id
        subId = wmbs.topLevelSubscription['id']
        # check initially inserted files.
        # Not clear what's supposed to happen here, 2nd test is completely redundant
        numDbsFiles = 1  # MC injection creates a single fake file
        self.assertEqual(numOfFiles, numDbsFiles)
        dummyFirstFileset = wmbs.topLevelFileset
        self.assertEqual(numOfFiles, numDbsFiles)

        # reinsert subscription - shouldn't create anything new
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, None, mask)
        wmbs.topLevelFileset.loadData()
        self.assertEqual(numOfFiles, len(wmbs.topLevelFileset.files))
        self.assertEqual(filesetId, wmbs.topLevelFileset.id)
        self.assertEqual(subId, wmbs.topLevelSubscription['id'])

    def testParentage(self):
        """
        1. check whether parent files are created in wmbs.
        2. check parent files are associated to child.
        3. When 2 specs with the same input data (one with parent processing,
           one without it) is inserted, if one without parent processing
           inserted first then the other with parent processing insert, it
           still needs to create parent files although child files are
           duplicate
        """
        # Swap out the dataset for one that has parents
        task = next(self.wmspec.taskIterator())
        oldDS = task.inputDataset()  # Copy the old dataset, only will use DBS URL from it
        task.addInputDataset(dbsurl=oldDS.dbsurl, primary='Cosmics',
                             processed='ComissioningHI-PromptReco-v1', tier='RECO')
        block = '/Cosmics/ComissioningHI-PromptReco-v1/RECO' + '#5b89ba9c-0dbf-11e1-9b6c-003048caaace'

        # File creation without parents
        wmbs, _, numFiles = self.createWMBSHelperWithTopTask(self.wmspec, block,
                                                             parentFlag=False, detail=True)
        self.assertEqual(8, numFiles)
        wmbs.topLevelFileset.loadData()
        for child in wmbs.topLevelFileset.files:
            self.assertEqual(len(child["parents"]), 0)  # no parents per child

        # File creation with parents: same (duplicate) child files, but
        # parentage must still be created and attached.
        wmbs, _, numFiles = self.createWMBSHelperWithTopTask(self.wmspec, block,
                                                             parentFlag=True, detail=True)
        self.assertEqual(8, numFiles)
        wmbs.topLevelFileset.loadData()
        for child in wmbs.topLevelFileset.files:
            self.assertEqual(len(child["parents"]), 1)  # one parent per child

    def testMCFakeFileInjection(self):
        """Inject fake Monte Carlo files into WMBS"""
        # This test is failing because the name of the couch DB is set to None
        # in TestMonteCarloWorkloadFactory.getMCArgs() but changing it to
        # "reqmgr_config_cache_t" from StdBase test arguments does not fix the
        # situation. testDuplicateSubscription probably has the same issue
        self.setupMCWMSpec()
        mask = Mask(FirstRun = 12, FirstLumi = 1234, FirstEvent = 12345,
                    LastEvent = 999995, LastLumi = 12345, LastRun = 12)
        wmbs = self.createWMBSHelperWithTopTask(self.wmspec, None, mask)

        # Exactly one subscription over one closed fileset holding one fake file.
        subscription = wmbs.topLevelSubscription
        self.assertEqual(1, subscription.exists())
        fileset = subscription['fileset']
        self.assertEqual(1, fileset.exists())
        fileset.loadData()  # need to refresh from database
        self.assertEqual(len(fileset.files), 1)
        self.assertEqual(len(fileset.parents), 0)
        self.assertFalse(fileset.open)

        # The fake file carries the mask's event/run/lumi ranges and all PNNs.
        firstFile = list(fileset.files)[0]
        self.assertEqual(firstFile['events'], mask['LastEvent'] - mask['FirstEvent'] + 1)  # inclusive range
        self.assertEqual(firstFile['merged'], False)  # merged files get added to dbs
        self.assertEqual(len(firstFile['parents']), 0)
        #firstFile.loadData()
        self.assertEqual(sorted(firstFile['locations']), sorted(self.pnns))
        self.assertEqual(len(firstFile.getParentLFNs()), 0)
        self.assertEqual(len(firstFile.getRuns()), 1)
        run = firstFile.getRuns()[0]
        self.assertEqual(run.run, mask['FirstRun'])
        self.assertEqual(run.lumis[0], mask['FirstLumi'])
        self.assertEqual(run.lumis[-1], mask['LastLumi'])
        self.assertEqual(len(run.lumis), mask['LastLumi'] - mask['FirstLumi'] + 1)