def testAddCKSumByLFN(self):
    """
    _testAddCKSumByLFN_
    """
    testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024,
                              events = 10)
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.create()

    setCksumAction = self.daoFactory(classname = "DBSBufferFiles.AddChecksumByLFN")
    binds = [{'lfn': "/this/is/a/lfn", 'cktype': 'adler32', 'cksum': 201},
             {'lfn': "/this/is/a/lfn", 'cktype': 'cksum', 'cksum': 101}]
    setCksumAction.execute(bulkList = binds)

    testFileB = DBSBufferFile(id = testFileA["id"])
    testFileB.load()

    self.assertEqual(testFileB['checksums'], {'adler32': '201', 'cksum': '101'})
    return
def loadFilesByBlock(self, blockname):
    """
    _loadFilesByBlock_

    Get all files associated with a block
    """
    dbsFiles = []

    findFiles = self.daoFactory(classname = "LoadFilesByBlock")
    results = findFiles.execute(blockname = blockname, transaction = False)

    for entry in results:
        # Add loaded information
        dbsfile = DBSBufferFile(id=entry['id'])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if 'runInfo' in dbsfile.keys():
            # Then we have to replace it with a real run
            for r in dbsfile['runInfo'].keys():
                run = Run(runNumber = r)
                run.extend(dbsfile['runInfo'][r])
                dbsfile.addRun(run)
            del dbsfile['runInfo']
        if 'parentLFNs' in dbsfile.keys():
            # Then we have some parents
            for lfn in dbsfile['parentLFNs']:
                newFile = DBSBufferFile(lfn = lfn)
                dbsfile['parents'].add(newFile)
            del dbsfile['parentLFNs']

    return dbsFiles
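# Hedged usage sketch (not part of the original codebase): shows how the block
# loader above might be driven.  It assumes a DBSBufferUtil-style object
# (`dbsUtil` here) exposing loadFilesByBlock(); the block name is illustrative.
def exampleLoadFilesByBlock(dbsUtil, blockName):
    """
    Load every DBSBufferFile attached to a block and print a short summary.
    """
    dbsFiles = dbsUtil.loadFilesByBlock(blockname = blockName)
    for dbsFile in dbsFiles:
        # After loading, 'runs' holds real Run objects and 'parents' holds
        # DBSBufferFile stubs keyed by LFN.
        print "%s: %s events, %s runs" % (dbsFile["lfn"], dbsFile["events"],
                                          len(dbsFile["runs"]))
    return dbsFiles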
def createTestFiles(self):
    """
    _createTestFiles_

    Create some dbsbuffer test files with different statuses

    :return:
    """
    phedexStatus = self.dbsbufferFactory(classname="DBSBufferFiles.SetPhEDExStatus")

    for i in range(0, 4):
        lfn = "/path/to/some/lfn" + str(i)

        # Two files should be InDBS, two files should be NOTUPLOADED
        if i in [0, 2]:
            status = 'InDBS'
        else:
            status = 'NOTUPLOADED'

        testDBSFile = DBSBufferFile(lfn=lfn, size=600000, events=60000,
                                    status=status, workflowId=1)
        testDBSFile.setAlgorithm(appName="cmsRun", appVer="UNKNOWN",
                                 appFam="RECO", psetHash="SOMEHASH" + str(i),
                                 configContent="SOMECONTENT")
        testDBSFile.setDatasetPath("/path/to/some/dataset")
        testDBSFile.create()

        # Create all four combinations of status(InDBS,NOTUPLOADED) and in_phedex(0,1)
        if i in [0, 1]:
            phedexStatus.execute(lfn, 1)
def findUploadableFilesByDAS(self, datasetpath):
    """
    _findUploadableDAS_

    Find all the uploadable files for a given DatasetPath.
    """
    dbsFiles = []

    findFiles = self.daoFactory(classname = "LoadDBSFilesByDAS")
    results = findFiles.execute(datasetpath = datasetpath, transaction = False)

    for entry in results:
        # Add loaded information
        dbsfile = DBSBufferFile(id=entry['id'])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if 'runInfo' in dbsfile.keys():
            # Then we have to replace it with a real run
            for r in dbsfile['runInfo'].keys():
                run = Run(runNumber = r)
                run.extend(dbsfile['runInfo'][r])
                dbsfile.addRun(run)
            del dbsfile['runInfo']
        if 'parentLFNs' in dbsfile.keys():
            # Then we have some parents
            for lfn in dbsfile['parentLFNs']:
                newFile = DBSBufferFile(lfn = lfn)
                dbsfile['parents'].add(newFile)
            del dbsfile['parentLFNs']

    return dbsFiles
def testXSetBlock(self):
    """
    _testSetBlock_

    Verify that the [Set|Get]Block DAOs work correctly.
    """
    myThread = threading.currentThread()
    uploadFactory = DAOFactory(package = "WMComponent.DBSUpload.Database",
                               logger = myThread.logger,
                               dbinterface = myThread.dbi)
    createAction = uploadFactory(classname = "SetBlockStatus")
    createAction.execute(block = "someblockname", locations = ["se1.cern.ch"])

    setBlockAction = self.daoFactory(classname = "DBSBufferFiles.SetBlock")
    getBlockAction = self.daoFactory(classname = "DBSBufferFiles.GetBlock")

    testFile = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024, events = 10,
                             locations = "se1.fnal.gov")
    testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                          appFam = "RECO", psetHash = "GIBBERISH",
                          configContent = "MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    setBlockAction.execute(lfn = testFile["lfn"], blockName = "someblockname")

    blockName = getBlockAction.execute(lfn = testFile["lfn"])
    assert blockName[0][0] == "someblockname", \
        "Error: Incorrect block returned: %s" % blockName[0][0]
    return
def _addToDBSBuffer(self, dbsFile, checksums, locations):
    """
    This step exists purely to improve Accountant performance;
    it is not necessary to check the parentage here.
    """
    dbsBuffer = DBSBufferFile(lfn=dbsFile["LogicalFileName"],
                              size=dbsFile["FileSize"],
                              events=dbsFile["NumberOfEvents"],
                              checksums=checksums,
                              locations=locations,
                              status="GLOBAL",
                              inPhedex=1)
    dbsBuffer.setDatasetPath('bogus')
    dbsBuffer.setAlgorithm(appName="cmsRun", appVer="Unknown", appFam="Unknown",
                           psetHash="Unknown", configContent="Unknown")
    self.dbsFilesToCreate.add(dbsBuffer)
    return
def testFilesize(self):
    """
    _testFilesize_

    Test storing and loading the file information from dbsbuffer_file.
    Make sure filesize can be bigger than 32 bits
    """
    checksums = {"adler32": "adler32", "cksum": "cksum"}
    testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 3221225472,
                              events = 1500000, checksums = checksums)
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_7_6_0",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.create()

    testFileB = DBSBufferFile(lfn = testFileA["lfn"])
    testFileB.load()
    self.assertEqual(testFileB["size"], 3221225472, "Error: the filesize should be 3GB")
    self.assertEqual(testFileB["events"], 1500000, "Error: the number of events should be 1.5M")
def testAddRunSet(self):
    """
    _testAddRunSet_

    Test the ability to add run and lumi information to a file.
    """
    testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10,
                             locations="se1.fnal.gov")
    testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                          appFam="RECO", psetHash="GIBBERISH",
                          configContent="MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    runSet = set()
    runSet.add(Run(1, *[45]))
    runSet.add(Run(2, *[67, 68]))

    testFile.addRunSet(runSet)

    assert (runSet - testFile["runs"]) == set(), \
        "Error: addRunSet is not updating set correctly"
def loadDBSBufferFilesBulk(self, fileObjs):
    """
    _loadDBSBufferFilesBulk_

    Yes, this is a stupid place to put it.
    No, there's no better place.
    """
    myThread = threading.currentThread()

    dbsFiles = []

    existingTransaction = self.beginTransaction()

    binds = []
    for f in fileObjs:
        binds.append(f["id"])

    loadFiles = self.daoFactory(classname = "DBSBufferFiles.LoadBulkFilesByID")
    results = loadFiles.execute(files = binds, conn = self.getDBConn(),
                                transaction = self.existingTransaction())

    for entry in results:
        # Add loaded information
        dbsfile = DBSBufferFile(id=entry['id'])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if 'runInfo' in dbsfile.keys():
            # Then we have to replace it with a real run
            for r in dbsfile['runInfo'].keys():
                run = Run(runNumber = r)
                run.extend(dbsfile['runInfo'][r])
                dbsfile.addRun(run)
            del dbsfile['runInfo']
        if 'parentLFNs' in dbsfile.keys():
            # Then we have some parents
            for lfn in dbsfile['parentLFNs']:
                newFile = DBSBufferFile(lfn = lfn)
                dbsfile['parents'].add(newFile)
            del dbsfile['parentLFNs']

    self.commitTransaction(existingTransaction)

    return dbsFiles
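# Hedged usage sketch (an assumption, not original code): the bulk loader
# above only reads the "id" key from each object in fileObjs, so plain dicts
# carrying an 'id' are enough to drive it, as testBulkLoad further down also
# demonstrates.
def exampleBulkLoad(dbsUtil, fileIDs):
    """
    Bulk-load DBSBufferFiles from a list of integer file IDs.
    """
    binds = [{'id': fileID} for fileID in fileIDs]
    return dbsUtil.loadDBSBufferFilesBulk(fileObjs = binds)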
def findUploadableFilesByDAS(self, das):
    """
    _findUploadableDAS_

    Find all the Dataset-Algo files available with uploadable files.
    """
    myThread = threading.currentThread()
    existingTransaction = self.beginTransaction()

    dbsFiles = []

    factory = DAOFactory(package = "WMComponent.DBSUpload.Database",
                         logger = myThread.logger,
                         dbinterface = myThread.dbi)
    findFiles = factory(classname = "LoadDBSFilesByDAS")
    results = findFiles.execute(das = das,
                                conn = self.getDBConn(),
                                transaction = self.existingTransaction())

    for entry in results:
        # Add loaded information
        dbsfile = DBSBufferFile(id=entry['id'])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if 'runInfo' in dbsfile.keys():
            # Then we have to replace it with a real run
            for r in dbsfile['runInfo'].keys():
                run = Run(runNumber = r)
                run.extend(dbsfile['runInfo'][r])
                dbsfile.addRun(run)
            del dbsfile['runInfo']
        if 'parentLFNs' in dbsfile.keys():
            # Then we have some parents
            for lfn in dbsfile['parentLFNs']:
                newFile = DBSBufferFile(lfn = lfn)
                dbsfile['parents'].add(newFile)
            del dbsfile['parentLFNs']

    self.commitTransaction(existingTransaction)

    return dbsFiles
def loadFilesFromBlocks(self, blockID):
    """
    _loadFilesFromBlocks_

    Load the files from all active blocks
    """
    findFiles = self.factory(classname = "LoadFilesFromBlocks")

    myThread = threading.currentThread()
    existingTransaction = self.beginTransaction()

    dbsFiles = []

    results = findFiles.execute(blockID = blockID,
                                conn = self.getDBConn(),
                                transaction = self.existingTransaction())

    for entry in results:
        # Add loaded information
        dbsfile = DBSBufferFile(id=entry['id'])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if 'runInfo' in dbsfile.keys():
            # Then we have to replace it with a real run
            for r in dbsfile['runInfo'].keys():
                run = Run(runNumber = r)
                run.extend(dbsfile['runInfo'][r])
                dbsfile.addRun(run)
            del dbsfile['runInfo']
        if 'parentLFNs' in dbsfile.keys():
            # Then we have some parents
            for lfn in dbsfile['parentLFNs']:
                newFile = DBSBufferFile(lfn = lfn)
                dbsfile['parents'].add(newFile)
            del dbsfile['parentLFNs']

    self.commitTransaction(existingTransaction)

    return dbsFiles
def findUploadableFilesByDAS(self, das):
    """
    _findUploadableDAS_

    Find all the Dataset-Algo files available with uploadable files.
    """
    myThread = threading.currentThread()
    existingTransaction = self.beginTransaction()

    dbsFiles = []

    findFiles = self.daoFactory(classname="LoadDBSFilesByDAS")
    results = findFiles.execute(das=das,
                                conn=self.getDBConn(),
                                transaction=self.existingTransaction())

    for entry in results:
        # Add loaded information
        dbsfile = DBSBufferFile(id=entry["id"])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if "runInfo" in dbsfile.keys():
            # Then we have to replace it with a real run
            for r in dbsfile["runInfo"].keys():
                run = Run(runNumber=r)
                run.extend(dbsfile["runInfo"][r])
                dbsfile.addRun(run)
            del dbsfile["runInfo"]
        if "parentLFNs" in dbsfile.keys():
            # Then we have some parents
            for lfn in dbsfile["parentLFNs"]:
                newFile = DBSBufferFile(lfn=lfn)
                dbsfile["parents"].add(newFile)
            del dbsfile["parentLFNs"]

    self.commitTransaction(existingTransaction)

    return dbsFiles
def loadFilesByBlock(self, blockname):
    """
    _loadFilesByBlock_

    Get all files associated with a block
    """
    dbsFiles = []

    existingTransaction = self.beginTransaction()

    findFiles = self.daoFactory(classname="LoadFilesByBlock")
    results = findFiles.execute(blockname=blockname,
                                conn=self.getDBConn(),
                                transaction=self.existingTransaction())

    for entry in results:
        # Add loaded information
        dbsfile = DBSBufferFile(id=entry["id"])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if "runInfo" in dbsfile.keys():
            # Then we have to replace it with a real run
            for r in dbsfile["runInfo"].keys():
                run = Run(runNumber=r)
                run.extend(dbsfile["runInfo"][r])
                dbsfile.addRun(run)
            del dbsfile["runInfo"]
        if "parentLFNs" in dbsfile.keys():
            # Then we have some parents
            for lfn in dbsfile["parentLFNs"]:
                newFile = DBSBufferFile(lfn=lfn)
                dbsfile["parents"].add(newFile)
            del dbsfile["parentLFNs"]

    self.commitTransaction(existingTransaction)

    return dbsFiles
def injectFilesFromDBS(inputFileset, datasetPath):
    """
    _injectFilesFromDBS_
    """
    print "injecting files from %s into %s, please wait..." % (datasetPath,
                                                               inputFileset.name)
    args = {}
    args["url"] = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
    args["version"] = "DBS_2_0_9"
    args["mode"] = "GET"
    dbsApi = DbsApi(args)
    dbsResults = dbsApi.listFileArray(path = datasetPath,
                                      retriveList = ["retrive_lumi", "retrive_run"])
    dbsResults = dbsResults[0:10]
    print "  found %d files, inserting into wmbs..." % (len(dbsResults))

    for dbsResult in dbsResults:
        myFile = File(lfn = dbsResult["LogicalFileName"],
                      size = dbsResult["FileSize"],
                      events = dbsResult["NumberOfEvents"],
                      checksums = {"cksum": dbsResult["Checksum"]},
                      locations = "cmssrm.fnal.gov", merged = True)
        myRun = Run(runNumber = dbsResult["LumiList"][0]["RunNumber"])
        for lumi in dbsResult["LumiList"]:
            myRun.lumis.append(lumi["LumiSectionNumber"])
        myFile.addRun(myRun)
        myFile.create()
        inputFileset.addFile(myFile)

        dbsFile = DBSBufferFile(lfn = dbsResult["LogicalFileName"],
                                size = dbsResult["FileSize"],
                                events = dbsResult["NumberOfEvents"],
                                checksums = {"cksum": dbsResult["Checksum"]},
                                locations = "cmssrm.fnal.gov",
                                status = "LOCAL")
        dbsFile.setDatasetPath(datasetPath)
        dbsFile.setAlgorithm(appName = "cmsRun", appVer = "Unknown",
                             appFam = "Unknown", psetHash = "Unknown",
                             configContent = "Unknown")
        dbsFile.create()

    inputFileset.commit()
    inputFileset.markOpen(False)
    return
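# Hedged usage sketch (an assumption, not original code): injectFilesFromDBS()
# expects an existing, open WMBS Fileset; the fileset name and dataset path
# below are illustrative only.
def exampleInjectFromDBS():
    from WMCore.WMBS.Fileset import Fileset

    inputFileset = Fileset(name = "TestFileset")
    inputFileset.create()
    # Pulls the first 10 files of the dataset into WMBS and the DBS buffer
    injectFilesFromDBS(inputFileset, "/Cosmics/CRUZET09-PromptReco-v1/RECO")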
def stuffDatabase(self, tier0Mode = False):
    """
    _stuffDatabase_

    Fill the dbsbuffer with some files and blocks.  We'll insert a total
    of 5 files spanning two blocks.  There will be a total of two datasets
    inserted into the database.

    All files will already be GLOBAL and in PhEDEx.
    """
    myThread = threading.currentThread()

    buffer3Factory = DAOFactory(package = "WMComponent.DBS3Buffer",
                                logger = myThread.logger,
                                dbinterface = myThread.dbi)
    insertWorkflow = buffer3Factory(classname = "InsertWorkflow")
    insertWorkflow.execute("BogusRequestA", "BogusTask", 0, 0, 0, 0)
    insertWorkflow.execute("BogusRequestB", "BogusTask", 0, 0, 0, 0)

    checksums = {"adler32": "1234", "cksum": "5678"}
    testFileA = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                              checksums = checksums,
                              locations = set(["srm-cms.cern.ch"]))
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath(self.testDatasetA)
    testFileA.addRun(Run(2, *[45]))
    testFileA.create()

    testFileB = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                              checksums = checksums,
                              locations = set(["srm-cms.cern.ch"]))
    testFileB.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileB.setDatasetPath(self.testDatasetA)
    testFileB.addRun(Run(2, *[45]))
    testFileB.create()

    testFileC = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                              checksums = checksums,
                              locations = set(["srm-cms.cern.ch"]))
    testFileC.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileC.setDatasetPath(self.testDatasetA)
    testFileC.addRun(Run(2, *[45]))
    testFileC.create()

    self.testFilesA.append(testFileA)
    self.testFilesA.append(testFileB)
    self.testFilesA.append(testFileC)

    testFileD = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                              checksums = checksums,
                              locations = set(["srm-cms.cern.ch"]))
    testFileD.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileD.setDatasetPath(self.testDatasetB)
    testFileD.addRun(Run(2, *[45]))
    testFileD.create()

    testFileE = DBSBufferFile(lfn = makeUUID(), size = 1024, events = 10,
                              checksums = checksums,
                              locations = set(["srm-cms.cern.ch"]))
    testFileE.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileE.setDatasetPath(self.testDatasetB)
    testFileE.addRun(Run(2, *[45]))
    testFileE.create()

    self.testFilesB.append(testFileD)
    self.testFilesB.append(testFileE)

    uploadFactory = DAOFactory(package = "WMComponent.DBSUpload.Database",
                               logger = myThread.logger,
                               dbinterface = myThread.dbi)
    createBlock = uploadFactory(classname = "SetBlockStatus")

    self.blockAName = self.testDatasetA + "#" + makeUUID()
    self.blockBName = self.testDatasetB + "#" + makeUUID()
    createBlock.execute(block = self.blockAName,
                        locations = ["srm-cms.cern.ch"],
                        open_status = 'Closed')
    createBlock.execute(block = self.blockBName,
                        locations = ["srm-cms.cern.ch"],
                        open_status = 'Closed')

    bufferFactory = DAOFactory(package = "WMComponent.DBSBuffer.Database",
                               logger = myThread.logger,
                               dbinterface = myThread.dbi)

    setBlock = bufferFactory(classname = "DBSBufferFiles.SetBlock")
    setBlock.execute(testFileA["lfn"], self.blockAName)
    setBlock.execute(testFileB["lfn"], self.blockAName)
    setBlock.execute(testFileC["lfn"], self.blockAName)
    setBlock.execute(testFileD["lfn"], self.blockBName)
    setBlock.execute(testFileE["lfn"], self.blockBName)

    fileStatus = bufferFactory(classname = "DBSBufferFiles.SetStatus")
    fileStatus.execute(testFileA["lfn"], "GLOBAL")
    fileStatus.execute(testFileB["lfn"], "GLOBAL")
    fileStatus.execute(testFileC["lfn"], "GLOBAL")
    fileStatus.execute(testFileD["lfn"], "GLOBAL")
    fileStatus.execute(testFileE["lfn"], "GLOBAL")

    phedexStatus = bufferFactory(classname = "DBSBufferFiles.SetPhEDExStatus")
    phedexStatus.execute(testFileA["lfn"], 1)
    phedexStatus.execute(testFileB["lfn"], 1)
    phedexStatus.execute(testFileC["lfn"], 1)
    phedexStatus.execute(testFileD["lfn"], 1)
    phedexStatus.execute(testFileE["lfn"], 1)

    associateWorkflow = buffer3Factory(classname = "DBSBufferFiles.AssociateWorkflowToFile")
    associateWorkflow.execute(testFileA["lfn"], "BogusRequestA", "BogusTask")
    associateWorkflow.execute(testFileB["lfn"], "BogusRequestA", "BogusTask")
    associateWorkflow.execute(testFileC["lfn"], "BogusRequestA", "BogusTask")
    associateWorkflow.execute(testFileD["lfn"], "BogusRequestB", "BogusTask")
    associateWorkflow.execute(testFileE["lfn"], "BogusRequestB", "BogusTask")

    # Make the desired subscriptions
    insertSubAction = buffer3Factory(classname = "NewSubscription")
    datasetA = DBSBufferDataset(path = self.testDatasetA)
    datasetB = DBSBufferDataset(path = self.testDatasetB)
    workload = WMWorkloadHelper()
    workload.load(os.path.join(getTestBase(),
                               'WMComponent_t/PhEDExInjector_t/specs/TestWorkload.pkl'))
    if tier0Mode:
        # Override the settings
        workload.setSubscriptionInformation(custodialSites = ["T0_CH_CERN", "T1_US_FNAL"],
                                            nonCustodialSites = ["T3_CO_Uniandes"],
                                            priority = "Normal",
                                            custodialSubType = "Replica",
                                            autoApproveSites = ["T0_CH_CERN"],
                                            dataTier = "RECO")
        workload.setSubscriptionInformation(custodialSites = ["T0_CH_CERN", "T1_UK_RAL"],
                                            nonCustodialSites = [],
                                            autoApproveSites = [],
                                            priority = "High",
                                            custodialSubType = "Replica",
                                            dataTier = "RAW")
    insertSubAction.execute(datasetA.exists(),
                            workload.getSubscriptionInformation()[self.testDatasetA])
    insertSubAction.execute(datasetB.exists(),
                            workload.getSubscriptionInformation()[self.testDatasetB])

    return
def getFiles(self, name, tier, nFiles=12, site="malpaquet", nLumis=1):
    """
    _getFiles_

    Create some dummy test files.
    """
    files = []

    (acqEra, procVer) = name.split("-")
    baseLFN = "/store/data/%s/Cosmics/RECO/%s/000/143/316/" % (acqEra, procVer)
    for f in range(nFiles):
        testFile = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                 size=1024, events=20, checksums={"cksum": 1})
        testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                              appFam="RECO", psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/%s-%s/RECO" % (acqEra, procVer))
        lumis = []
        for i in range(nLumis):
            lumis.append((f * 1000000) + i)
        testFile.addRun(Run(1, *lumis))
        testFile.setAcquisitionEra(acqEra)
        testFile.setProcessingVer("0")
        testFile.setGlobalTag("START54::All")
        testFile.create()
        testFile.setLocation(site)
        files.append(testFile)

    baseLFN = "/store/data/%s/Cosmics/RAW-RECO/%s/000/143/316/" % (acqEra, procVer)
    testFileChild = DBSBufferFile(lfn=baseLFN + makeUUID() + ".root",
                                  size=1024, events=10, checksums={'cksum': 1})
    testFileChild.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                               appFam="RAW-RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
    testFileChild.setDatasetPath("/Cosmics/%s-%s/RAW-RECO" % (acqEra, procVer))
    testFileChild.addRun(Run(1, *[45]))
    testFileChild.create()
    testFileChild.setLocation(site)

    testFileChild.addParents([x['lfn'] for x in files])

    return files
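# Note (an assumption derived from the helper above, not original code):
# getFiles() splits `name` on "-" to recover the acquisition era and the
# processing version, so callers must pass a hyphenated name, e.g.:
#
#   files = self.getFiles(name = "TestEra-v1", tier = "RECO")
#
# which creates 12 RECO files plus one RAW-RECO child parented to all of them.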
def testAddCKSumByLFN(self):
    """
    _testAddCKSumByLFN_
    """
    testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.create()

    setCksumAction = self.daoFactory(classname="DBSBufferFiles.AddChecksumByLFN")
    binds = [{'lfn': "/this/is/a/lfn", 'cktype': 'adler32', 'cksum': 201},
             {'lfn': "/this/is/a/lfn", 'cktype': 'cksum', 'cksum': 101}]
    setCksumAction.execute(bulkList=binds)

    testFileB = DBSBufferFile(id=testFileA["id"])
    testFileB.load()

    self.assertEqual(testFileB['checksums'], {'adler32': '201', 'cksum': '101'})
    return
def addFileToDBS(self, jobReportFile, task, errorDataset = False):
    """
    _addFileToDBS_

    Add a file that was output from a job to the DBS buffer.
    """
    datasetInfo = jobReportFile["dataset"]

    dbsFile = DBSBufferFile(lfn = jobReportFile["lfn"],
                            size = jobReportFile["size"],
                            events = jobReportFile["events"],
                            checksums = jobReportFile["checksums"],
                            status = "NOTUPLOADED")
    dbsFile.setAlgorithm(appName = datasetInfo["applicationName"],
                         appVer = datasetInfo["applicationVersion"],
                         appFam = jobReportFile["module_label"],
                         psetHash = "GIBBERISH",
                         configContent = jobReportFile.get('configURL'))

    if errorDataset:
        dbsFile.setDatasetPath("/%s/%s/%s" % (datasetInfo["primaryDataset"] + "-Error",
                                              datasetInfo["processedDataset"],
                                              datasetInfo["dataTier"]))
    else:
        dbsFile.setDatasetPath("/%s/%s/%s" % (datasetInfo["primaryDataset"],
                                              datasetInfo["processedDataset"],
                                              datasetInfo["dataTier"]))

    dbsFile.setValidStatus(validStatus = jobReportFile.get("validStatus", None))
    dbsFile.setProcessingVer(ver = jobReportFile.get('processingVer', None))
    dbsFile.setAcquisitionEra(era = jobReportFile.get('acquisitionEra', None))
    dbsFile.setGlobalTag(globalTag = jobReportFile.get('globalTag', None))
    # TODO: need to find where to get the prep id
    dbsFile.setPrepID(prep_id = jobReportFile.get('prep_id', None))
    dbsFile['task'] = task

    for run in jobReportFile["runs"]:
        newRun = Run(runNumber = run.run)
        newRun.extend(run.lumis)
        dbsFile.addRun(newRun)

    dbsFile.setLocation(pnn = list(jobReportFile["locations"])[0],
                        immediateSave = False)
    self.dbsFilesToCreate.append(dbsFile)
    return
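# Note (an assumption inferred from the method body above): jobReportFile is a
# dict-like FWJR file entry that must provide at least "lfn", "size",
# "events", "checksums", "module_label", "runs", "locations" and a "dataset"
# dict with "applicationName", "applicationVersion", "primaryDataset",
# "processedDataset" and "dataTier"; the optional keys ("validStatus",
# "processingVer", "acquisitionEra", "globalTag", "prep_id", "configURL")
# default to None when absent.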
def testFilesize(self):
    """
    _testFilesize_

    Test storing and loading the file information from dbsbuffer_file.
    Make sure filesize can be bigger than 32 bits
    """
    checksums = {"adler32": "adler32", "cksum": "cksum"}
    testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=3221225472,
                              events=1500000, checksums=checksums)
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_7_6_0",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.create()

    testFileB = DBSBufferFile(lfn=testFileA["lfn"])
    testFileB.load()
    self.assertEqual(testFileB["size"], 3221225472, "Error: the filesize should be 3GB")
    self.assertEqual(testFileB["events"], 1500000, "Error: the number of events should be 1.5M")
def testAddParents(self):
    """
    _testAddParents_

    Verify that the addParents() method works correctly even if the parents
    do not already exist in the database.
    """
    myThread = threading.currentThread()

    testFile = DBSBufferFile(lfn="/this/is/a/lfnA", size=1024, events=10,
                             locations="se1.fnal.gov")
    testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                          appFam="RECO", psetHash="GIBBERISH",
                          configContent="MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    testParent = DBSBufferFile(lfn="/this/is/a/lfnB", size=1024, events=10,
                               locations="se1.fnal.gov")
    testParent.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                            appFam="RECO", psetHash="GIBBERISH",
                            configContent="MOREGIBBERISH")
    testParent.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RAW")
    testParent.create()

    goldenLFNs = ["lfn1", "lfn2", "lfn3", "/this/is/a/lfnB"]
    testFile.addParents(goldenLFNs)

    verifyFile = DBSBufferFile(id=testFile["id"])
    verifyFile.load(parentage=1)
    parentLFNs = verifyFile.getParentLFNs()

    for parentLFN in parentLFNs:
        self.assertTrue(parentLFN in goldenLFNs,
                        "Error: unknown lfn %s" % parentLFN)
        goldenLFNs.remove(parentLFN)

    self.assertEqual(len(goldenLFNs), 0, "Error: missing LFNs...")

    # Check that the bogus dataset is listed as inDBS
    sqlCommand = """SELECT in_dbs FROM dbsbuffer_algo_dataset_assoc das
                      INNER JOIN dbsbuffer_dataset ds ON das.dataset_id = ds.id
                      WHERE ds.path = 'bogus'"""
    status = myThread.dbi.processData(sqlCommand)[0].fetchall()[0][0]
    self.assertEqual(status, 1)

    # Now make sure the dummy files are listed as being in DBS
    sqlCommand = """SELECT status FROM dbsbuffer_file df
                      INNER JOIN dbsbuffer_algo_dataset_assoc das ON das.id = df.dataset_algo
                      INNER JOIN dbsbuffer_dataset ds ON das.dataset_id = ds.id
                      WHERE ds.path = '/bogus/dataset/path' """
    status = myThread.dbi.processData(sqlCommand)[0].fetchall()
    for entry in status:
        self.assertEqual(entry, ('AlreadyInDBS', ))

    return
def testGetParentStatusDAO(self):
    """
    _testGetParentStatusDAO_

    Verify that the GetParentStatus DAO correctly returns the status of a
    file's children.
    """
    testFileChild = DBSBufferFile(lfn="/this/is/a/child/lfnA", size=1024,
                                  events=20)
    testFileChild.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                               appFam="RECO", psetHash="GIBBERISH",
                               configContent="MOREGIBBERISH")
    testFileChild.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileChild.create()

    testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                          appFam="RECO", psetHash="GIBBERISH",
                          configContent="MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    testFileChild.addParents([testFile["lfn"]])

    getStatusAction = self.daoFactory(classname="DBSBufferFiles.GetParentStatus")
    parentStatus = getStatusAction.execute(testFileChild["lfn"])

    assert len(parentStatus) == 1, \
        "ERROR: Wrong number of statuses returned."
    assert parentStatus[0] == "NOTUPLOADED", \
        "ERROR: Wrong status returned."

    return
def testPublishJSONCreate(self):
    """
    Re-run testA_BasicFunctionTest with data in DBSBuffer
    Make sure files are generated
    """
    # Set up uploading and write them elsewhere since the test deletes them.
    self.uploadPublishInfo = True
    self.uploadPublishDir = self.testDir

    # Insert some DBSFiles
    testFileChildA = DBSBufferFile(lfn="/this/is/a/child/lfnA", size=1024, events=20)
    testFileChildA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                appFam="RECO", psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
    testFileChildB = DBSBufferFile(lfn="/this/is/a/child/lfnB", size=1024, events=20)
    testFileChildB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                appFam="RECO", psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
    testFileChildC = DBSBufferFile(lfn="/this/is/a/child/lfnC", size=1024, events=20)
    testFileChildC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                appFam="RECO", psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")

    testFileChildA.setDatasetPath("/Cosmics/USER-DATASET1-v1/USER")
    testFileChildB.setDatasetPath("/Cosmics/USER-DATASET1-v1/USER")
    testFileChildC.setDatasetPath("/Cosmics/USER-DATASET2-v1/USER")

    testFileChildA.create()
    testFileChildB.create()
    testFileChildC.create()

    testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                          appFam="RECO", psetHash="GIBBERISH",
                          configContent="MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    testFileChildA.addParents([testFile["lfn"]])
    testFileChildB.addParents([testFile["lfn"]])
    testFileChildC.addParents([testFile["lfn"]])

    myThread = threading.currentThread()
    self.dbsDaoFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                                    logger=myThread.logger,
                                    dbinterface=myThread.dbi)
    self.insertWorkflow = self.dbsDaoFactory(classname="InsertWorkflow")
    workflowID = self.insertWorkflow.execute(requestName='TestWorkload',
                                             taskPath='TestWorkload/Analysis',
                                             blockMaxCloseTime=100,
                                             blockMaxFiles=100,
                                             blockMaxEvents=100,
                                             blockMaxSize=100)

    myThread.dbi.processData("update dbsbuffer_file set workflow=1 where id < 4")

    # Run the test again
    self.testA_BasicFunctionTest()

    # Reset default values
    self.uploadPublishInfo = False
    self.uploadPublishDir = None

    # Make sure the files are there
    self.assertTrue(os.path.exists(os.path.join(self.testDir, 'TestWorkload_publish.json')))
    self.assertTrue(os.path.getsize(os.path.join(self.testDir, 'TestWorkload_publish.json')) > 100)
    self.assertTrue(os.path.exists(os.path.join(self.testDir, 'TestWorkload_publish.tgz')))

    return
def testProperties(self):
    """
    _testProperties_

    Test added tags that use DBSBuffer to transfer from workload to DBS
    """
    testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.setValidStatus(validStatus="VALID")
    testFileA.setProcessingVer(ver="ProcVer")
    testFileA.setAcquisitionEra(era="AcqEra")
    testFileA.setGlobalTag(globalTag="GlobalTag")
    testFileA.setDatasetParent(datasetParent="Parent")
    testFileA.create()

    # There are no accessors for these things because load is never called
    action = self.daoFactory2(classname="LoadInfoFromDAS")
    das = action.execute(ids=[1])[0]
    self.assertEqual(das['Parent'], 'Parent')
    self.assertEqual(das['GlobalTag'], 'GlobalTag')
    self.assertEqual(das['ValidStatus'], 'VALID')
    return
def createParentFiles(self, acqEra, nFiles = 10,
                      workflowName = 'TestWorkload',
                      taskPath = '/TestWorkload/DataTest'):
    """
    _createParentFiles_

    Create several parentless files in DBSBuffer.  This simulates raw files
    in the T0.
    """
    workflowId = self.injectWorkflow(workflowName = workflowName,
                                     taskPath = taskPath)
    parentlessFiles = []

    baseLFN = "/store/data/%s/Cosmics/RAW/v1/000/143/316/" % (acqEra)
    for i in range(nFiles):
        testFile = DBSBufferFile(lfn = baseLFN + makeUUID() + ".root",
                                 size = 1024, events = 20,
                                 checksums = {"cksum": 1},
                                 workflowId = workflowId)
        testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_3_1_1",
                              appFam = "RAW", psetHash = "GIBBERISH",
                              configContent = "MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/%s-v1/RAW" % (acqEra))

        testFile['block_close_max_wait_time'] = 1000000
        testFile['block_close_max_events'] = 1000000
        testFile['block_close_max_size'] = 1000000
        testFile['block_close_max_files'] = 1000000

        lumis = []
        for j in range(10):
            lumis.append((i * 10) + j)
        testFile.addRun(Run(143316, *lumis))

        testFile.setAcquisitionEra(acqEra)
        testFile.setProcessingVer("1")
        testFile.setGlobalTag("START54::All")
        testFile.create()
        testFile.setLocation("malpaquet")
        parentlessFiles.append(testFile)

    return parentlessFiles
def createFilesWithChildren(self, moreParentFiles, acqEra):
    """
    _createFilesWithChildren_

    Create several parentless files and then create child files.
    """
    parentFiles = []
    childFiles = []

    baseLFN = "/store/data/%s/Cosmics/RAW/v1/000/143/316/" % (acqEra)
    for i in range(10):
        testFile = DBSBufferFile(lfn = baseLFN + makeUUID() + ".root",
                                 size = 1024, events = 20,
                                 checksums = {"cksum": 1})
        testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_3_1_1",
                              appFam = "RAW", psetHash = "GIBBERISH",
                              configContent = "MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/%s-v1/RAW" % (acqEra))

        testFile['block_close_max_wait_time'] = 1000000
        testFile['block_close_max_events'] = 1000000
        testFile['block_close_max_size'] = 1000000
        testFile['block_close_max_files'] = 1000000

        lumis = []
        for j in range(10):
            lumis.append((i * 10) + j)
        testFile.addRun(Run(143316, *lumis))

        testFile.setAcquisitionEra(acqEra)
        testFile.setProcessingVer("1")
        testFile.setGlobalTag("START54::All")
        testFile.create()
        testFile.setLocation("malpaquet")
        parentFiles.append(testFile)

    baseLFN = "/store/data/%s/Cosmics/RECO/v1/000/143/316/" % (acqEra)
    for i in range(5):
        testFile = DBSBufferFile(lfn = baseLFN + makeUUID() + ".root",
                                 size = 1024, events = 20,
                                 checksums = {"cksum": 1})
        testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_3_1_1",
                              appFam = "RECO", psetHash = "GIBBERISH",
                              configContent = "MOREGIBBERISH")
        testFile.setDatasetPath("/Cosmics/%s-v1/RECO" % (acqEra))

        testFile['block_close_max_wait_time'] = 1000000
        testFile['block_close_max_events'] = 1000000
        testFile['block_close_max_size'] = 1000000
        testFile['block_close_max_files'] = 1000000

        lumis = []
        for j in range(20):
            lumis.append((i * 20) + j)
        testFile.addRun(Run(143316, *lumis))

        testFile.setAcquisitionEra(acqEra)
        testFile.setProcessingVer("1")
        testFile.setGlobalTag("START54::All")
        testFile.create()
        testFile.setLocation("malpaquet")

        testFile.addParents([parentFiles[i * 2]["lfn"],
                             parentFiles[i * 2 + 1]["lfn"]])
        testFile.addParents([moreParentFiles[i * 2]["lfn"],
                             moreParentFiles[i * 2 + 1]["lfn"]])
        childFiles.append(testFile)

    return (parentFiles, childFiles)
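# Hedged usage sketch (an assumption, not original code): createParentFiles()
# and createFilesWithChildren() are meant to be chained, e.g.:
#
#   rawFiles = self.createParentFiles("TestAcqEra")
#   (moreRaw, recoFiles) = self.createFilesWithChildren(rawFiles, "TestAcqEra")
#
# so each of the 5 RECO children ends up with two parents from each RAW batch.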
def testB_AlgoMigration(self):
    """
    _AlgoMigration_

    Test our ability to migrate multiple algos to global

    Do this by creating, mid-poll, two separate batches of files
    One with the same dataset but a different algo
    One with the same algo, but a different dataset
    See that they both get to global
    """
    # raise nose.SkipTest
    myThread = threading.currentThread()
    config = self.createConfig()
    self.injectWorkflow(MaxWaitTime=20)
    name = "ThisIsATest_%s" % (makeUUID())
    tier = "RECO"
    nFiles = 12
    files = self.getFiles(name=name, tier=tier, nFiles=nFiles)
    datasetPath = '/%s/%s/%s' % (name, name, tier)

    # Load components that are necessary to check status
    factory = WMFactory("dbsUpload", "WMComponent.DBSUpload.Database.Interface")
    dbinterface = factory.loadObject("UploadToDBS")

    dbsInterface = DBSInterface(config=config)
    localAPI = dbsInterface.getAPIRef()
    globeAPI = dbsInterface.getAPIRef(globalRef=True)

    testDBSUpload = DBSUploadPoller(config=config)
    testDBSUpload.algorithm()

    # There should now be one block
    result = listBlocks(apiRef=globeAPI, datasetPath=datasetPath)
    self.assertEqual(len(result), 1)

    # Okay, by now, the first migration should have gone through.
    # Now create a second batch of files with the same dataset
    # but a different algo.
    for i in range(0, nFiles):
        testFile = DBSBufferFile(lfn='%s-batch2-%i' % (name, i),
                                 size=1024, events=20,
                                 checksums={'cksum': 1},
                                 locations="malpaquet")
        testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_3_1_1",
                              appFam=tier, psetHash="GIBBERISH_PART2",
                              configContent=self.configURL)
        testFile.setDatasetPath(datasetPath)
        testFile.addRun(Run(1, *[46]))
        testFile.create()

    # Have to do things twice to get parents
    testDBSUpload.algorithm()
    testDBSUpload.algorithm()

    # There should now be two blocks
    result = listBlocks(apiRef=globeAPI, datasetPath=datasetPath)
    self.assertEqual(len(result), 2)

    # Now create another batch of files with the original algo
    # But in a different dataset
    for i in range(0, nFiles):
        testFile = DBSBufferFile(lfn='%s-batch3-%i' % (name, i),
                                 size=1024, events=20,
                                 checksums={'cksum': 1},
                                 locations="malpaquet")
        testFile.setAlgorithm(appName=name, appVer="CMSSW_3_1_1",
                              appFam=tier, psetHash="GIBBERISH",
                              configContent=self.configURL)
        testFile.setDatasetPath('/%s/%s_3/%s' % (name, name, tier))
        testFile.addRun(Run(1, *[46]))
        testFile.create()

    # Do it twice for parentage.
    testDBSUpload.algorithm()
    testDBSUpload.algorithm()

    # There should now be one block
    result = listBlocks(apiRef=globeAPI,
                        datasetPath='/%s/%s_3/%s' % (name, name, tier))
    self.assertEqual(len(result), 1)

    # Well, all the blocks got there, so we're done
    return
def getFiles(self, name, tier, nFiles=12, site="malpaquet",
             workflowName=None, taskPath=None, noChild=False):
    """
    Create some quick dummy test files
    """
    if workflowName is not None and taskPath is not None:
        workflowId = self.injectWorkflow(workflowName=workflowName,
                                         taskPath=taskPath)
    else:
        workflowId = self.injectWorkflow()

    files = []

    for f in range(0, nFiles):
        testFile = DBSBufferFile(lfn='%s-%s-%i' % (name, site, f), size=1024,
                                 events=20, checksums={'cksum': 1},
                                 workflowId=workflowId)
        testFile.setAlgorithm(appName=name, appVer="CMSSW_3_1_1",
                              appFam="RECO", psetHash="GIBBERISH",
                              configContent=self.configURL)
        testFile.setDatasetPath("/%s/%s/%s" % (name, name, tier))
        testFile.addRun(Run(1, *[f]))
        testFile.setGlobalTag("aGlobalTag")
        testFile.create()
        testFile.setLocation(site)
        files.append(testFile)

    if not noChild:
        testFileChild = DBSBufferFile(lfn='%s-%s-child' % (name, site),
                                      size=1024, events=10,
                                      checksums={'cksum': 1},
                                      workflowId=workflowId)
        testFileChild.setAlgorithm(appName=name, appVer="CMSSW_3_1_1",
                                   appFam="RECO", psetHash="GIBBERISH",
                                   configContent=self.configURL)
        testFileChild.setDatasetPath("/%s/%s_2/RECO" % (name, name))
        testFileChild.addRun(Run(1, *[45]))
        testFileChild.setGlobalTag("aGlobalTag")
        testFileChild.create()
        testFileChild.setLocation(site)

        testFileChild.addParents([x['lfn'] for x in files])

    return files
def testBulkLoad(self):
    """
    _testBulkLoad_

    Can we load in bulk?
    """
    addToBuffer = DBSBufferUtil()

    testFileChildA = DBSBufferFile(lfn="/this/is/a/child/lfnA", size=1024, events=20)
    testFileChildA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                appFam="RECO", psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
    testFileChildA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileChildB = DBSBufferFile(lfn="/this/is/a/child/lfnB", size=1024, events=20)
    testFileChildB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                appFam="RECO", psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
    testFileChildB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileChildC = DBSBufferFile(lfn="/this/is/a/child/lfnC", size=1024, events=20)
    testFileChildC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                appFam="RECO", psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
    testFileChildC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    testFileChildA.create()
    testFileChildB.create()
    testFileChildC.create()

    testFileChildA.setLocation(["se1.fnal.gov", "se1.cern.ch"])
    testFileChildB.setLocation(["se1.fnal.gov", "se1.cern.ch"])
    testFileChildC.setLocation(["se1.fnal.gov", "se1.cern.ch"])

    runSet = set()
    runSet.add(Run(1, *[45]))
    runSet.add(Run(2, *[67, 68]))
    testFileChildA.addRunSet(runSet)
    testFileChildB.addRunSet(runSet)
    testFileChildC.addRunSet(runSet)

    testFileChildA.save()
    testFileChildB.save()
    testFileChildC.save()

    setCksumAction = self.daoFactory(classname="DBSBufferFiles.AddChecksumByLFN")
    binds = [{'lfn': "/this/is/a/child/lfnA", 'cktype': 'adler32', 'cksum': 201},
             {'lfn': "/this/is/a/child/lfnA", 'cktype': 'cksum', 'cksum': 101},
             {'lfn': "/this/is/a/child/lfnB", 'cktype': 'adler32', 'cksum': 201},
             {'lfn': "/this/is/a/child/lfnB", 'cktype': 'cksum', 'cksum': 101},
             {'lfn': "/this/is/a/child/lfnC", 'cktype': 'adler32', 'cksum': 201},
             {'lfn': "/this/is/a/child/lfnC", 'cktype': 'cksum', 'cksum': 101}]
    setCksumAction.execute(bulkList=binds)

    testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                          appFam="RECO", psetHash="GIBBERISH",
                          configContent="MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    testFileChildA.addParents([testFile["lfn"]])
    testFileChildB.addParents([testFile["lfn"]])
    testFileChildC.addParents([testFile["lfn"]])

    binds = [{'id': testFileChildA.exists()},
             {'id': testFileChildB.exists()},
             {'id': testFileChildC.exists()}]

    listOfFiles = addToBuffer.loadDBSBufferFilesBulk(fileObjs=binds)

    compareList = ['locations', 'psetHash', 'configContent', 'appName',
                   'appVer', 'appFam', 'events', 'datasetPath', 'runs']

    for f in listOfFiles:
        self.assertTrue(f['lfn'] in ["/this/is/a/child/lfnA",
                                     "/this/is/a/child/lfnB",
                                     "/this/is/a/child/lfnC"],
                        "Unknown file in loaded results")
        self.assertEqual(f['checksums'], {'adler32': '201', 'cksum': '101'})
        for parent in f['parents']:
            self.assertEqual(parent['lfn'], testFile['lfn'])
        for key in compareList:
            self.assertEqual(f[key], testFileChildA[key])
def addFile(self, file, dataset=0):
    """
    Add the file to the buffer
    """
    myThread = threading.currentThread()

    existingTransaction = self.beginTransaction()

    bufferFile = DBSBufferFile(lfn = file['LFN'], size = file['Size'],
                               events = file['TotalEvents'],
                               cksum = file['Checksum'], dataset = dataset)

    runLumiList = file.getLumiSections()
    runList = [x['RunNumber'] for x in runLumiList]

    for runNumber in runList:
        lumis = [int(y['LumiSectionNumber']) for y in runLumiList
                 if y['RunNumber'] == runNumber]
        run = Run(runNumber, *lumis)
        bufferFile.addRun(run)

    if bufferFile.exists() == False:
        bufferFile.create()
        bufferFile.setLocation(se = file['SEName'], immediateSave = True)
    else:
        bufferFile.load()

    # Let's add the file to DBS Buffer as well
    # UPDATE file count
    self.updateDSFileCount(dataset = dataset)

    # Parent files
    bufferFile.addParents(file.inputFiles)

    self.commitTransaction(existingTransaction)

    return
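# Note (an assumption inferred from the method above, not original code):
# `file` is a DBS-style file object supporting dict access for 'LFN', 'Size',
# 'TotalEvents', 'Checksum' and 'SEName', plus a getLumiSections() method
# returning dicts with 'RunNumber' and 'LumiSectionNumber' keys and an
# `inputFiles` attribute listing parent LFNs, e.g.:
#
#   buffer.addFile(dbsFileInfo, dataset = datasetID)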
def testDeleteTransaction(self):
    """
    _testDeleteTransaction_

    Create a file and commit it to the database.  Start a new transaction
    and delete the file.  Rollback the transaction after the file has been
    deleted.  Use the file class's exists() method to verify that the file
    does not exist after it has been deleted but does exist after the
    transaction is rolled back.
    """
    testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                          appFam="RECO", psetHash="GIBBERISH",
                          configContent="MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    assert testFile.exists() == False, \
        "ERROR: File exists before it was created"

    testFile.addRun(Run(1, *[45]))
    testFile.create()

    assert testFile.exists() > 0, \
        "ERROR: File does not exist after it was created"

    myThread = threading.currentThread()
    myThread.transaction.begin()

    testFile.delete()

    assert testFile.exists() == False, \
        "ERROR: File exists after it has been deleted"

    myThread.transaction.rollback()

    assert testFile.exists() > 0, \
        "ERROR: File does not exist after transaction was rolled back."

    return
def testCountFilesDAO(self):
    """
    _testCountFilesDAO_

    Verify that the CountFiles DAO object functions correctly.
    """
    testFileA = DBSBufferFile(lfn="/this/is/a/lfnA", size=1024, events=10,
                              locations="se1.fnal.gov")
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.create()

    testFileB = DBSBufferFile(lfn="/this/is/a/lfnB", size=1024, events=10,
                              locations="se1.fnal.gov")
    testFileB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileB.create()

    testFileC = DBSBufferFile(lfn="/this/is/a/lfnC", size=1024, events=10,
                              locations="se1.fnal.gov")
    testFileC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileC.create()

    countAction = self.daoFactory(classname="CountFiles")

    assert countAction.execute() == 3, \
        "Error: Wrong number of files counted in DBS Buffer."

    return
def testCreateDeleteExists(self):
    """
    _testCreateDeleteExists_

    Test the create(), delete() and exists() methods of the file class by
    creating and deleting a file.  The exists() method will be called
    before and after creation and after deletion.
    """
    testFile = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024, events = 10)
    testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                          appFam = "RECO", psetHash = "GIBBERISH",
                          configContent = "MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    assert testFile.exists() == False, \
        "ERROR: File exists before it was created"

    testFile.addRun(Run(1, *[45]))
    testFile.create()

    assert testFile.exists() > 0, \
        "ERROR: File does not exist after it was created"

    testFile.delete()

    assert testFile.exists() == False, \
        "ERROR: File exists after it has been deleted"

    return
def testGetChildrenDAO(self):
    """
    _testGetChildrenDAO_

    Verify that the GetChildren DAO correctly returns the LFNs of a file's
    children.
    """
    testFileChildA = DBSBufferFile(lfn="/this/is/a/child/lfnA", size=1024,
                                   events=20)
    testFileChildA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                appFam="RECO", psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
    testFileChildA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileChildB = DBSBufferFile(lfn="/this/is/a/child/lfnB", size=1024,
                                   events=20)
    testFileChildB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                appFam="RECO", psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
    testFileChildB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileChildC = DBSBufferFile(lfn="/this/is/a/child/lfnC", size=1024,
                                   events=20)
    testFileChildC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                appFam="RECO", psetHash="GIBBERISH",
                                configContent="MOREGIBBERISH")
    testFileChildC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    testFileChildA.create()
    testFileChildB.create()
    testFileChildC.create()

    testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                          appFam="RECO", psetHash="GIBBERISH",
                          configContent="MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    testFileChildA.addParents([testFile["lfn"]])
    testFileChildB.addParents([testFile["lfn"]])
    testFileChildC.addParents([testFile["lfn"]])

    getChildrenAction = self.daoFactory(classname="DBSBufferFiles.GetChildren")
    childLFNs = getChildrenAction.execute(testFile["lfn"])

    assert len(childLFNs) == 3, \
        "ERROR: Parent does not have the right amount of children."

    goldenLFNs = ["/this/is/a/child/lfnA",
                  "/this/is/a/child/lfnB",
                  "/this/is/a/child/lfnC"]
    for childLFN in childLFNs:
        assert childLFN in goldenLFNs, \
            "ERROR: Unknown child lfn"
        goldenLFNs.remove(childLFN)

    return
def testLoad(self):
    """
    _testLoad_

    Test the loading of file meta data using the ID of a file and the LFN
    of a file.
    """
    checksums = {"adler32": "adler32", "cksum": "cksum", "md5": "md5"}
    testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024,
                              events = 10, checksums = checksums)
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.create()

    testFileB = DBSBufferFile(lfn = testFileA["lfn"])
    testFileB.load()
    testFileC = DBSBufferFile(id = testFileA["id"])
    testFileC.load()

    assert testFileA == testFileB, \
        "ERROR: File load by LFN didn't work"
    assert testFileA == testFileC, \
        "ERROR: File load by ID didn't work"

    assert type(testFileB["id"]) == int or type(testFileB["id"]) == long, \
        "ERROR: File id is not an integer type."
    assert type(testFileB["size"]) == int or type(testFileB["size"]) == long, \
        "ERROR: File size is not an integer type."
    assert type(testFileB["events"]) == int or type(testFileB["events"]) == long, \
        "ERROR: File events is not an integer type."

    assert type(testFileC["id"]) == int or type(testFileC["id"]) == long, \
        "ERROR: File id is not an integer type."
    assert type(testFileC["size"]) == int or type(testFileC["size"]) == long, \
        "ERROR: File size is not an integer type."
    assert type(testFileC["events"]) == int or type(testFileC["events"]) == long, \
        "ERROR: File events is not an integer type."

    testFileA.delete()
    return
def getFiles(self, name, tier='RECO', nFiles=12, site="malpaquet", nLumis=1):
    """
    Create some quick dummy test files
    """
    files = []

    for f in range(nFiles):
        testFile = DBSBufferFile(lfn='/data/store/random/random/RANDOM/test/0/%s-%s-%i.root' % (name, site, f),
                                 size=1024, events=20, checksums={'cksum': 1})
        testFile.setAlgorithm(appName=name, appVer="CMSSW_3_1_1",
                              appFam="RECO", psetHash="GIBBERISH",
                              configContent="MOREGIBBERISH")
        testFile.setDatasetPath("/%s/%s/%s" % (name, name, tier))
        lumis = []
        for i in range(nLumis):
            lumis.append((f * 100000) + i)
        testFile.addRun(Run(1, *lumis))
        testFile.setAcquisitionEra(name.split('-')[0])
        testFile.setProcessingVer("0")
        testFile.setGlobalTag("Weird")
        testFile.create()
        testFile.setLocation(site)
        files.append(testFile)

    count = 0
    for f in files:
        count += 1
        testFileChild = DBSBufferFile(lfn='/data/store/random/random/RANDOM/test/0/%s-%s-%i-child.root' % (name, site, count),
                                      size=1024, events=10,
                                      checksums={'cksum': 1})
        testFileChild.setAlgorithm(appName=name, appVer="CMSSW_3_1_1",
                                   appFam="RECO", psetHash="GIBBERISH",
                                   configContent="MOREGIBBERISH")
        testFileChild.setDatasetPath("/%s/%s_2/RECO" % (name, name))
        testFileChild.addRun(Run(1, *[45]))
        testFileChild.create()
        testFileChild.setLocation(site)
        testFileChild.addParents([f['lfn']])

    return files
def testSetLocation(self):
    """
    _testSetLocation_

    Create a file and add a couple locations.  Load the file from the
    database to make sure that the locations were set correctly.
    """
    testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024, events = 10)
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.addRun(Run(1, *[45]))
    testFileA.create()

    testFileA.setLocation(["se1.fnal.gov", "se1.cern.ch"])
    testFileA.setLocation(["bunkse1.fnal.gov", "bunkse1.cern.ch"],
                          immediateSave = False)

    testFileB = DBSBufferFile(id = testFileA["id"])
    testFileB.load()

    goldenLocations = ["se1.fnal.gov", "se1.cern.ch"]

    for location in testFileB["locations"]:
        assert location in goldenLocations, \
            "ERROR: Unknown file location"
        goldenLocations.remove(location)

    assert len(goldenLocations) == 0, \
        "ERROR: Some locations are missing"
    return
def stuffDatabase(self):
    """
    _stuffDatabase_

    Fill the dbsbuffer with some files and blocks.  We'll insert a total
    of 5 files spanning two blocks.  There will be a total of two datasets
    inserted into the database.

    We'll inject files with the location set as both an SE name and a
    PhEDEx node name.
    """
    myThread = threading.currentThread()

    buffer3Factory = DAOFactory(package="WMComponent.DBS3Buffer",
                                logger=myThread.logger,
                                dbinterface=myThread.dbi)
    insertWorkflow = buffer3Factory(classname="InsertWorkflow")
    insertWorkflow.execute("BogusRequest", "BogusTask", 0, 0, 0, 0)

    checksums = {"adler32": "1234", "cksum": "5678"}
    testFileA = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath(self.testDatasetA)
    testFileA.addRun(Run(2, *[45]))
    testFileA.create()

    testFileB = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileB.setDatasetPath(self.testDatasetA)
    testFileB.addRun(Run(2, *[45]))
    testFileB.create()

    testFileC = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileC.setDatasetPath(self.testDatasetA)
    testFileC.addRun(Run(2, *[45]))
    testFileC.create()

    self.testFilesA.append(testFileA)
    self.testFilesA.append(testFileB)
    self.testFilesA.append(testFileC)

    testFileD = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileD.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileD.setDatasetPath(self.testDatasetB)
    testFileD.addRun(Run(2, *[45]))
    testFileD.create()

    testFileE = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileE.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileE.setDatasetPath(self.testDatasetB)
    testFileE.addRun(Run(2, *[45]))
    testFileE.create()

    self.testFilesB.append(testFileD)
    self.testFilesB.append(testFileE)

    uploadFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                               logger=myThread.logger,
                               dbinterface=myThread.dbi)
    datasetAction = uploadFactory(classname="NewDataset")
    createAction = uploadFactory(classname="CreateBlocks")

    datasetAction.execute(datasetPath=self.testDatasetA)
    datasetAction.execute(datasetPath=self.testDatasetB)

    self.blockAName = self.testDatasetA + "#" + makeUUID()
    self.blockBName = self.testDatasetB + "#" + makeUUID()

    newBlockA = DBSBufferBlock(name=self.blockAName,
                               location="srm-cms.cern.ch",
                               datasetpath=None)
    newBlockA.setDataset(self.testDatasetA, 'data', 'VALID')
    newBlockA.status = 'Closed'

    newBlockB = DBSBufferBlock(name=self.blockBName,
                               location="srm-cms.cern.ch",
                               datasetpath=None)
    newBlockB.setDataset(self.testDatasetB, 'data', 'VALID')
    newBlockB.status = 'Closed'

    createAction.execute(blocks=[newBlockA, newBlockB])

    bufferFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                               logger=myThread.logger,
                               dbinterface=myThread.dbi)

    setBlock = bufferFactory(classname="DBSBufferFiles.SetBlock")
    setBlock.execute(testFileA["lfn"], self.blockAName)
    setBlock.execute(testFileB["lfn"], self.blockAName)
    setBlock.execute(testFileC["lfn"], self.blockAName)
    setBlock.execute(testFileD["lfn"], self.blockBName)
    setBlock.execute(testFileE["lfn"], self.blockBName)

    fileStatus = bufferFactory(classname="DBSBufferFiles.SetStatus")
    fileStatus.execute(testFileA["lfn"], "LOCAL")
    fileStatus.execute(testFileB["lfn"], "LOCAL")
    fileStatus.execute(testFileC["lfn"], "LOCAL")
    fileStatus.execute(testFileD["lfn"], "LOCAL")
    fileStatus.execute(testFileE["lfn"], "LOCAL")

    associateWorkflow = buffer3Factory(classname="DBSBufferFiles.AssociateWorkflowToFile")
    associateWorkflow.execute(testFileA["lfn"], "BogusRequest", "BogusTask")
    associateWorkflow.execute(testFileB["lfn"], "BogusRequest", "BogusTask")
    associateWorkflow.execute(testFileC["lfn"], "BogusRequest", "BogusTask")
    associateWorkflow.execute(testFileD["lfn"], "BogusRequest", "BogusTask")
    associateWorkflow.execute(testFileE["lfn"], "BogusRequest", "BogusTask")

    return
def testLocationsConstructor(self):
    """
    _testLocationsConstructor_

    Test to make sure that locations passed into the File() constructor
    are loaded from and saved to the database correctly.  Also test to make
    sure that the class behaves well when the location is passed in as a
    single string instead of a set.
    """
    testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024, events = 10,
                              locations = set(["se1.fnal.gov"]))
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.addRun(Run(1, *[45]))
    testFileA.create()

    testFileB = DBSBufferFile(lfn = "/this/is/a/lfn2", size = 1024, events = 10,
                              locations = "se1.fnal.gov")
    testFileB.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileB.addRun(Run(1, *[45]))
    testFileB.create()

    testFileC = DBSBufferFile(id = testFileA["id"])
    testFileC.load()

    goldenLocations = ["se1.fnal.gov"]

    for location in testFileC["locations"]:
        assert location in goldenLocations, \
            "ERROR: Unknown file location"
        goldenLocations.remove(location)

    assert len(goldenLocations) == 0, \
        "ERROR: Some locations are missing"

    testFileC = DBSBufferFile(id = testFileB["id"])
    testFileC.load()

    goldenLocations = ["se1.fnal.gov"]

    for location in testFileC["locations"]:
        assert location in goldenLocations, \
            "ERROR: Unknown file location"
        goldenLocations.remove(location)

    assert len(goldenLocations) == 0, \
        "ERROR: Some locations are missing"
    return
def testProperties(self):
    """
    _testProperties_

    Test added tags that use DBSBuffer to transfer from workload to DBS
    """
    testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.setValidStatus(validStatus="VALID")
    testFileA.setProcessingVer(ver="ProcVer")
    testFileA.setAcquisitionEra(era="AcqEra")
    testFileA.setGlobalTag(globalTag="GlobalTag")
    testFileA.setDatasetParent(datasetParent="Parent")
    testFileA.create()
    return
def stuffDatabase(self, custodialSite="srm-cms.cern.ch", spec="TestWorkload.pkl"):
    """
    _stuffDatabase_

    Fill the dbsbuffer with some files and blocks.  We'll insert a total
    of 5 files spanning two blocks.  There will be a total of two datasets
    inserted into the database.

    We'll inject files with the location set as both an SE name and a
    PhEDEx node name.
    """
    myThread = threading.currentThread()
    buffer3Factory = DAOFactory(package="WMComponent.DBS3Buffer",
                                logger=myThread.logger,
                                dbinterface=myThread.dbi)
    insertWorkflow = buffer3Factory(classname="InsertWorkflow")
    insertWorkflow.execute("BogusRequest", "BogusTask",
                           os.path.join(getTestBase(),
                                        "WMComponent_t/PhEDExInjector_t/specs/%s" % spec))

    checksums = {"adler32": "1234", "cksum": "5678"}
    testFileA = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath(self.testDatasetA)
    testFileA.setCustodialSite(custodialSite=custodialSite)
    testFileA.addRun(Run(2, *[45]))
    testFileA.create()

    testFileB = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileB.setDatasetPath(self.testDatasetA)
    testFileB.setCustodialSite(custodialSite=custodialSite)
    testFileB.addRun(Run(2, *[45]))
    testFileB.create()

    testFileC = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileC.setDatasetPath(self.testDatasetA)
    testFileC.setCustodialSite(custodialSite=custodialSite)
    testFileC.addRun(Run(2, *[45]))
    testFileC.create()

    self.testFilesA.append(testFileA)
    self.testFilesA.append(testFileB)
    self.testFilesA.append(testFileC)

    testFileD = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileD.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileD.setDatasetPath(self.testDatasetB)
    testFileD.setCustodialSite(custodialSite=custodialSite)
    testFileD.addRun(Run(2, *[45]))
    testFileD.create()

    testFileE = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileE.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileE.setDatasetPath(self.testDatasetB)
    testFileE.setCustodialSite(custodialSite=custodialSite)
    testFileE.addRun(Run(2, *[45]))
    testFileE.create()

    self.testFilesB.append(testFileD)
    self.testFilesB.append(testFileE)

    uploadFactory = DAOFactory(package="WMComponent.DBSUpload.Database",
                               logger=myThread.logger,
                               dbinterface=myThread.dbi)
    createBlock = uploadFactory(classname="SetBlockStatus")

    self.blockAName = self.testDatasetA + "#" + makeUUID()
    self.blockBName = self.testDatasetB + "#" + makeUUID()
    createBlock.execute(block=self.blockAName,
                        locations=["srm-cms.cern.ch"], open_status=1)
    createBlock.execute(block=self.blockBName,
                        locations=["srm-cms.cern.ch"], open_status=1)

    bufferFactory = DAOFactory(package="WMComponent.DBSBuffer.Database",
                               logger=myThread.logger,
                               dbinterface=myThread.dbi)

    setBlock = bufferFactory(classname="DBSBufferFiles.SetBlock")
    setBlock.execute(testFileA["lfn"], self.blockAName)
    setBlock.execute(testFileB["lfn"], self.blockAName)
    setBlock.execute(testFileC["lfn"], self.blockAName)
    setBlock.execute(testFileD["lfn"], self.blockBName)
    setBlock.execute(testFileE["lfn"], self.blockBName)

    fileStatus = bufferFactory(classname="DBSBufferFiles.SetStatus")
    fileStatus.execute(testFileA["lfn"], "LOCAL")
    fileStatus.execute(testFileB["lfn"], "LOCAL")
    fileStatus.execute(testFileC["lfn"], "LOCAL")
    fileStatus.execute(testFileD["lfn"], "LOCAL")
    fileStatus.execute(testFileE["lfn"], "LOCAL")

    associateWorkflow = buffer3Factory(classname="DBSBufferFiles.AssociateWorkflowToFile")
    associateWorkflow.execute(testFileA["lfn"], "BogusRequest", "BogusTask")
    associateWorkflow.execute(testFileB["lfn"], "BogusRequest", "BogusTask")
    associateWorkflow.execute(testFileC["lfn"], "BogusRequest", "BogusTask")
    associateWorkflow.execute(testFileD["lfn"], "BogusRequest", "BogusTask")
    associateWorkflow.execute(testFileE["lfn"], "BogusRequest", "BogusTask")
    return
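# A minimal usage sketch (an illustration, not from the original source):
# stuffDatabase() assumes the surrounding unittest.TestCase initializes the
# attributes it reads and appends to.  The dataset paths below are
# hypothetical placeholders.
def setUp(self):
    self.testDatasetA = "/BogusPrimary/Run2012Z-PromptReco-v1/RECO"  # hypothetical
    self.testDatasetB = "/BogusPrimary/CRUZET11-v1/RAW"              # hypothetical
    self.testFilesA = []
    self.testFilesB = []

def testInjection(self):
    self.stuffDatabase()                       # default custodial site and spec
    self.assertEqual(len(self.testFilesA), 3)  # three files go into block A
    self.assertEqual(len(self.testFilesB), 2)  # two files go into block B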
def testAddParents(self):
    """
    _testAddParents_

    Verify that the addParents() method works correctly even if the parents
    do not already exist in the database.
    """
    myThread = threading.currentThread()

    testFile = DBSBufferFile(lfn = "/this/is/a/lfnA", size = 1024,
                             events = 10, locations = "se1.fnal.gov")
    testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                          appFam = "RECO", psetHash = "GIBBERISH",
                          configContent = "MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    testParent = DBSBufferFile(lfn = "/this/is/a/lfnB", size = 1024,
                               events = 10, locations = "se1.fnal.gov")
    testParent.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                            appFam = "RECO", psetHash = "GIBBERISH",
                            configContent = "MOREGIBBERISH")
    testParent.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RAW")
    testParent.create()

    goldenLFNs = ["lfn1", "lfn2", "lfn3", "/this/is/a/lfnB"]
    testFile.addParents(goldenLFNs)

    verifyFile = DBSBufferFile(id = testFile["id"])
    verifyFile.load(parentage = 1)
    parentLFNs = verifyFile.getParentLFNs()

    for parentLFN in parentLFNs:
        self.assertTrue(parentLFN in goldenLFNs,
                        "Error: unknown lfn %s" % parentLFN)
        goldenLFNs.remove(parentLFN)

    self.assertEqual(len(goldenLFNs), 0, "Error: missing LFNs...")

    # Check that the bogus dataset is listed as inDBS
    sqlCommand = """SELECT in_dbs FROM dbsbuffer_algo_dataset_assoc das
                      INNER JOIN dbsbuffer_dataset ds ON das.dataset_id = ds.id
                      WHERE ds.path = 'bogus'"""
    status = myThread.dbi.processData(sqlCommand)[0].fetchall()[0][0]
    self.assertEqual(status, 1)

    # Now make sure the dummy files are listed as being in DBS
    sqlCommand = """SELECT status FROM dbsbuffer_file df
                      INNER JOIN dbsbuffer_algo_dataset_assoc das ON das.id = df.dataset_algo
                      INNER JOIN dbsbuffer_dataset ds ON das.dataset_id = ds.id
                      WHERE ds.path = '/bogus/dataset/path' """
    status = myThread.dbi.processData(sqlCommand)[0].fetchall()
    for entry in status:
        self.assertEqual(entry, ('AlreadyInDBS',))

    return
def testSetLocationByLFN(self):
    """
    _testSetLocationByLFN_
    """
    testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.addRun(Run(1, *[45]))
    testFileA.create()

    setLocationAction = self.daoFactory(classname="DBSBufferFiles.SetLocationByLFN")
    setLocationAction.execute(binds={'lfn': "/this/is/a/lfn",
                                     'sename': 'se1.cern.ch'})

    testFileB = DBSBufferFile(id=testFileA["id"])
    testFileB.load()

    self.assertEqual(testFileB['locations'], set(['se1.cern.ch']))
    return
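# A hedged sketch, not from the original source: assuming SetLocationByLFN
# follows the usual WMCore DAO convention where `binds` may also be a list
# of dictionaries, several files could be placed in one call.  The LFNs
# below are hypothetical.
#
#     binds = [{'lfn': "/this/is/a/lfn1", 'sename': 'se1.cern.ch'},
#              {'lfn': "/this/is/a/lfn2", 'sename': 'se1.fnal.gov'}]
#     setLocationAction.execute(binds=binds)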
def testAddChildTransaction(self):
    """
    _testAddChildTransaction_

    Add a child to some parent files and make sure that all the parentage
    information is loaded/stored correctly from the database.  Rollback the
    addition of one of the children and then verify that the child does, in
    fact, only have one parent.
    """
    testFileParentA = DBSBufferFile(lfn = "/this/is/a/parent/lfnA",
                                    size = 1024, events = 20)
    testFileParentA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                                 appFam = "RECO", psetHash = "GIBBERISH",
                                 configContent = "MOREGIBBERISH")
    testFileParentA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileParentA.addRun(Run(1, *[45]))

    testFileParentB = DBSBufferFile(lfn = "/this/is/a/parent/lfnB",
                                    size = 1024, events = 20)
    testFileParentB.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                                 appFam = "RECO", psetHash = "GIBBERISH",
                                 configContent = "MOREGIBBERISH")
    testFileParentB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileParentB.addRun(Run(1, *[45]))

    testFileParentA.create()
    testFileParentB.create()

    testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024,
                              events = 10)
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.addRun(Run(1, *[45]))
    testFileA.create()

    testFileParentA.addChildren("/this/is/a/lfn")

    myThread = threading.currentThread()
    myThread.transaction.begin()

    testFileParentB.addChildren("/this/is/a/lfn")

    testFileB = DBSBufferFile(id = testFileA["id"])
    testFileB.load(parentage = 1)

    goldenFiles = [testFileParentA, testFileParentB]
    for parentFile in testFileB["parents"]:
        assert parentFile in goldenFiles, \
            "ERROR: Unknown parent file"
        goldenFiles.remove(parentFile)

    assert len(goldenFiles) == 0, \
        "ERROR: Some parents are missing"

    myThread.transaction.rollback()
    testFileB.load(parentage = 1)

    goldenFiles = [testFileParentA]
    for parentFile in testFileB["parents"]:
        assert parentFile in goldenFiles, \
            "ERROR: Unknown parent file"
        goldenFiles.remove(parentFile)

    assert len(goldenFiles) == 0, \
        "ERROR: Some parents are missing"
    return
def testGetParentStatusDAO(self):
    """
    _testGetParentStatusDAO_

    Verify that the GetParentStatus DAO correctly returns the status of a
    file's parents.
    """
    testFileChild = DBSBufferFile(lfn = "/this/is/a/child/lfnA",
                                  size = 1024, events = 20)
    testFileChild.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                               appFam = "RECO", psetHash = "GIBBERISH",
                               configContent = "MOREGIBBERISH")
    testFileChild.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileChild.create()

    testFile = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024,
                             events = 10)
    testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                          appFam = "RECO", psetHash = "GIBBERISH",
                          configContent = "MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    testFileChild.addParents([testFile["lfn"]])

    getStatusAction = self.daoFactory(classname = "DBSBufferFiles.GetParentStatus")
    parentStatus = getStatusAction.execute(testFileChild["lfn"])

    assert len(parentStatus) == 1, \
        "ERROR: Wrong number of statuses returned."
    assert parentStatus[0] == "NOTUPLOADED", \
        "ERROR: Wrong status returned."

    return
def testSetLocationTransaction(self):
    """
    _testSetLocationTransaction_

    Create a file at specific locations and commit everything to the
    database.  Reload the file from the database and verify that the
    locations are correct.  Rollback the database transaction and once
    again reload the file.  Verify that the original locations are back.
    """
    testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024,
                              events = 10)
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.addRun(Run(1, *[45]))
    testFileA.create()
    testFileA.setLocation(["se1.fnal.gov"])

    myThread = threading.currentThread()
    myThread.transaction.begin()

    testFileA.setLocation(["se1.cern.ch"])
    testFileA.setLocation(["bunkse1.fnal.gov", "bunkse1.cern.ch"],
                          immediateSave = False)

    testFileB = DBSBufferFile(id = testFileA["id"])
    testFileB.load()

    goldenLocations = ["se1.fnal.gov", "se1.cern.ch"]
    for location in testFileB["locations"]:
        assert location in goldenLocations, \
            "ERROR: Unknown file location"
        goldenLocations.remove(location)

    assert len(goldenLocations) == 0, \
        "ERROR: Some locations are missing"

    myThread.transaction.rollback()
    testFileB.load()

    goldenLocations = ["se1.fnal.gov"]
    for location in testFileB["locations"]:
        assert location in goldenLocations, \
            "ERROR: Unknown file location"
        goldenLocations.remove(location)

    assert len(goldenLocations) == 0, \
        "ERROR: Some locations are missing"
    return
def testCreateTransaction(self):
    """
    _testCreateTransaction_

    Begin a transaction and then create a file in the database.  Afterwards,
    rollback the transaction.  Use the File class's exists() method to
    verify that the file doesn't exist before it was created, exists after
    it was created, and doesn't exist after the transaction was rolled back.
    """
    myThread = threading.currentThread()
    myThread.transaction.begin()

    testFile = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024,
                             events = 10)
    testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                          appFam = "RECO", psetHash = "GIBBERISH",
                          configContent = "MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    assert testFile.exists() == False, \
        "ERROR: File exists before it was created"

    testFile.addRun(Run(1, *[45]))
    testFile.create()

    assert testFile.exists() > 0, \
        "ERROR: File does not exist after it was created"

    myThread.transaction.rollback()

    assert testFile.exists() == False, \
        "ERROR: File exists after transaction was rolled back."
    return
def testGetParentLFNs(self):
    """
    _testGetParentLFNs_

    Create three files and set them to be parents of a fourth file.  Check
    to make sure that getParentLFNs() on the child file returns the correct
    LFNs.
    """
    testFileParentA = DBSBufferFile(lfn="/this/is/a/parent/lfnA",
                                    size=1024, events=20)
    testFileParentA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                 appFam="RECO", psetHash="GIBBERISH",
                                 configContent="MOREGIBBERISH")
    testFileParentA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileParentA.addRun(Run(1, *[45]))

    testFileParentB = DBSBufferFile(lfn="/this/is/a/parent/lfnB",
                                    size=1024, events=20)
    testFileParentB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                 appFam="RECO", psetHash="GIBBERISH",
                                 configContent="MOREGIBBERISH")
    testFileParentB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileParentB.addRun(Run(1, *[45]))

    testFileParentC = DBSBufferFile(lfn="/this/is/a/parent/lfnC",
                                    size=1024, events=20)
    testFileParentC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                                 appFam="RECO", psetHash="GIBBERISH",
                                 configContent="MOREGIBBERISH")
    testFileParentC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileParentC.addRun(Run(1, *[45]))

    testFileParentA.create()
    testFileParentB.create()
    testFileParentC.create()

    testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                          appFam="RECO", psetHash="GIBBERISH",
                          configContent="MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.addRun(Run(1, *[45]))
    testFile.create()

    testFile.addParents([testFileParentA["lfn"], testFileParentB["lfn"],
                         testFileParentC["lfn"]])

    parentLFNs = testFile.getParentLFNs()

    assert len(parentLFNs) == 3, \
        "ERROR: Child does not have the right number of parents"

    goldenLFNs = ["/this/is/a/parent/lfnA",
                  "/this/is/a/parent/lfnB",
                  "/this/is/a/parent/lfnC"]
    for parentLFN in parentLFNs:
        assert parentLFN in goldenLFNs, \
            "ERROR: Unknown parent lfn"
        goldenLFNs.remove(parentLFN)

    testFile.delete()
    testFileParentA.delete()
    testFileParentB.delete()
    testFileParentC.delete()
    return
def stuffDatabase(self):
    """
    _stuffDatabase_

    Fill the dbsbuffer with some files and blocks.  We'll insert a total
    of 5 files spanning two blocks.  There will be a total of two datasets
    inserted into the database.

    All files will be already in GLOBAL and in_phedex.
    """
    myThread = threading.currentThread()

    buffer3Factory = DAOFactory(package="WMComponent.DBS3Buffer",
                                logger=myThread.logger,
                                dbinterface=myThread.dbi)
    insertWorkflow = buffer3Factory(classname="InsertWorkflow")
    insertWorkflow.execute("BogusRequestA", "BogusTask", 0, 0, 0, 0)
    insertWorkflow.execute("BogusRequestB", "BogusTask", 0, 0, 0, 0)

    checksums = {"adler32": "1234", "cksum": "5678"}
    testFileA = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath(self.testDatasetA)
    testFileA.addRun(Run(2, *[45]))
    testFileA.create()

    testFileB = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileB.setDatasetPath(self.testDatasetA)
    testFileB.addRun(Run(2, *[45]))
    testFileB.create()

    testFileC = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileC.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileC.setDatasetPath(self.testDatasetA)
    testFileC.addRun(Run(2, *[45]))
    testFileC.create()

    self.testFilesA.append(testFileA)
    self.testFilesA.append(testFileB)
    self.testFilesA.append(testFileC)

    testFileD = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileD.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileD.setDatasetPath(self.testDatasetB)
    testFileD.addRun(Run(2, *[45]))
    testFileD.create()

    testFileE = DBSBufferFile(lfn=makeUUID(), size=1024, events=10,
                              checksums=checksums,
                              locations=set(["srm-cms.cern.ch"]))
    testFileE.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileE.setDatasetPath(self.testDatasetB)
    testFileE.addRun(Run(2, *[45]))
    testFileE.create()

    self.testFilesB.append(testFileD)
    self.testFilesB.append(testFileE)

    uploadFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                               logger=myThread.logger,
                               dbinterface=myThread.dbi)
    datasetAction = uploadFactory(classname="NewDataset")
    createAction = uploadFactory(classname="CreateBlocks")

    datasetAction.execute(datasetPath=self.testDatasetA)
    datasetAction.execute(datasetPath=self.testDatasetB)

    self.blockAName = self.testDatasetA + "#" + makeUUID()
    self.blockBName = self.testDatasetB + "#" + makeUUID()

    newBlockA = DBSBufferBlock(name=self.blockAName,
                               location="srm-cms.cern.ch",
                               datasetpath=None)
    newBlockA.setDataset(self.testDatasetA, 'data', 'VALID')
    newBlockA.status = 'Closed'

    newBlockB = DBSBufferBlock(name=self.blockBName,
                               location="srm-cms.cern.ch",
                               datasetpath=None)
    newBlockB.setDataset(self.testDatasetB, 'data', 'VALID')
    newBlockB.status = 'Closed'

    createAction.execute(blocks=[newBlockA, newBlockB])

    bufferFactory = DAOFactory(package="WMComponent.DBS3Buffer",
                               logger=myThread.logger,
                               dbinterface=myThread.dbi)

    setBlock = bufferFactory(classname="DBSBufferFiles.SetBlock")
    setBlock.execute(testFileA["lfn"], self.blockAName)
    setBlock.execute(testFileB["lfn"], self.blockAName)
    setBlock.execute(testFileC["lfn"], self.blockAName)
    setBlock.execute(testFileD["lfn"], self.blockBName)
    setBlock.execute(testFileE["lfn"], self.blockBName)

    fileStatus = bufferFactory(classname="DBSBufferFiles.SetStatus")
    fileStatus.execute(testFileA["lfn"], "GLOBAL")
    fileStatus.execute(testFileB["lfn"], "GLOBAL")
    fileStatus.execute(testFileC["lfn"], "GLOBAL")
    fileStatus.execute(testFileD["lfn"], "GLOBAL")
    fileStatus.execute(testFileE["lfn"], "GLOBAL")

    phedexStatus = bufferFactory(classname="DBSBufferFiles.SetPhEDExStatus")
    phedexStatus.execute(testFileA["lfn"], 1)
    phedexStatus.execute(testFileB["lfn"], 1)
    phedexStatus.execute(testFileC["lfn"], 1)
    phedexStatus.execute(testFileD["lfn"], 1)
    phedexStatus.execute(testFileE["lfn"], 1)

    associateWorkflow = buffer3Factory(classname="DBSBufferFiles.AssociateWorkflowToFile")
    associateWorkflow.execute(testFileA["lfn"], "BogusRequestA", "BogusTask")
    associateWorkflow.execute(testFileB["lfn"], "BogusRequestA", "BogusTask")
    associateWorkflow.execute(testFileC["lfn"], "BogusRequestA", "BogusTask")
    associateWorkflow.execute(testFileD["lfn"], "BogusRequestB", "BogusTask")
    associateWorkflow.execute(testFileE["lfn"], "BogusRequestB", "BogusTask")

    # Make the desired subscriptions
    insertSubAction = buffer3Factory(classname="NewSubscription")
    datasetA = DBSBufferDataset(path=self.testDatasetA)
    datasetB = DBSBufferDataset(path=self.testDatasetB)
    workload = WMWorkloadHelper()
    workload.load(os.path.join(getTestBase(),
                               'WMComponent_t/PhEDExInjector_t/specs/TestWorkload.pkl'))
    insertSubAction.execute(datasetA.exists(),
                            workload.getSubscriptionInformation()[self.testDatasetA])
    insertSubAction.execute(datasetB.exists(),
                            workload.getSubscriptionInformation()[self.testDatasetB])
    return
def testCountFilesDAO(self):
    """
    _testCountFilesDAO_

    Verify that the CountFiles DAO object functions correctly.
    """
    testFileA = DBSBufferFile(lfn = "/this/is/a/lfnA", size = 1024,
                              events = 10, locations = "se1.fnal.gov")
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.create()

    testFileB = DBSBufferFile(lfn = "/this/is/a/lfnB", size = 1024,
                              events = 10, locations = "se1.fnal.gov")
    testFileB.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileB.create()

    testFileC = DBSBufferFile(lfn = "/this/is/a/lfnC", size = 1024,
                              events = 10, locations = "se1.fnal.gov")
    testFileC.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileC.create()

    countAction = self.daoFactory(classname = "CountFiles")

    assert countAction.execute() == 3, \
        "Error: Wrong number of files counted in DBS Buffer."

    return
def testLoad(self):
    """
    _testLoad_

    Test the loading of file meta data using the ID of a file and the
    LFN of a file.
    """
    checksums = {"adler32": "adler32", "cksum": "cksum", "md5": "md5"}
    testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10,
                              checksums=checksums)
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.create()

    testFileB = DBSBufferFile(lfn=testFileA["lfn"])
    testFileB.load()
    testFileC = DBSBufferFile(id=testFileA["id"])
    testFileC.load()

    assert testFileA == testFileB, \
        "ERROR: File load by LFN didn't work"
    assert testFileA == testFileC, \
        "ERROR: File load by ID didn't work"

    assert type(testFileB["id"]) == int or type(testFileB["id"]) == long, \
        "ERROR: File id is not an integer type."
    assert type(testFileB["size"]) == int or type(testFileB["size"]) == long, \
        "ERROR: File size is not an integer type."
    assert type(testFileB["events"]) == int or type(testFileB["events"]) == long, \
        "ERROR: File events is not an integer type."

    assert type(testFileC["id"]) == int or type(testFileC["id"]) == long, \
        "ERROR: File id is not an integer type."
    assert type(testFileC["size"]) == int or type(testFileC["size"]) == long, \
        "ERROR: File size is not an integer type."
    assert type(testFileC["events"]) == int or type(testFileC["events"]) == long, \
        "ERROR: File events is not an integer type."

    testFileA.delete()
    return
def testGetChildrenDAO(self):
    """
    _testGetChildrenDAO_

    Verify that the GetChildren DAO correctly returns the LFNs of a file's
    children.
    """
    testFileChildA = DBSBufferFile(lfn = "/this/is/a/child/lfnA",
                                   size = 1024, events = 20)
    testFileChildA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                                appFam = "RECO", psetHash = "GIBBERISH",
                                configContent = "MOREGIBBERISH")
    testFileChildA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    testFileChildB = DBSBufferFile(lfn = "/this/is/a/child/lfnB",
                                   size = 1024, events = 20)
    testFileChildB.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                                appFam = "RECO", psetHash = "GIBBERISH",
                                configContent = "MOREGIBBERISH")
    testFileChildB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    testFileChildC = DBSBufferFile(lfn = "/this/is/a/child/lfnC",
                                   size = 1024, events = 20)
    testFileChildC.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                                appFam = "RECO", psetHash = "GIBBERISH",
                                configContent = "MOREGIBBERISH")
    testFileChildC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    testFileChildA.create()
    testFileChildB.create()
    testFileChildC.create()

    testFile = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024,
                             events = 10)
    testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                          appFam = "RECO", psetHash = "GIBBERISH",
                          configContent = "MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    testFileChildA.addParents([testFile["lfn"]])
    testFileChildB.addParents([testFile["lfn"]])
    testFileChildC.addParents([testFile["lfn"]])

    getChildrenAction = self.daoFactory(classname = "DBSBufferFiles.GetChildren")
    childLFNs = getChildrenAction.execute(testFile["lfn"])

    assert len(childLFNs) == 3, \
        "ERROR: Parent does not have the right number of children."

    goldenLFNs = ["/this/is/a/child/lfnA",
                  "/this/is/a/child/lfnB",
                  "/this/is/a/child/lfnC"]
    for childLFN in childLFNs:
        assert childLFN in goldenLFNs, \
            "ERROR: Unknown child lfn"
        goldenLFNs.remove(childLFN)

    return
def testSetLocation(self):
    """
    _testSetLocation_

    Create a file and add a couple locations.  Load the file from the
    database to make sure that the locations were set correctly.
    """
    testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.addRun(Run(1, *[45]))
    testFileA.create()

    testFileA.setLocation(["se1.fnal.gov", "se1.cern.ch"])
    testFileA.setLocation(["bunkse1.fnal.gov", "bunkse1.cern.ch"],
                          immediateSave=False)

    testFileB = DBSBufferFile(id=testFileA["id"])
    testFileB.load()

    goldenLocations = ["se1.fnal.gov", "se1.cern.ch"]
    for location in testFileB["locations"]:
        assert location in goldenLocations, \
            "ERROR: Unknown file location"
        goldenLocations.remove(location)

    assert len(goldenLocations) == 0, \
        "ERROR: Some locations are missing"
    return
def testBulkLoad(self):
    """
    _testBulkLoad_

    Can we load in bulk?
    """
    addToBuffer = DBSBufferUtil()
    bulkLoad = self.daoFactory(classname = "DBSBufferFiles.LoadBulkFilesByID")

    testFileChildA = DBSBufferFile(lfn = "/this/is/a/child/lfnA",
                                   size = 1024, events = 20)
    testFileChildA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                                appFam = "RECO", psetHash = "GIBBERISH",
                                configContent = "MOREGIBBERISH")
    testFileChildA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    testFileChildB = DBSBufferFile(lfn = "/this/is/a/child/lfnB",
                                   size = 1024, events = 20)
    testFileChildB.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                                appFam = "RECO", psetHash = "GIBBERISH",
                                configContent = "MOREGIBBERISH")
    testFileChildB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    testFileChildC = DBSBufferFile(lfn = "/this/is/a/child/lfnC",
                                   size = 1024, events = 20)
    testFileChildC.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                                appFam = "RECO", psetHash = "GIBBERISH",
                                configContent = "MOREGIBBERISH")
    testFileChildC.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    testFileChildA.create()
    testFileChildB.create()
    testFileChildC.create()

    testFileChildA.setLocation(["se1.fnal.gov", "se1.cern.ch"])
    testFileChildB.setLocation(["se1.fnal.gov", "se1.cern.ch"])
    testFileChildC.setLocation(["se1.fnal.gov", "se1.cern.ch"])

    runSet = set()
    runSet.add(Run(1, *[45]))
    runSet.add(Run(2, *[67, 68]))
    testFileChildA.addRunSet(runSet)
    testFileChildB.addRunSet(runSet)
    testFileChildC.addRunSet(runSet)

    testFileChildA.save()
    testFileChildB.save()
    testFileChildC.save()

    setCksumAction = self.daoFactory(classname = "DBSBufferFiles.AddChecksumByLFN")
    binds = [{'lfn': "/this/is/a/child/lfnA", 'cktype': 'adler32', 'cksum': 201},
             {'lfn': "/this/is/a/child/lfnA", 'cktype': 'cksum', 'cksum': 101},
             {'lfn': "/this/is/a/child/lfnB", 'cktype': 'adler32', 'cksum': 201},
             {'lfn': "/this/is/a/child/lfnB", 'cktype': 'cksum', 'cksum': 101},
             {'lfn': "/this/is/a/child/lfnC", 'cktype': 'adler32', 'cksum': 201},
             {'lfn': "/this/is/a/child/lfnC", 'cktype': 'cksum', 'cksum': 101}]
    setCksumAction.execute(bulkList = binds)

    testFile = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024,
                             events = 10)
    testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                          appFam = "RECO", psetHash = "GIBBERISH",
                          configContent = "MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFile.create()

    testFileChildA.addParents([testFile["lfn"]])
    testFileChildB.addParents([testFile["lfn"]])
    testFileChildC.addParents([testFile["lfn"]])

    binds = [{'id': testFileChildA.exists()},
             {'id': testFileChildB.exists()},
             {'id': testFileChildC.exists()}]
    listOfFiles = addToBuffer.loadDBSBufferFilesBulk(fileObjs = binds)

    compareList = ['locations', 'psetHash', 'configContent', 'appName',
                   'appVer', 'appFam', 'events', 'datasetPath', 'runs']
    for f in listOfFiles:
        self.assertTrue(f['lfn'] in ["/this/is/a/child/lfnA",
                                     "/this/is/a/child/lfnB",
                                     "/this/is/a/child/lfnC"],
                        "Unknown file in loaded results")
        self.assertEqual(f['checksums'], {'adler32': '201', 'cksum': '101'})
        for parent in f['parents']:
            self.assertEqual(parent['lfn'], testFile['lfn'])
        for key in compareList:
            self.assertEqual(f[key], testFileChildA[key])
def testProperties(self):
    """
    _testProperties_

    Test added tags that use DBSBuffer to transfer from workload to DBS.
    """
    testFileA = DBSBufferFile(lfn = "/this/is/a/lfn", size = 1024,
                              events = 10)
    testFileA.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                           appFam = "RECO", psetHash = "GIBBERISH",
                           configContent = "MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.setValidStatus(validStatus = "VALID")
    testFileA.setProcessingVer(ver = "ProcVer")
    testFileA.setAcquisitionEra(era = "AcqEra")
    testFileA.setGlobalTag(globalTag = "GlobalTag")
    testFileA.setDatasetParent(datasetParent = "Parent")
    testFileA.create()

    # There are no accessors for these fields because load() is never
    # called, so check them directly through the DAS DAO.
    action = self.daoFactory2(classname = "LoadInfoFromDAS")
    das = action.execute(ids = [1])[0]
    self.assertEqual(das['Parent'], 'Parent')
    self.assertEqual(das['GlobalTag'], 'GlobalTag')
    self.assertEqual(das['ValidStatus'], 'VALID')
def testLocationsConstructor(self):
    """
    _testLocationsConstructor_

    Test to make sure that locations passed into the File() constructor
    are saved to and loaded from the database correctly.  Also test to
    make sure that the class behaves well when the location is passed in
    as a single string instead of a set.
    """
    testFileA = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10,
                              locations=set(["se1.fnal.gov"]))
    testFileA.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileA.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileA.addRun(Run(1, *[45]))
    testFileA.create()

    testFileB = DBSBufferFile(lfn="/this/is/a/lfn2", size=1024, events=10,
                              locations="se1.fnal.gov")
    testFileB.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                           appFam="RECO", psetHash="GIBBERISH",
                           configContent="MOREGIBBERISH")
    testFileB.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")
    testFileB.addRun(Run(1, *[45]))
    testFileB.create()

    testFileC = DBSBufferFile(id=testFileA["id"])
    testFileC.load()

    goldenLocations = ["se1.fnal.gov"]
    for location in testFileC["locations"]:
        assert location in goldenLocations, \
            "ERROR: Unknown file location"
        goldenLocations.remove(location)

    assert len(goldenLocations) == 0, \
        "ERROR: Some locations are missing"

    testFileC = DBSBufferFile(id=testFileB["id"])
    testFileC.load()

    goldenLocations = ["se1.fnal.gov"]
    for location in testFileC["locations"]:
        assert location in goldenLocations, \
            "ERROR: Unknown file location"
        goldenLocations.remove(location)

    assert len(goldenLocations) == 0, \
        "ERROR: Some locations are missing"
    return
def addFileToDBS(self, jobReportFile, task):
    """
    _addFileToDBS_

    Add a file that was output from a job to the DBS buffer.
    """
    datasetInfo = jobReportFile["dataset"]

    dbsFile = DBSBufferFile(lfn = jobReportFile["lfn"],
                            size = jobReportFile["size"],
                            events = jobReportFile["events"],
                            checksums = jobReportFile["checksums"],
                            status = "NOTUPLOADED")
    dbsFile.setAlgorithm(appName = datasetInfo["applicationName"],
                         appVer = datasetInfo["applicationVersion"],
                         appFam = jobReportFile["module_label"],
                         psetHash = "GIBBERISH",
                         configContent = jobReportFile.get('configURL'))
    dbsFile.setDatasetPath("/%s/%s/%s" % (datasetInfo["primaryDataset"],
                                          datasetInfo["processedDataset"],
                                          datasetInfo["dataTier"]))
    dbsFile.setValidStatus(validStatus = jobReportFile.get("validStatus", None))
    dbsFile.setProcessingVer(ver = jobReportFile.get('processingVer', None))
    dbsFile.setAcquisitionEra(era = jobReportFile.get('acquisitionEra', None))
    dbsFile.setGlobalTag(globalTag = jobReportFile.get('globalTag', None))
    dbsFile.setCustodialSite(custodialSite = jobReportFile.get('custodialSite', None))
    dbsFile['task'] = task

    for run in jobReportFile["runs"]:
        newRun = Run(runNumber = run.run)
        newRun.extend(run.lumis)
        dbsFile.addRun(newRun)

    dbsFile.setLocation(se = list(jobReportFile["locations"])[0],
                        immediateSave = False)
    self.dbsFilesToCreate.append(dbsFile)
    return
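# A minimal sketch of the jobReportFile structure addFileToDBS() consumes.
# Every key below is one the method actually reads; the concrete values,
# the FakeRun stand-in and the 'accountant' name are hypothetical
# illustrations, not from the original source.  The optional fields
# (configURL, validStatus, processingVer, acquisitionEra, globalTag,
# custodialSite) are read with .get() and may be omitted.
from collections import namedtuple

# Stand-in exposing the two attributes the run loop reads (run.run and
# run.lumis); the real objects are WMCore Run instances.
FakeRun = namedtuple("FakeRun", ["run", "lumis"])

exampleReportFile = {
    "lfn": "/store/data/example/file.root",        # hypothetical LFN
    "size": 1024,
    "events": 10,
    "checksums": {"adler32": "1234", "cksum": "5678"},
    "module_label": "RECOoutput",                  # becomes appFam
    "dataset": {"applicationName": "cmsRun",
                "applicationVersion": "CMSSW_2_1_8",
                "primaryDataset": "Cosmics",
                "processedDataset": "CRUZET09-PromptReco-v1",
                "dataTier": "RECO"},
    "runs": [FakeRun(run=1, lumis=[45])],
    "locations": ["se1.fnal.gov"],
}

# 'accountant' stands for whatever object exposes addFileToDBS() and a
# dbsFilesToCreate list; note the file is only queued in that list here,
# not yet created in the database.
# accountant.addFileToDBS(exampleReportFile, task="/BogusRequest/BogusTask")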
def testCreateDeleteExists(self):
    """
    _testCreateDeleteExists_

    Test the create(), delete() and exists() methods of the file class by
    creating and deleting a file.  The exists() method will be called
    before and after creation and after deletion.
    """
    testFile = DBSBufferFile(lfn="/this/is/a/lfn", size=1024, events=10)
    testFile.setAlgorithm(appName="cmsRun", appVer="CMSSW_2_1_8",
                          appFam="RECO", psetHash="GIBBERISH",
                          configContent="MOREGIBBERISH")
    testFile.setDatasetPath("/Cosmics/CRUZET09-PromptReco-v1/RECO")

    assert testFile.exists() == False, \
        "ERROR: File exists before it was created"

    testFile.addRun(Run(1, *[45]))
    testFile.create()

    assert testFile.exists() > 0, \
        "ERROR: File does not exist after it was created"

    testFile.delete()

    assert testFile.exists() == False, \
        "ERROR: File exists after it has been deleted"
    return