def loadDBSBufferFilesBulk(self, fileObjs):
    """
    _loadDBSBufferFilesBulk_

    Bulk-load full DBSBufferFile objects for the given file objects.

    Yes, this is a stupid place to put it.
    No, there's not better place.

    :param fileObjs: iterable of file-like mappings, each carrying an 'id' key
    :returns: list of DBSBufferFile objects with 'runInfo' expanded into
              Run objects and 'parentLFNs' expanded into parent file stubs
    """
    myThread = threading.currentThread()

    existingTransaction = self.beginTransaction()

    factory = DAOFactory(package = "WMComponent.DBSBuffer.Database",
                         logger = myThread.logger,
                         dbinterface = myThread.dbi)

    # Collect the database IDs for the single bulk DAO call
    binds = [f["id"] for f in fileObjs]

    loadFiles = factory(classname = "DBSBufferFiles.LoadBulkFilesByID")
    results = loadFiles.execute(files = binds, conn = self.getDBConn(),
                                transaction = self.existingTransaction())

    dbsFiles = []
    for entry in results:
        # Promote each raw result row into a DBSBufferFile
        dbsfile = DBSBufferFile(id = entry['id'])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if 'runInfo' in dbsfile:
            # Replace the raw run info mapping with real Run objects
            for runNumber, lumis in dbsfile['runInfo'].items():
                run = Run(runNumber = runNumber)
                run.extend(lumis)
                dbsfile.addRun(run)
            del dbsfile['runInfo']
        if 'parentLFNs' in dbsfile:
            # Attach parents as lightweight DBSBufferFile stubs
            for lfn in dbsfile['parentLFNs']:
                dbsfile['parents'].add(DBSBufferFile(lfn = lfn))
            del dbsfile['parentLFNs']

    self.commitTransaction(existingTransaction)
    return dbsFiles
def loadDBSBufferFilesBulk(self, fileObjs):
    """
    _loadDBSBufferFilesBulk_

    Load DBSBufferFile objects in bulk for the given file objects.
    (Yes, this is a stupid place to put it.  No, there's not better place.)
    """
    myThread = threading.currentThread()
    dbsFiles = []
    existingTransaction = self.beginTransaction()

    factory = DAOFactory(package = "WMComponent.DBSBuffer.Database",
                         logger = myThread.logger,
                         dbinterface = myThread.dbi)

    binds = []
    for fileObj in fileObjs:
        binds.append(fileObj["id"])

    loader = factory(classname = "DBSBufferFiles.LoadBulkFilesByID")
    rows = loader.execute(files = binds, conn = self.getDBConn(),
                          transaction = self.existingTransaction())

    for row in rows:
        # Wrap each raw result row in a DBSBufferFile object
        loaded = DBSBufferFile(id = row['id'])
        loaded.update(row)
        dbsFiles.append(loaded)

    for loaded in dbsFiles:
        if 'runInfo' in loaded.keys():
            # Swap the stored run info for genuine Run objects
            for runNumber in loaded['runInfo'].keys():
                runObj = Run(runNumber = runNumber)
                runObj.extend(loaded['runInfo'][runNumber])
                loaded.addRun(runObj)
            del loaded['runInfo']
        if 'parentLFNs' in loaded.keys():
            # Register each parent LFN as a stub parent file
            for parentLFN in loaded['parentLFNs']:
                parentStub = DBSBufferFile(lfn = parentLFN)
                loaded['parents'].add(parentStub)
            del loaded['parentLFNs']

    self.commitTransaction(existingTransaction)
    return dbsFiles
def findUploadableFilesByDAS(self, das):
    """
    _findUploadableDAS_

    Find all the Dataset-Algo files available with uploadable files.

    :param das: Dataset-Algo identifier passed through to the DAO
    :returns: list of DBSBufferFile objects with 'runInfo' expanded into
              Run objects and 'parentLFNs' expanded into parent file stubs
    """
    myThread = threading.currentThread()
    existingTransaction = self.beginTransaction()

    factory = DAOFactory(package = "WMComponent.DBSUpload.Database",
                         logger = myThread.logger,
                         dbinterface = myThread.dbi)

    findFiles = factory(classname = "LoadDBSFilesByDAS")
    results = findFiles.execute(das = das, conn = self.getDBConn(),
                                transaction = self.existingTransaction())

    dbsFiles = []
    for entry in results:
        # Promote each raw result row into a DBSBufferFile
        dbsfile = DBSBufferFile(id = entry['id'])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if 'runInfo' in dbsfile:
            # Replace the raw run info mapping with real Run objects
            for runNumber, lumis in dbsfile['runInfo'].items():
                run = Run(runNumber = runNumber)
                run.extend(lumis)
                dbsfile.addRun(run)
            del dbsfile['runInfo']
        if 'parentLFNs' in dbsfile:
            # Attach parents as lightweight DBSBufferFile stubs
            for lfn in dbsfile['parentLFNs']:
                dbsfile['parents'].add(DBSBufferFile(lfn = lfn))
            del dbsfile['parentLFNs']

    self.commitTransaction(existingTransaction)
    return dbsFiles
def findUploadableFilesByDAS(self, das):
    """
    _findUploadableDAS_

    Find all the Dataset-Algo files available with uploadable files.
    """
    myThread = threading.currentThread()
    existingTransaction = self.beginTransaction()
    dbsFiles = []

    factory = DAOFactory(package = "WMComponent.DBSUpload.Database",
                         logger = myThread.logger,
                         dbinterface = myThread.dbi)

    finder = factory(classname = "LoadDBSFilesByDAS")
    rows = finder.execute(das = das, conn = self.getDBConn(),
                          transaction = self.existingTransaction())

    for row in rows:
        # Wrap each raw result row in a DBSBufferFile object
        loaded = DBSBufferFile(id = row['id'])
        loaded.update(row)
        dbsFiles.append(loaded)

    for loaded in dbsFiles:
        if 'runInfo' in loaded.keys():
            # Swap the stored run info for genuine Run objects
            for runNumber in loaded['runInfo'].keys():
                runObj = Run(runNumber = runNumber)
                runObj.extend(loaded['runInfo'][runNumber])
                loaded.addRun(runObj)
            del loaded['runInfo']
        if 'parentLFNs' in loaded.keys():
            # Register each parent LFN as a stub parent file
            for parentLFN in loaded['parentLFNs']:
                parentStub = DBSBufferFile(lfn = parentLFN)
                loaded['parents'].add(parentStub)
            del loaded['parentLFNs']

    self.commitTransaction(existingTransaction)
    return dbsFiles
def loadFilesFromBlocks(self, blockID):
    """
    _loadFilesFromBlocks_

    Load the files from all active blocks.

    :param blockID: block identifier passed through to the DAO
    :returns: list of DBSBufferFile objects with 'runInfo' expanded into
              Run objects and 'parentLFNs' expanded into parent file stubs
    """
    findFiles = self.factory(classname = "LoadFilesFromBlocks")

    existingTransaction = self.beginTransaction()

    results = findFiles.execute(blockID = blockID,
                                conn = self.getDBConn(),
                                transaction = self.existingTransaction())

    dbsFiles = []
    for entry in results:
        # Promote each raw result row into a DBSBufferFile
        dbsfile = DBSBufferFile(id = entry['id'])
        dbsfile.update(entry)
        dbsFiles.append(dbsfile)

    for dbsfile in dbsFiles:
        if 'runInfo' in dbsfile:
            # Replace the raw run info mapping with real Run objects
            for runNumber, lumis in dbsfile['runInfo'].items():
                run = Run(runNumber = runNumber)
                run.extend(lumis)
                dbsfile.addRun(run)
            del dbsfile['runInfo']
        if 'parentLFNs' in dbsfile:
            # Attach parents as lightweight DBSBufferFile stubs
            for lfn in dbsfile['parentLFNs']:
                dbsfile['parents'].add(DBSBufferFile(lfn = lfn))
            del dbsfile['parentLFNs']

    self.commitTransaction(existingTransaction)
    return dbsFiles
def loadFilesFromBlocks(self, blockID):
    """
    _loadFilesFromBlocks_

    Load the files from all active blocks
    """
    loader = self.factory(classname = "LoadFilesFromBlocks")

    myThread = threading.currentThread()
    existingTransaction = self.beginTransaction()
    dbsFiles = []

    rows = loader.execute(blockID = blockID, conn = self.getDBConn(),
                          transaction = self.existingTransaction())

    for row in rows:
        # Wrap each raw result row in a DBSBufferFile object
        loaded = DBSBufferFile(id = row['id'])
        loaded.update(row)
        dbsFiles.append(loaded)

    for loaded in dbsFiles:
        if 'runInfo' in loaded.keys():
            # Swap the stored run info for genuine Run objects
            for runNumber in loaded['runInfo'].keys():
                runObj = Run(runNumber = runNumber)
                runObj.extend(loaded['runInfo'][runNumber])
                loaded.addRun(runObj)
            del loaded['runInfo']
        if 'parentLFNs' in loaded.keys():
            # Register each parent LFN as a stub parent file
            for parentLFN in loaded['parentLFNs']:
                parentStub = DBSBufferFile(lfn = parentLFN)
                loaded['parents'].add(parentStub)
            del loaded['parentLFNs']

    self.commitTransaction(existingTransaction)
    return dbsFiles
def getFiles(self, name, tier, nFiles = 12, site = "malpaquet"):
    """
    Create some quick dummy test files.

    :param name: base name used for LFNs and the dataset path
    :param tier: data tier appended to the dataset path
    :param nFiles: number of files to create (default 12)
    :param site: location added to each file (default "malpaquet")
    :returns: list of nFiles DBSBufferFile objects, each 1024 bytes /
              20 events, on dataset /name/name/tier, with run 2 lumi f
    """
    files = []

    for f in range(nFiles):
        testFile = DBSBufferFile(lfn = '%s-%s-%i' % (name, site, f),
                                 size = 1024, events = 20,
                                 checksums = {'cksum': 1})
        testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_3_1_1",
                              appFam = "RECO", psetHash = "GIBBERISH",
                              configContent = "MOREGIBBERISH")
        testFile.setDatasetPath("/%s/%s/%s" % (name, name, tier))
        # One run (number 1) with the file index as its single lumi
        testFile.addRun(Run(1, f))
        testFile['locations'].add(site)
        files.append(testFile)

    return files
def addFile(self, file, dataset=0):
    """
    Add the file to the buffer.

    Creates the DBSBufferFile if it does not exist yet (setting its SE
    location), otherwise loads it; then updates the dataset file count
    and attaches the file's parents.

    :param file: file-like mapping with 'LFN', 'Size', 'TotalEvents',
                 'Checksum', 'SEName', lumi sections and inputFiles
    :param dataset: dataset id the file belongs to (default 0)
    """
    myThread = threading.currentThread()
    existingTransaction = self.beginTransaction()

    bufferFile = DBSBufferFile(lfn = file['LFN'], size = file['Size'],
                               events = file['TotalEvents'],
                               cksum = file['Checksum'], dataset = dataset)

    # Group lumi sections by run number in a single pass, then attach
    # one Run object per run (avoids re-scanning the lumi list per run)
    runLumis = {}
    for lumiInfo in file.getLumiSections():
        runLumis.setdefault(lumiInfo['RunNumber'],
                            []).append(int(lumiInfo['LumiSectionNumber']))
    for runNumber, lumis in runLumis.items():
        bufferFile.addRun(Run(runNumber, *lumis))

    if not bufferFile.exists():
        bufferFile.create()
        bufferFile.setLocation(se = file['SEName'], immediateSave = True)
    else:
        bufferFile.load()

    # Update the dataset's file count in the buffer
    self.updateDSFileCount(dataset = dataset)

    # Attach parent files
    bufferFile.addParents(file.inputFiles)

    self.commitTransaction(existingTransaction)
    return
def addFile(self, file, dataset=0):
    """
    Add the file to the buffer.

    Creates the DBSBufferFile if it does not exist yet (setting its
    PhEDEx node location), otherwise loads it; then updates the dataset
    file count and attaches the file's parents.

    :param file: file-like mapping with 'LFN', 'Size', 'TotalEvents',
                 'Checksum', 'locations', lumi sections and inputFiles
    :param dataset: dataset id the file belongs to (default 0)
    """
    myThread = threading.currentThread()
    existingTransaction = self.beginTransaction()

    bufferFile = DBSBufferFile(lfn = file['LFN'], size = file['Size'],
                               events = file['TotalEvents'],
                               cksum = file['Checksum'], dataset = dataset)

    # Group lumi sections by run number in a single pass, then attach
    # one Run object per run (avoids re-scanning the lumi list per run)
    runLumis = {}
    for lumiInfo in file.getLumiSections():
        runLumis.setdefault(lumiInfo['RunNumber'],
                            []).append(int(lumiInfo['LumiSectionNumber']))
    for runNumber, lumis in runLumis.items():
        bufferFile.addRun(Run(runNumber, *lumis))

    if not bufferFile.exists():
        bufferFile.create()
        bufferFile.setLocation(pnn = file['locations'], immediateSave = True)
    else:
        bufferFile.load()

    # Update the dataset's file count in the buffer
    self.updateDSFileCount(dataset = dataset)

    # Attach parent files
    bufferFile.addParents(file.inputFiles)

    self.commitTransaction(existingTransaction)
    return
def stuffDatabase(self):
    """
    _stuffDatabase_

    Fill the dbsbuffer with some files and blocks.  We'll insert a
    total of 5 files spanning two blocks.  There will be a total of two
    datasets inserted into the database.

    We'll inject files with the location set as an SE name as well as a
    PhEDEx node name as well.
    """
    checksums = {"adler32": "1234", "cksum": "5678"}

    def makeDBSFile(datasetPath):
        # Create and persist one dummy 1024-byte / 10-event file on the
        # given dataset path, located at srm-cms.cern.ch, run 2 lumi 45.
        testFile = DBSBufferFile(lfn = makeUUID(), size = 1024,
                                 events = 10, checksums = checksums,
                                 locations = set(["srm-cms.cern.ch"]))
        testFile.setAlgorithm(appName = "cmsRun", appVer = "CMSSW_2_1_8",
                              appFam = "RECO", psetHash = "GIBBERISH",
                              configContent = "MOREGIBBERISH")
        testFile.setDatasetPath(datasetPath)
        testFile.addRun(Run(2, *[45]))
        testFile.create()
        return testFile

    # Three files in dataset A, two in dataset B
    filesA = [makeDBSFile(self.testDatasetA) for _ in range(3)]
    filesB = [makeDBSFile(self.testDatasetB) for _ in range(2)]
    self.testFilesA.extend(filesA)
    self.testFilesB.extend(filesB)

    myThread = threading.currentThread()
    uploadFactory = DAOFactory(package = "WMComponent.DBSUpload.Database",
                               logger = myThread.logger,
                               dbinterface = myThread.dbi)
    createBlock = uploadFactory(classname = "SetBlockStatus")

    # One open block per dataset
    self.blockAName = self.testDatasetA + "#" + makeUUID()
    self.blockBName = self.testDatasetB + "#" + makeUUID()
    createBlock.execute(block = self.blockAName,
                        locations = ["srm-cms.cern.ch"], open_status = 1)
    createBlock.execute(block = self.blockBName,
                        locations = ["srm-cms.cern.ch"], open_status = 1)

    bufferFactory = DAOFactory(package = "WMComponent.DBSBuffer.Database",
                               logger = myThread.logger,
                               dbinterface = myThread.dbi)

    # Assign every file to its dataset's block
    setBlock = bufferFactory(classname = "DBSBufferFiles.SetBlock")
    for testFile in filesA:
        setBlock.execute(testFile["lfn"], self.blockAName)
    for testFile in filesB:
        setBlock.execute(testFile["lfn"], self.blockBName)

    # Mark all files as LOCAL
    fileStatus = bufferFactory(classname = "DBSBufferFiles.SetStatus")
    for testFile in filesA + filesB:
        fileStatus.execute(testFile["lfn"], "LOCAL")

    return