def addFile(self, file, dataset=0):
    """
    _addFile_

    Add the file to the DBS buffer inside a single transaction.

    Creates a DBSBufferFile from the passed file's metadata, attaches its
    run/lumi information, creates it (with its SE location) if it does not
    already exist, bumps the dataset file count and records parentage.

    file:    a WMCore File-like object (dict access for 'LFN', 'Size',
             'TotalEvents', 'Checksum', 'SEName'; getLumiSections() and
             inputFiles attributes) -- assumed, confirm against callers
    dataset: dataset id in the buffer (default 0)
    """
    existingTransaction = self.beginTransaction()

    bufferFile = DBSBufferFile(lfn = file['LFN'], size = file['Size'],
                               events = file['TotalEvents'],
                               cksum = file['Checksum'], dataset = dataset)

    # Group lumi sections by run in one pass.  The previous version scanned
    # the whole lumi list once per entry (O(n^2)) and called addRun() again
    # for every lumi belonging to an already-added run.
    lumisByRun = {}
    for lumiInfo in file.getLumiSections():
        lumisByRun.setdefault(lumiInfo['RunNumber'], []).append(
            int(lumiInfo['LumiSectionNumber']))
    for runNumber, lumis in lumisByRun.items():
        bufferFile.addRun(Run(runNumber, *lumis))

    if not bufferFile.exists():
        bufferFile.create()
        bufferFile.setLocation(se = file['SEName'], immediateSave = True)
    else:
        bufferFile.load()

    # Keep the per-dataset file count in sync with the newly added file.
    self.updateDSFileCount(dataset = dataset)

    # Record parentage for the file.
    bufferFile.addParents(file.inputFiles)

    self.commitTransaction(existingTransaction)
    return
def addFile(self, file, dataset=0):
    """
    _addFile_

    Add the file to the DBS buffer inside a single transaction.

    Creates a DBSBufferFile from the passed file's metadata, attaches its
    run/lumi information, creates it (with its PNN locations) if it does not
    already exist, bumps the dataset file count and records parentage.

    file:    a WMCore File-like object (dict access for 'LFN', 'Size',
             'TotalEvents', 'Checksum', 'locations'; getLumiSections() and
             inputFiles attributes) -- assumed, confirm against callers
    dataset: dataset id in the buffer (default 0)
    """
    existingTransaction = self.beginTransaction()

    bufferFile = DBSBufferFile(lfn = file['LFN'], size = file['Size'],
                               events = file['TotalEvents'],
                               cksum = file['Checksum'], dataset = dataset)

    # Group lumi sections by run in one pass.  The previous version scanned
    # the whole lumi list once per entry (O(n^2)) and called addRun() again
    # for every lumi belonging to an already-added run.
    lumisByRun = {}
    for lumiInfo in file.getLumiSections():
        lumisByRun.setdefault(lumiInfo['RunNumber'], []).append(
            int(lumiInfo['LumiSectionNumber']))
    for runNumber, lumis in lumisByRun.items():
        bufferFile.addRun(Run(runNumber, *lumis))

    if not bufferFile.exists():
        bufferFile.create()
        bufferFile.setLocation(pnn = file['locations'], immediateSave = True)
    else:
        bufferFile.load()

    # Keep the per-dataset file count in sync with the newly added file.
    self.updateDSFileCount(dataset = dataset)

    # Record parentage for the file.
    bufferFile.addParents(file.inputFiles)

    self.commitTransaction(existingTransaction)
    return
#sys.exit(0) psetInstance = DbsQueryableParameterSet(Hash = "GIBBERISH") for newBlockName in badFiles.keys(): seName = blockLocation[newBlockName] (datasetPath, junk) = newBlockName.split("#", 1) dbsApi.insertBlock(datasetPath, newBlockName, storage_element_list = [seName]) blockRef = dbsApi.listBlocks(dataset = datasetPath, block_name = newBlockName)[0] print blockRef newFiles = [] for newFileLFN in badFiles[newBlockName]: localFile = DBSBufferFile(lfn = newFileLFN) localFile.load(parentage = 1) (primaryDS, procDS, tier) = datasetPath[1:].split("/", 3) primary = DbsPrimaryDataset(Name = primaryDS, Type = "mc") algo = DbsAlgorithm(ExecutableName = localFile["appName"], ApplicationVersion = localFile["appVer"], ApplicationFamily = localFile["appFam"], ParameterSetID = psetInstance) processed = DbsProcessedDataset(PrimaryDataset = primary, AlgoList = [algo], Name = procDS, TierList = [tier], ParentList = [], PhysicsGroup = "NoGroup", Status = "VALID", GlobalTag = "")
# Re-register every "bad" block and reconstruct the DBS entities for the
# files it should contain.  Relies on a `psetInstance` defined earlier in
# the script.  NOTE(review): indentation was lost in this chunk; nesting is
# reconstructed -- primary/algo/processed are placed inside the per-file
# loop because `algo` reads fields of `localFile`.  The loop body also
# appears to continue beyond this chunk (e.g. use of `newFiles`).
for newBlockName in badFiles.keys():
    # Storage element recorded for this block; blocks are named
    # "<datasetPath>#<uuid>", so split off the dataset path.
    seName = blockLocation[newBlockName]
    (datasetPath, junk) = newBlockName.split("#", 1)
    dbsApi.insertBlock(datasetPath, newBlockName,
                       storage_element_list=[seName])
    # Fetch the block handle we just inserted so files can be attached.
    blockRef = dbsApi.listBlocks(dataset=datasetPath,
                                 block_name=newBlockName)[0]
    print blockRef

    newFiles = []
    for newFileLFN in badFiles[newBlockName]:
        # Load the file (with its parentage) from the local DBS buffer.
        localFile = DBSBufferFile(lfn=newFileLFN)
        localFile.load(parentage=1)
        # Dataset path is "/<primary>/<processed>/<tier>".
        (primaryDS, procDS, tier) = datasetPath[1:].split("/", 3)
        primary = DbsPrimaryDataset(Name=primaryDS, Type="mc")
        algo = DbsAlgorithm(ExecutableName=localFile["appName"],
                            ApplicationVersion=localFile["appVer"],
                            ApplicationFamily=localFile["appFam"],
                            ParameterSetID=psetInstance)
        processed = DbsProcessedDataset(PrimaryDataset=primary,
                                        AlgoList=[algo],
                                        Name=procDS,
                                        TierList=[tier],
                                        ParentList=[],
                                        PhysicsGroup="NoGroup",
                                        Status="VALID",
                                        GlobalTag="")