def getBlock(self, newFile, location, skipOpenCheck=False):
    """
    _getBlock_

    Retrieve a block if one exists with a matching datasetpath/location and
    is open.  If no such block is found, create and return a new one.
    """
    datasetpath = newFile["datasetPath"]

    for block in self.blockCache.values():
        if datasetpath == block.getDatasetPath() and location == block.getLocation():
            if not self.isBlockOpen(newFile=newFile, block=block) and not skipOpenCheck:
                # Block isn't open anymore.  Mark it as pending so that it gets uploaded.
                block.setPendingAndCloseBlock()
            else:
                return block

    # A suitable open block does not exist.  Create a new one.
    blockname = "%s#%s" % (datasetpath, makeUUID())
    newBlock = DBSBufferBlock(name=blockname,
                              location=location,
                              datasetpath=datasetpath)

    parent = self.datasetParentageCache.get(datasetpath)
    if parent:
        newBlock.addDatasetParent(parent)
        logging.debug("Get block: Child dataset %s, Parent dataset %s", datasetpath, parent)

    self.blockCache[blockname] = newBlock
    return newBlock
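# --- Illustrative usage sketch (assumption, not part of the poller code) ----
# A minimal, self-contained rendition of the "reuse an open block or create a
# new one" pattern that getBlock() implements above.  SimpleBlockCache, the
# maxFilesPerBlock threshold, and the example dataset/location strings are
# hypothetical stand-ins; the real poller keys blocks on dataset path plus
# location and closes exhausted blocks via setPendingAndCloseBlock().
from uuid import uuid4


class SimpleBlockCache(object):
    def __init__(self, maxFilesPerBlock=500):
        self.blocks = {}  # blockname -> block dict
        self.maxFilesPerBlock = maxFilesPerBlock

    def getBlock(self, datasetpath, location):
        # Reuse an existing open block for this dataset/location pair.
        for block in self.blocks.values():
            if block["datasetpath"] == datasetpath and block["location"] == location:
                if not block["open"] or len(block["files"]) >= self.maxFilesPerBlock:
                    block["open"] = False  # closed; the next lookup creates a new block
                else:
                    return block
        # No suitable open block exists: create one with a unique name.
        blockname = "%s#%s" % (datasetpath, uuid4())
        newBlock = {"datasetpath": datasetpath, "location": location,
                    "files": [], "open": True}
        self.blocks[blockname] = newBlock
        return newBlock


if __name__ == "__main__":
    cache = SimpleBlockCache(maxFilesPerBlock=2)
    for lfn in ("/store/file1.root", "/store/file2.root", "/store/file3.root"):
        block = cache.getBlock("/Primary/Processed-v1/RECO", "T1_US_FNAL_Disk")
        block["files"].append(lfn)
    print("Blocks in cache:", len(cache.blocks))  # 2: the first filled up, then a new one
# -----------------------------------------------------------------------------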
def loadBlocks(self):
    """
    _loadBlocks_

    Find all open blocks and make sure they're in the cache.
    """
    openBlocks = self.dbsUtil.findOpenBlocks()
    logging.info("Found %d open blocks.", len(openBlocks))
    logging.debug("These are the open blocks: %s", openBlocks)

    # Load the blocks we don't already have in the cache.
    blocksToLoad = []
    for block in openBlocks:
        if block['blockname'] not in self.blockCache:
            blocksToLoad.append(block['blockname'])

    # Now load the blocks
    try:
        loadedBlocks = self.dbsUtil.loadBlocks(blocksToLoad)
        logging.info("Loaded %d blocks.", len(loadedBlocks))
    except WMException:
        raise
    except Exception as ex:
        msg = "Unhandled exception while loading blocks.\n"
        msg += str(ex)
        logging.error(msg)
        logging.debug("Blocks to load: %s\n", blocksToLoad)
        raise DBSUploadException(msg)

    for blockInfo in loadedBlocks:
        block = DBSBufferBlock(name=blockInfo['block_name'],
                               location=blockInfo['origin_site_name'],
                               datasetpath=blockInfo['datasetpath'])

        parent = self.datasetParentageCache.get(blockInfo['datasetpath'])
        if parent:
            block.addDatasetParent(parent)
            logging.debug("Load block: Child dataset %s, Parent dataset %s",
                          blockInfo['datasetpath'], parent)
        block.FillFromDBSBuffer(blockInfo)
        blockname = block.getName()

        # Now we have to load the files...
        try:
            files = self.dbsUtil.loadFilesByBlock(blockname=blockname)
            logging.info("Have %i files for block %s", len(files), blockname)
        except WMException:
            raise
        except Exception as ex:
            msg = "Unhandled exception while loading files for existing blocks.\n"
            msg += str(ex)
            logging.error(msg)
            logging.debug("Blocks being loaded: %s\n", blockname)
            raise DBSUploadException(msg)

        # Add the loaded files to the block.
        for f in files:
            block.addFile(f, self.datasetType, self.primaryDatasetType)

        # Add the block to the cache.
        self.blockCache[blockInfo['block_name']] = block

    return
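# --- Illustrative sketch (assumption, not part of the poller code) ----------
# Condensed version of the loadBlocks() flow above: find the open blocks, load
# only the ones missing from the cache, then attach their files.  FakeDBSUtil
# and its canned data are hypothetical stand-ins; the real code queries the
# DBSBuffer database and wraps unexpected failures in DBSUploadException.
import logging


class FakeDBSUtil(object):
    """Stand-in backend exposing two open blocks and their files."""

    def findOpenBlocks(self):
        return [{"blockname": "/Prim/Proc-v1/RECO#aaa"},
                {"blockname": "/Prim/Proc-v1/RECO#bbb"}]

    def loadBlocks(self, names):
        return [{"block_name": n, "origin_site_name": "T2_CH_CERN",
                 "datasetpath": "/Prim/Proc-v1/RECO"} for n in names]

    def loadFilesByBlock(self, blockname):
        return [{"lfn": "%s-file%d.root" % (blockname, i)} for i in range(3)]


def loadOpenBlocks(dbsUtil, blockCache):
    openBlocks = dbsUtil.findOpenBlocks()
    # Only fetch blocks we do not already hold in memory.
    blocksToLoad = [b["blockname"] for b in openBlocks if b["blockname"] not in blockCache]
    try:
        loadedBlocks = dbsUtil.loadBlocks(blocksToLoad)
    except Exception as ex:
        logging.error("Unhandled exception while loading blocks: %s", str(ex))
        raise
    for blockInfo in loadedBlocks:
        files = dbsUtil.loadFilesByBlock(blockname=blockInfo["block_name"])
        blockCache[blockInfo["block_name"]] = {"info": blockInfo, "files": files}


if __name__ == "__main__":
    cache = {"/Prim/Proc-v1/RECO#aaa": {"info": {}, "files": []}}  # already cached
    loadOpenBlocks(FakeDBSUtil(), cache)
    print(sorted(cache))  # only "#bbb" is newly loaded; "#aaa" is kept as-is
# -----------------------------------------------------------------------------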