Ejemplo n.º 1
0
        # (fragment) Error handling for a dbsUtil.loadBlocks() call whose
        # `try:` line lies above this excerpt.
        except WMException:
            # WMCore exceptions are already meaningful; re-raise untouched
            raise
        # NOTE(review): `except Exception, ex` is Python 2-only syntax;
        # newer siblings of this code use `except Exception as ex`.
        except Exception, ex:
            msg =  "Unhandled exception while loading blocks.\n"
            msg += str(ex)
            logging.error(msg)
            logging.debug("Blocks to load: %s\n" % blocksToLoad)
            # Wrap anything unexpected in the component's own exception type
            raise DBSUploadException(msg)

        # Build a DBSBlock wrapper for each block record loaded from the buffer
        for blockInfo in loadedBlocks:
            das  = blockInfo['DatasetAlgo']
            loc  = blockInfo['location']
            block = DBSBlock(name = blockInfo['Name'],
                             location = loc, das = das)
            # Populate the block's metadata from the DBSBuffer record
            block.FillFromDBSBuffer(blockInfo)
            blockname = block.getName()

            # Now we have to load files...
            try:
                files = self.dbsUtil.loadFilesByBlock(blockname = blockname)
            except WMException:
                raise
            except Exception, ex:
                msg =  "Unhandled exception while loading files for existing blocks.\n"
                msg += str(ex)
                logging.error(msg)
                logging.debug("Blocks being loaded: %s\n" % blockname)
                raise DBSUploadException(msg)
            # NOTE(review): `file` shadows the Python 2 builtin; rename when touched
            for file in files:
                block.addFile(file)
Ejemplo n.º 2
0
    def loadFiles(self):
        """
        _loadFiles_

        Load all files that need to be loaded.

        I will do this by DAS (Dataset-Algo combination) for now to break
        the monstrous calls down into smaller chunks.

        Returns an empty list immediately when there is nothing to upload.
        Raises DBSUploadException on any unexpected database failure.
        """
        # Grab all the Dataset-Algo combinations with uploadable files
        dasList = self.dbsUtil.findUploadableDAS()

        if len(dasList) < 1:
            # Then there's nothing to do
            return []

        readyBlocks = []
        for dasInfo in dasList:

            dasID = dasInfo['DAS_ID']

            # Get the files for this DAS
            try:
                loadedFiles = self.dbsUtil.findUploadableFilesByDAS(das = dasID)
            except WMException:
                raise
            except Exception as ex:
                # Wrap anything unexpected in the component's own exception type
                msg =  "Unhandled exception while loading uploadable files for DAS.\n"
                msg += str(ex)
                logging.error(msg)
                logging.debug("DAS being loaded: %s\n" % dasID)
                raise DBSUploadException(msg)

            # Make sure this DAS has an entry in the block cache
            if dasID not in self.dasCache:
                # Then we have a new DAS; add it
                self.dasCache[dasID] = {}
            dasBlocks = self.dasCache.get(dasID)

            # Sort the files by location
            fileDict = sortListByKey(input = loadedFiles, key = 'locations')

            # Now add each file
            for location in fileDict.keys():
                files = fileDict.get(location)

                if len(files) < 1:
                    # Nothing to do here
                    continue

                dasBlocks = self.dasCache[dasID].get(location, [])
                if len(dasBlocks) > 0:
                    # Load the current open block for this location from cache
                    currentBlock = self.blockCache.get(dasBlocks[0])
                else:
                    # No open block: start a fresh one with a unique name
                    blockname = '%s#%s' % (files[0]['datasetPath'], makeUUID())
                    currentBlock = DBSBlock(name = blockname,
                                            location = location, das = dasID)
                    # Add the era info
                    # NOTE(review): keyword is `era=` here, but newer code passes
                    # `procVer=` to setProcessingVer — confirm which signature
                    # this DBSBlock version expects before changing it.
                    currentBlock.setAcquisitionEra(era = dasInfo['AcquisitionEra'])
                    currentBlock.setProcessingVer(era = dasInfo['ProcessingVer'])
                    self.addNewBlock(block = currentBlock)
                    dasBlocks.append(currentBlock.getName())

                for newFile in files:
                    if newFile.get('block', 1) is not None:
                        # Then this file already has a block
                        # It should be accounted for somewhere
                        # Or loaded with the block
                        continue

                    # Check if we can put files in this block
                    if not self.isBlockOpen(newFile = newFile,
                                            block = currentBlock):
                        # Then we have to close the block and get a new one
                        currentBlock.status = 'Pending'
                        readyBlocks.append(currentBlock)
                        dasBlocks.remove(currentBlock.getName())
                        currentBlock = self.getBlock(newFile = newFile,
                                                     dasBlocks = dasBlocks,
                                                     location = location,
                                                     das = dasID)
                        currentBlock.setAcquisitionEra(era = dasInfo['AcquisitionEra'])
                        currentBlock.setProcessingVer(era = dasInfo['ProcessingVer'])

                    # Now deal with the file
                    currentBlock.addFile(dbsFile = newFile)
                    self.filesToUpdate.append({'filelfn': newFile['lfn'],
                                               'block': currentBlock.getName()})
                # Done with the location
                readyBlocks.append(currentBlock)
Ejemplo n.º 3
0
            # (fragment) The matching `try:` with the dbsUtil.loadBlocks()
            # call lies above this excerpt.
            logging.info("Loaded blocks: %s" % loadedBlocks)
        except WMException:
            # WMCore exceptions are already meaningful; re-raise untouched
            raise
        # NOTE(review): `except Exception, ex` is Python 2-only syntax
        except Exception, ex:
            msg = "Unhandled exception while loading blocks.\n"
            msg += str(ex)
            logging.error(msg)
            logging.debug("Blocks to load: %s\n" % blocksToLoad)
            # Wrap anything unexpected in the component's own exception type
            raise DBSUploadException(msg)

        # Build a DBSBlock wrapper for each block record loaded from the buffer
        for blockInfo in loadedBlocks:
            das = blockInfo["DatasetAlgo"]
            loc = blockInfo["origin_site_name"]
            block = DBSBlock(name=blockInfo["block_name"], location=loc, das=das)
            # Populate the block's metadata from the DBSBuffer record
            block.FillFromDBSBuffer(blockInfo)
            blockname = block.getName()

            # Now we have to load files...
            try:
                files = self.dbsUtil.loadFilesByBlock(blockname=blockname)
                logging.info("Have %i files for block %s" % (len(files), blockname))
            except WMException:
                raise
            except Exception, ex:
                msg = "Unhandled exception while loading files for existing blocks.\n"
                msg += str(ex)
                logging.error(msg)
                logging.debug("Blocks being loaded: %s\n" % blockname)
                raise DBSUploadException(msg)

            # Add the loaded files to the block
Ejemplo n.º 4
0
            # (fragment) This is the tail of an `except` clause whose `try:`
            # and exception header lie above this excerpt.
            msg = "Unhandled exception while loading blocks.\n"
            msg += str(ex)
            logging.error(msg)
            logging.debug("Blocks to load: %s\n" % blocksToLoad)
            # Wrap anything unexpected in the component's own exception type
            raise DBSUploadException(msg)

        # Build a DBSBlock wrapper for each block record loaded from the buffer
        for blockInfo in loadedBlocks:
            das = blockInfo['DatasetAlgo']
            loc = blockInfo['origin_site_name']
            workflow = blockInfo['workflow']
            block = DBSBlock(name=blockInfo['block_name'],
                             location=loc,
                             das=das,
                             workflow=workflow)
            # Populate the block's metadata from the DBSBuffer record
            block.FillFromDBSBuffer(blockInfo)
            blockname = block.getName()

            # Now we have to load files...
            try:
                files = self.dbsUtil.loadFilesByBlock(blockname=blockname)
                logging.info("Have %i files for block %s" %
                             (len(files), blockname))
            except WMException:
                raise
            # NOTE(review): `except Exception, ex` is Python 2-only syntax
            except Exception, ex:
                msg = "Unhandled exception while loading files for existing blocks.\n"
                msg += str(ex)
                logging.error(msg)
                logging.debug("Blocks being loaded: %s\n" % blockname)
                raise DBSUploadException(msg)
Ejemplo n.º 5
0
    def loadFiles(self):
        """
        _loadFiles_

        Load all files that need to be loaded.  I will do this by DAS for now to
        break the monstrous calls down into smaller chunks.

        Returns an empty list immediately when there is nothing to upload.
        Raises DBSUploadException on any unexpected database failure.
        """
        # Grab all the Dataset-Algo combinations with uploadable files
        dasList = self.dbsUtil.findUploadableDAS()

        if len(dasList) < 1:
            # Then there's nothing to do
            return []

        readyBlocks = []
        for dasInfo in dasList:

            dasID = dasInfo['DAS_ID']

            # Get the files for this DAS
            try:
                loadedFiles = self.dbsUtil.findUploadableFilesByDAS(das=dasID)
            except WMException:
                raise
            except Exception as ex:
                # Wrap anything unexpected in the component's own exception type
                msg = "Unhandled exception while loading uploadable files for DAS.\n"
                msg += str(ex)
                logging.error(msg)
                logging.debug("DAS being loaded: %s\n" % dasID)
                raise DBSUploadException(msg)

            # Make sure this DAS has an entry in the block cache
            if dasID not in self.dasCache:
                # Then we have a new DAS; add it
                self.dasCache[dasID] = {}
            dasBlocks = self.dasCache.get(dasID)

            # Sort the files by location
            fileDict = sortListByKey(input=loadedFiles, key='locations')

            # Now add each file
            for location in fileDict.keys():
                files = fileDict.get(location)

                if len(files) < 1:
                    # Nothing to do here
                    continue

                dasBlocks = self.dasCache[dasID].get(location, [])
                if len(dasBlocks) > 0:
                    # Load the current open block for this location from cache
                    currentBlock = self.blockCache.get(dasBlocks[0])
                else:
                    # No open block: start a fresh one with a unique name
                    blockname = '%s#%s' % (files[0]['datasetPath'], makeUUID())
                    currentBlock = DBSBlock(name=blockname,
                                            location=location,
                                            das=dasID)
                    # Add the era info
                    currentBlock.setAcquisitionEra(
                        era=dasInfo['AcquisitionEra'])
                    currentBlock.setProcessingVer(
                        procVer=dasInfo['ProcessingVer'])
                    self.addNewBlock(block=currentBlock)
                    dasBlocks.append(currentBlock.getName())

                for newFile in files:
                    if newFile.get('block', 1) is not None:
                        # Then this file already has a block
                        # It should be accounted for somewhere
                        # Or loaded with the block
                        continue

                    # Check if we can put files in this block
                    if not self.isBlockOpen(newFile=newFile,
                                            block=currentBlock):
                        # Then we have to close the block and get a new one
                        currentBlock.status = 'Pending'
                        readyBlocks.append(currentBlock)
                        dasBlocks.remove(currentBlock.getName())
                        currentBlock = self.getBlock(newFile=newFile,
                                                     dasBlocks=dasBlocks,
                                                     location=location,
                                                     das=dasID)
                        currentBlock.setAcquisitionEra(
                            era=dasInfo['AcquisitionEra'])
                        currentBlock.setProcessingVer(
                            procVer=dasInfo['ProcessingVer'])

                    # Now deal with the file
                    currentBlock.addFile(dbsFile=newFile)
                    self.filesToUpdate.append({
                        'filelfn': newFile['lfn'],
                        'block': currentBlock.getName()
                    })
                # Done with the location
                readyBlocks.append(currentBlock)
Ejemplo n.º 6
0
    def loadBlocks(self):
        """
        _loadBlocks_

        Find all open blocks in DBSBuffer and make sure they (and their
        files) are present in the block cache.

        Raises DBSUploadException on any unexpected database failure.
        """
        openBlocks = self.dbsUtil.findOpenBlocks()
        logging.info("These are the openblocks: %s" % openBlocks)

        # Load only the blocks we don't already have in the cache
        blocksToLoad = []
        for block in openBlocks:
            if block['blockname'] not in self.blockCache:
                blocksToLoad.append(block['blockname'])

        # Now load the blocks
        try:
            loadedBlocks = self.dbsUtil.loadBlocks(blocksToLoad)
            logging.info("Loaded blocks: %s" % loadedBlocks)
        except WMException:
            # WMCore exceptions are already meaningful; re-raise untouched
            raise
        except Exception as ex:
            # Wrap anything unexpected in the component's own exception type
            msg = "Unhandled exception while loading blocks.\n"
            msg += str(ex)
            logging.error(msg)
            logging.debug("Blocks to load: %s\n" % blocksToLoad)
            raise DBSUploadException(msg)

        for blockInfo in loadedBlocks:
            das = blockInfo['DatasetAlgo']
            loc = blockInfo['origin_site_name']
            workflow = blockInfo['workflow']
            block = DBSBlock(name = blockInfo['block_name'],
                             location = loc, das = das, workflow = workflow)
            # Populate the block's metadata from the DBSBuffer record
            block.FillFromDBSBuffer(blockInfo)
            blockname = block.getName()

            # Now we have to load files...
            try:
                files = self.dbsUtil.loadFilesByBlock(blockname = blockname)
                logging.info("Have %i files for block %s" % (len(files), blockname))
            except WMException:
                raise
            except Exception as ex:
                msg = "Unhandled exception while loading files for existing blocks.\n"
                msg += str(ex)
                logging.error(msg)
                logging.debug("Blocks being loaded: %s\n" % blockname)
                raise DBSUploadException(msg)

            # Add the loaded files to the block
            # (renamed from `file` to avoid shadowing the builtin)
            for dbsFile in files:
                block.addFile(dbsFile, self.datasetType, self.primaryDatasetType)

            # Add to the cache
            self.addNewBlock(block = block)

        # All blocks should now be loaded and present
        # in both the block cache (which has all the info)
        # and the dasCache (which is a list of name pointers
        # to the keys in the block cache).
        return
Ejemplo n.º 7
0
    def loadBlocks(self):
        """
        _loadBlocks_

        Find all open blocks in DBSBuffer (honouring the dbs3UploadOnly
        flag) and make sure they (and their files) are present in the
        block cache.

        Raises DBSUploadException on any unexpected database failure.
        """
        openBlocks = self.dbsUtil.findOpenBlocks(self.dbs3UploadOnly)
        logging.info("These are the openblocks: %s" % openBlocks)

        # Load only the blocks we don't already have in the cache
        blocksToLoad = []
        for block in openBlocks:
            if block['blockname'] not in self.blockCache:
                blocksToLoad.append(block['blockname'])

        # Now load the blocks
        try:
            loadedBlocks = self.dbsUtil.loadBlocks(blocksToLoad, self.dbs3UploadOnly)
            logging.info("Loaded blocks: %s" % loadedBlocks)
        except WMException:
            # WMCore exceptions are already meaningful; re-raise untouched
            raise
        except Exception as ex:
            # Wrap anything unexpected in the component's own exception type
            msg = "Unhandled exception while loading blocks.\n"
            msg += str(ex)
            logging.error(msg)
            logging.debug("Blocks to load: %s\n" % blocksToLoad)
            raise DBSUploadException(msg)

        for blockInfo in loadedBlocks:
            das = blockInfo['DatasetAlgo']
            loc = blockInfo['origin_site_name']
            workflow = blockInfo['workflow']
            block = DBSBlock(name = blockInfo['block_name'],
                             location = loc, das = das, workflow = workflow)
            # Populate the block's metadata from the DBSBuffer record
            block.FillFromDBSBuffer(blockInfo)
            blockname = block.getName()

            # Now we have to load files...
            try:
                files = self.dbsUtil.loadFilesByBlock(blockname = blockname)
                logging.info("Have %i files for block %s" % (len(files), blockname))
            except WMException:
                raise
            except Exception as ex:
                msg = "Unhandled exception while loading files for existing blocks.\n"
                msg += str(ex)
                logging.error(msg)
                logging.debug("Blocks being loaded: %s\n" % blockname)
                raise DBSUploadException(msg)

            # Add the loaded files to the block
            # (renamed from `file` to avoid shadowing the builtin)
            for dbsFile in files:
                block.addFile(dbsFile, self.datasetType, self.primaryDatasetType)

            # Add to the cache
            self.addNewBlock(block = block)

        # All blocks should now be loaded and present
        # in both the block cache (which has all the info)
        # and the dasCache (which is a list of name pointers
        # to the keys in the block cache).
        return