def prepareNewJobs(self, maxFilesPerJob=100, maxAttemptsPerFile=10):
    """Prepare new FTS3 jobs for the files of this operation still to be submitted.

    Files are grouped by target SE; for each reachable target a unique random
    source is chosen per file, and the transfers are split into jobs of at
    most ``maxFilesPerJob`` files each.

    :param maxFilesPerJob: maximum number of files assigned to a single FTS3 job
    :param maxAttemptsPerFile: files having failed more than this many times
                               are not considered for submission
    :returns: S_OK(list of FTS3Job to submit) or S_ERROR
    """
    log = self._log.getSubLogger("_prepareNewJobs", child=True)

    filesToSubmit = self._getFilesToSubmit(maxAttemptsPerFile=maxAttemptsPerFile)
    log.debug("%s ftsFiles to submit" % len(filesToSubmit))

    newJobs = []

    # {targetSE : [FTS3Files] }
    res = FTS3Utilities.groupFilesByTarget(filesToSubmit)
    if not res['OK']:
        return res
    filesGroupedByTarget = res['Value']

    # NOTE: .items() (not the py2-only .iteritems()) so the code runs on both
    # python 2 and python 3; the dict is only read here, so behavior is identical
    for targetSE, ftsFiles in filesGroupedByTarget.items():
        res = self._checkSEAccess(targetSE, 'WriteAccess', vo=self.vo)
        if not res['OK']:
            # If the SE is currently banned, we just skip it; for any other
            # error we count a failed attempt on each file before skipping
            if cmpError(res, errno.EACCES):
                log.info("Write access currently not permitted to %s, skipping." % targetSE)
            else:
                log.error(res)
                for ftsFile in ftsFiles:
                    ftsFile.attempt += 1
            continue

        sourceSEs = self.sourceSEs.split(',') if self.sourceSEs is not None else []

        # { sourceSE : [FTSFiles] }
        res = FTS3Utilities.selectUniqueRandomSource(ftsFiles, allowedSources=sourceSEs)
        if not res['OK']:
            return res
        uniqueTransfersBySource = res['Value']

        # We don't need to check the source, since it is already filtered by the DataManager
        for sourceSE, sourceFiles in uniqueTransfersBySource.items():
            for ftsFilesChunk in breakListIntoChunks(sourceFiles, maxFilesPerJob):
                newJob = self._createNewJob('Transfer', ftsFilesChunk, targetSE, sourceSE=sourceSE)
                newJobs.append(newJob)

    return S_OK(newJobs)
def prepareNewJobs(self, maxFilesPerJob=100, maxAttemptsPerFile=10):
    """Build the list of new FTS3 jobs to submit for this operation.

    The files still awaiting submission are grouped per target SE. Targets
    without write access are skipped (attempt counters are bumped unless the
    SE is merely banned). For every remaining target, one random source per
    file is selected and the resulting transfers are chunked into jobs.

    :param maxFilesPerJob: upper bound on the number of files per FTS3 job
    :param maxAttemptsPerFile: files with more failed attempts are excluded
    :returns: S_OK(list of FTS3Job objects) or S_ERROR
    """
    log = self._log.getSubLogger("_prepareNewJobs", child=True)

    filesToSubmit = self._getFilesToSubmit(maxAttemptsPerFile=maxAttemptsPerFile)
    log.debug("%s ftsFiles to submit" % len(filesToSubmit))

    newJobs = []

    # {targetSE : [FTS3Files] }
    res = FTS3Utilities.groupFilesByTarget(filesToSubmit)
    if not res['OK']:
        return res
    filesGroupedByTarget = res['Value']

    # .items() instead of the python-2-only .iteritems(): the mapping is only
    # iterated read-only, so the two are interchangeable and this stays
    # compatible with python 3
    for targetSE, ftsFiles in filesGroupedByTarget.items():
        res = self._checkSEAccess(targetSE, 'WriteAccess', vo=self.vo)
        if not res['OK']:
            # If the SE is currently banned, we just skip it
            if cmpError(res, errno.EACCES):
                log.info("Write access currently not permitted to %s, skipping." % targetSE)
            else:
                # Unexpected failure: record one more attempt per file
                log.error(res)
                for ftsFile in ftsFiles:
                    ftsFile.attempt += 1
            continue

        sourceSEs = self.sourceSEs.split(',') if self.sourceSEs is not None else []

        # { sourceSE : [FTSFiles] }
        res = FTS3Utilities.selectUniqueRandomSource(ftsFiles, allowedSources=sourceSEs)
        if not res['OK']:
            return res
        uniqueTransfersBySource = res['Value']

        # We don't need to check the source, since it is already filtered by the DataManager
        for sourceSE, filesForSource in uniqueTransfersBySource.items():
            for ftsFilesChunk in breakListIntoChunks(filesForSource, maxFilesPerJob):
                newJob = self._createNewJob('Transfer', ftsFilesChunk, targetSE, sourceSE=sourceSE)
                newJobs.append(newJob)

    return S_OK(newJobs)
def prepareNewJobs(self, maxFilesPerJob=100, maxAttemptsPerFile=10):
    """Prepare the new FTS3 jobs to submit for this operation.

    Files are grouped per target SE; unreachable targets are skipped. A unique
    random source is then selected per file, files whose source selection
    failed are flagged (set Defunct if absent from the catalog), and the
    remaining transfers are chunked into jobs — one file per job when the
    (source, target) pair requires multihop staging.

    :param maxFilesPerJob: maximum number of files per FTS3 job
    :param maxAttemptsPerFile: files with more failed attempts are excluded
    :returns: S_OK(list of FTS3Job to submit) or S_ERROR
    """
    log = self._log.getSubLogger("_prepareNewJobs", child=True)

    filesToSubmit = self._getFilesToSubmit(maxAttemptsPerFile=maxAttemptsPerFile)
    log.debug("%s ftsFiles to submit" % len(filesToSubmit))

    newJobs = []

    # {targetSE : [FTS3Files] }
    res = FTS3Utilities.groupFilesByTarget(filesToSubmit)
    if not res['OK']:
        return res
    filesGroupedByTarget = res['Value']

    for targetSE, ftsFiles in filesGroupedByTarget.items():
        res = self._checkSEAccess(targetSE, 'WriteAccess', vo=self.vo)
        if not res['OK']:
            # If the SE is currently banned, we just skip it
            if cmpError(res, errno.EACCES):
                log.info("Write access currently not permitted to %s, skipping." % targetSE)
            else:
                log.error(res)
                for ftsFile in ftsFiles:
                    ftsFile.attempt += 1
            continue

        sourceSEs = self.sourceSEs.split(',') if self.sourceSEs is not None else []

        # { sourceSE : [FTSFiles] }
        res = FTS3Utilities.selectUniqueRandomSource(ftsFiles, allowedSources=sourceSEs)
        if not res['OK']:
            return res
        uniqueTransfersBySource, failedFiles = res['Value']

        # Treat the errors of the failed files
        for ftsFile, errMsg in failedFiles.items():
            log.error("Error when selecting random sources", "%s, %s" % (ftsFile.lfn, errMsg))
            # If the error is that the file does not exist in the catalog
            # fail it !
            if cmpError(errMsg, errno.ENOENT):
                log.error("The file does not exist, setting it Defunct", "%s" % ftsFile.lfn)
                ftsFile.status = 'Defunct'

        # We don't need to check the source, since it is already filtered by the DataManager
        for sourceSE, sourceFiles in uniqueTransfersBySource.items():
            # BUGFIX: use a per-pair local chunk size instead of reassigning
            # maxFilesPerJob — the old code permanently shrank the limit to 1
            # for every subsequent (source, target) pair once any pair needed
            # multihop staging
            if self.__needsMultiHopStaging(sourceSE, targetSE):
                log.verbose("Needs multihop staging, max files per job is 1")
                chunkSize = 1
            else:
                chunkSize = maxFilesPerJob

            for ftsFilesChunk in breakListIntoChunks(sourceFiles, chunkSize):
                newJob = self._createNewJob('Transfer', ftsFilesChunk, targetSE, sourceSE=sourceSE)
                newJobs.append(newJob)

    return S_OK(newJobs)