def __init__(self, config, name, checkExecutor, cancelExecutor):
    """Initialize the batch WMS base: executors, run library, work paths and storage managers.

    config: work-path aware config object used to resolve paths and plugins
    name: configured WMS instance name
    checkExecutor / cancelExecutor: backend helpers; both get setup() called with this logger
    """
    WMS.__init__(self, config, name)
    # Wire up the check/cancel executors with our logger before storing them
    for executor in [checkExecutor, cancelExecutor]:
        executor.setup(self._log)
    (self._check_executor, self._cancel_executor) = (checkExecutor, cancelExecutor)
    # Mention the configured instance name only when it differs from the class default
    if self._name != self.__class__.__name__.upper():
        self._log.info('Using batch system: %s (%s)', self.__class__.__name__, self._name)
    else:
        self._log.info('Using batch system: %s', self._name)
    self.errorLog = config.getWorkPath('error.tar')
    self._runlib = config.getWorkPath('gc-run.lib')
    # Materialize gc-run.lib in the work dir once, substituting the version placeholder
    if not os.path.exists(self._runlib):
        fp = SafeFile(self._runlib, 'w')
        content = SafeFile(utils.pathShare('gc-run.lib')).read()
        fp.write(content.replace('__GC_VERSION__', __import__('grid_control').__version__))
        fp.close()
    self._outputPath = config.getWorkPath('output')
    self._filecachePath = config.getWorkPath('files')
    utils.ensureDirExists(self._outputPath, 'output directory')
    self._failPath = config.getWorkPath('fail')
    # Initialise access token and storage managers
    # UI -> SE -> WN (input transfer direction)
    self.smSEIn = config.getPlugin('se input manager', 'SEStorageManager', cls=StorageManager, tags=[self], pargs=('se', 'se input', 'SE_INPUT'))
    self.smSBIn = config.getPlugin('sb input manager', 'LocalSBStorageManager', cls=StorageManager, tags=[self], pargs=('sandbox', 'sandbox', 'SB_INPUT'))
    # UI <- SE <- WN (output transfer direction)
    self.smSEOut = config.getPlugin('se output manager', 'SEStorageManager', cls=StorageManager, tags=[self], pargs=('se', 'se output', 'SE_OUTPUT'))
    self.smSBOut = None  # no sandbox output manager by default
    self._token = config.getCompositePlugin(['proxy', 'access token'], 'TrivialAccessToken', 'MultiAccessToken', cls=AccessToken, inherit=True, tags=[self])
def __init__(self, config, wmsName):
    """Legacy batch WMS constructor variant (uses wmsName / utils.vprint instead of logger).

    config: work-path aware config object used to resolve paths and plugins
    wmsName: configured WMS instance name
    """
    WMS.__init__(self, config, wmsName)
    # Mention the configured instance name only when it differs from the class default
    if self.wmsName != self.__class__.__name__.upper():
        utils.vprint('Using batch system: %s (%s)' % (self.__class__.__name__, self.wmsName), -1)
    else:
        utils.vprint('Using batch system: %s' % self.wmsName, -1)
    self.errorLog = config.getWorkPath('error.tar')
    self._runlib = config.getWorkPath('gc-run.lib')
    # Materialize gc-run.lib in the work dir once, substituting the version placeholder
    if not os.path.exists(self._runlib):
        fp = SafeFile(self._runlib, 'w')
        content = SafeFile(utils.pathShare('gc-run.lib')).read()
        fp.write(content.replace('__GC_VERSION__', __import__('grid_control').__version__))
        fp.close()
    self._outputPath = config.getWorkPath('output')
    utils.ensureDirExists(self._outputPath, 'output directory')
    self._failPath = config.getWorkPath('fail')
    # Initialise access token, broker and storage manager
    self._token = config.getCompositePlugin(['proxy', 'access token'], 'TrivialAccessToken', 'MultiAccessToken', cls=AccessToken, inherit=True, tags=[self])
    # UI -> SE -> WN (input transfer direction)
    self.smSEIn = config.getPlugin('se input manager', 'SEStorageManager', cls=StorageManager, tags=[self], pargs=('se', 'se input', 'SE_INPUT'))
    self.smSBIn = config.getPlugin('sb input manager', 'LocalSBStorageManager', cls=StorageManager, tags=[self], pargs=('sandbox', 'sandbox', 'SB_INPUT'))
    # UI <- SE <- WN (output transfer direction)
    self.smSEOut = config.getPlugin('se output manager', 'SEStorageManager', cls=StorageManager, tags=[self], pargs=('se', 'se output', 'SE_OUTPUT'))
    self.smSBOut = None  # no sandbox output manager by default
    self.fileNamesEnvironment = config.getBool("file names environment", True, onChange=None)
def run(args=None, intro=True):
    """Main entry point: show banner, build config and workflow, run it.

    args: command line argument list (defaults to sys.argv[1:])
    intro: show logo and version unless GC_DISABLE_INTRO is set in the environment
    Exits the process via sys.exit() in all paths.
    """
    # display the 'grid-control' logo and version
    if intro and not os.environ.get('GC_DISABLE_INTRO'):
        sys.stdout.write(SafeFile(utils.pathShare('logo.txt'), 'r').read())
        sys.stdout.write('Revision: %s\n' % utils.getVersion())
    pyver = (sys.version_info[0], sys.version_info[1])
    if pyver < (2, 3):
        utils.deprecated('This python version (%d.%d) is not supported anymore!' % pyver)
    Activity.root = Activity('Running grid-control', name='root')  # top level activity instance
    # main try... except block to catch exceptions and show error message
    try:
        config = gc_create_config(args or sys.argv[1:], useDefaultFiles=True)
        workflow = gc_create_workflow(config)
        try:
            sys.exit(workflow.run())
        finally:
            sys.stdout.write('\n')
    except SystemExit:  # avoid getting caught for Python < 2.5
        raise
    except Exception:  # coverage overrides sys.excepthook
        gc_excepthook(*sys.exc_info())
        # os.EX_SOFTWARE only exists on POSIX systems - fall back to its
        # conventional numeric value (70) on platforms like Windows
        sys.exit(getattr(os, 'EX_SOFTWARE', 70))
def _fillContentWithIncludes(self, configFile, searchPaths, configContent):
    """Read a config file into configContent, recursively resolving include directives.

    Processing order is significant: 'include' files are merged first, then the
    current file's own entries, then 'include override' files (which win).
    Returns the extended search path list (old paths + this file's directory).
    """
    # NOTE(review): logging.INFO1 appears to be a project-defined log level - confirm
    log = logging.getLogger(('config.%s' % utils.getRootName(configFile)).rstrip('.').lower())
    log.log(logging.INFO1, 'Reading config file %s', configFile)
    configFile = utils.resolvePath(configFile, searchPaths, ErrorClass = ConfigError)
    configFileLines = SafeFile(configFile).readlines()
    # Single pass, non-recursive list retrieval - used only to discover include options
    tmpConfigContent = {}
    self._fillContentSingleFile(configFile, configFileLines, searchPaths, tmpConfigContent)
    def getFlatList(section, option):
        # Yield each parsed list entry of the given [section] option from the temp content
        for (opt, value, src) in tmpConfigContent.get(section, []):
            try:
                if opt == option:
                    for entry in parseList(value, None):
                        yield entry
            except Exception:
                raise ConfigError('Unable to parse [%s] %s from %s' % (section, option, src))
    newSearchPaths = [os.path.dirname(configFile)]
    # Add entries from include statement recursively
    for includeFile in getFlatList('global', 'include'):
        self._fillContentWithIncludes(includeFile, searchPaths + newSearchPaths, configContent)
    # Process all other entries in current file
    self._fillContentSingleFile(configFile, configFileLines, searchPaths, configContent)
    # Override entries in current config file
    for overrideFile in getFlatList('global', 'include override'):
        self._fillContentWithIncludes(overrideFile, searchPaths + newSearchPaths, configContent)
    # Filter special global options so include directives don't leak into the merged result
    if configContent.get('global', []):
        configContent['global'] = lfilter(lambda opt_v_s: opt_v_s[0] not in ['include', 'include override'], configContent['global'])
    return searchPaths + newSearchPaths
def _write_process_log(self, record):
    """Append metadata and captured streams of an external process call to the log tarball.

    record: log record carrying .proc (the process wrapper), .files (extra files dict),
            .name and timing information
    Raises GCError if the tar archive at self._fn cannot be written.
    """
    # Unique per-call entry directory name inside the tarball
    entry = '%s_%s.%03d' % (record.name, time.strftime('%Y-%m-%d_%H:%M:%S', time.localtime(record.created)), int(record.msecs))
    files = record.files
    files['info'] = 'call=%s\nexit=%s\n' % (repr(record.proc.get_call()), record.proc.status(0))
    files['stdout'] = record.proc.stdout.read_log()
    files['stderr'] = record.proc.stderr.read_log()
    files['stdin'] = record.proc.stdin.read_log()
    try:
        tar = tarfile.TarFile.open(self._fn, 'a')
        try:
            for key, value in record.files.items():
                # A value may be a path to an existing file - inline its content instead
                if os.path.exists(value):
                    value = SafeFile(value).read()
                fileObj = VirtualFile(os.path.join(entry, key), [value])
                info, handle = fileObj.getTarInfo()
                tar.addfile(info, handle)
                handle.close()
        finally:
            # Close the archive even if adding an entry fails (original leaked the handle)
            tar.close()
    except Exception:
        raise GCError('Unable to log results of external call "%s" to "%s"' % (record.proc.get_call(), self._fn))
def __init__(self, configFiles):
    """Execute the given python config scripts and fill from the resulting settings dict.

    Each file is executed with the Settings class in scope; the accumulated
    configuration is then handed to DictConfigFiller.
    """
    from gcSettings import Settings
    for fnConfig in configFiles:
        handle = SafeFile(fnConfig)
        try:
            source = handle.read()
            utils.execWrapper(source, {'Settings': Settings})
        finally:
            handle.close()
    DictConfigFiller.__init__(self, Settings.getConfigDict())
def _logDisabledJobs(self):
    """Write the list of disabled jobs to the disabled-jobs logfile and notify the user.

    Raises JobError when the logfile cannot be written.
    """
    disabled = self.jobDB.getJobs(ClassSelector(JobClass.DISABLED))
    try:
        fp = SafeFile(self._disabled_jobs_logfile, 'w')
        try:
            fp.write(str.join('\n', imap(str, disabled)))
        finally:
            # Close the handle even when the write fails (original leaked it)
            fp.close()
    except Exception:
        raise JobError('Could not write disabled jobs to file %s!' % self._disabled_jobs_logfile)
    if disabled:
        self._log_user_time.warning('There are %d disabled jobs in this task!', len(disabled))
        self._log_user_time.debug('Please refer to %s for a complete list of disabled jobs.', self._disabled_jobs_logfile)
def _submitJob(self, jobNum, module):
    """Submit a single job via a generated JDL file.

    jobNum: job number to submit; module: task module used to build the JDL
    Returns (jobNum, gcID-or-None, {'jdl': jdl text}); a None gcID marks a failed submit.
    Raises BackendError if the JDL file cannot be written.
    """
    fd, jdl = tempfile.mkstemp('.jdl')
    try:
        jdlData = self.makeJDL(jobNum, module)
        utils.safeWrite(os.fdopen(fd, 'w'), jdlData)
    except Exception:
        utils.removeFiles([jdl])
        raise BackendError('Could not write jdl data to %s.' % jdl)
    try:
        # Build the submission tool argument list from the configured non-empty parameters
        submitArgs = []
        for key_value in utils.filterDict(self._submitParams, vF=lambda v: v).items():
            submitArgs.extend(key_value)
        submitArgs.append(jdl)
        activity = Activity('submitting job %d' % jobNum)
        proc = LocalProcess(self._submitExec, '--nomsg', '--noint', '--logfile', '/dev/stderr', *submitArgs)
        gcID = None
        # The submission tool prints the job URL on stdout - take the last matching line
        for line in ifilter(lambda x: x.startswith('http'), imap(str.strip, proc.stdout.iter(timeout=60))):
            gcID = line
        retCode = proc.status(timeout=0, terminate=True)
        activity.finish()
        if (retCode != 0) or (gcID is None):
            if self.explainError(proc, retCode):
                pass
            else:
                self._log.log_process(proc, files={'jdl': SafeFile(jdl).read()})
    finally:
        utils.removeFiles([jdl])
    # BUGFIX: utils.QM evaluates both branches eagerly, so the original called
    # self._createId(None) for failed submissions - use a real conditional instead
    if gcID is not None:
        wmsID = self._createId(gcID)
    else:
        wmsID = None
    return (jobNum, wmsID, {'jdl': str.join('', jdlData)})
def updateErrorDict(self, fileName):
    """Parse '# <code> - <message>' comment lines from fileName into self.errorDict."""
    for rawLine in SafeFile(fileName).readlines():
        if not rawLine.startswith('#'):
            continue
        parts = [token.strip() for token in rawLine.lstrip('#').split(' - ', 1)]
        if (len(parts) == 2) and parts[0].isdigit():
            self.errorDict[int(parts[0])] = parts[1]
def _write_file(self, fn, message=None, **kwargs):
    """Write an optional message followed by the view's output to file fn.

    fn: target file name; message: optional text written before the view output;
    kwargs: passed through to self._view.write
    """
    fp = SafeFile(fn, 'w')
    try:
        if message is not None:
            fp.write(message)
        self._view.write(fp, **kwargs)
    finally:
        # Release the handle even if the view write raises (original leaked it)
        fp.close()
def commit(self, jobNum, jobObj):
    """Serialize jobObj to its per-job text file and register it in the in-memory map.

    jobNum: job number (determines the file name); jobObj: job object to persist
    The in-memory map is only updated after a successful write.
    """
    fp = SafeFile(os.path.join(self._dbPath, 'job_%d.txt' % jobNum), 'w')
    try:
        fp.writelines(self._fmt.format(self._serialize_job_obj(jobObj)))
    finally:
        # Close the handle even if formatting/writing fails (original leaked it)
        fp.close()
    self._jobMap[jobNum] = jobObj
def commit(self, jobNum, jobObj):
    """Persist all properties of jobObj to its per-job text file in the database directory."""
    fn = os.path.join(self._dbPath, 'job_%d.txt' % jobNum)
    serializer = utils.DictFormat(escapeString=True)
    fp = SafeFile(fn, 'w')
    fp.writelines(serializer.format(jobObj.getAll()))
    fp.close()
def _getJobsOutput(self, ids):
    """Generator: retrieve job output directories for the given (wmsID, jobNum) pairs.

    Yields (jobNum, outputDir) for retrieved jobs, (None, dir) for unidentifiable
    leftover dirs after an error, and (jobNum, None) for unretrievable jobs.
    NOTE(review): Python 2 idioms in use - 'raise StopIteration' inside a generator
    (a RuntimeError under PEP 479 / py3.7+) and list-like dict.values().remove();
    confirm intended interpreter before porting.
    """
    if len(ids) == 0:
        raise StopIteration
    basePath = os.path.join(self._outputPath, 'tmp')
    try:
        if len(ids) == 1:  # For single jobs create single subdir
            tmpPath = os.path.join(basePath, md5(ids[0][0]).hexdigest())
        else:
            tmpPath = basePath
        utils.ensureDirExists(tmpPath)
    except Exception:
        raise BackendError('Temporary path "%s" could not be created.' % tmpPath, BackendError)
    jobNumMap = dict(ids)  # wmsID -> jobNum
    jobs = self.writeWMSIds(ids)
    activity = Activity('retrieving %d job outputs' % len(ids))
    proc = LocalProcess(self._outputExec, '--noint', '--logfile', '/dev/stderr', '-i', jobs, '--dir', tmpPath)
    # yield output dirs
    todo = jobNumMap.values()
    currentJobNum = None
    # The tool alternates output: first a job id line, then the matching output dir line
    for line in imap(str.strip, proc.stdout.iter(timeout=60)):
        if line.startswith(tmpPath):
            todo.remove(currentJobNum)
            outputDir = line.strip()
            if os.path.exists(outputDir):
                # Unpack wildcard output archive in place if present
                if 'GC_WC.tar.gz' in os.listdir(outputDir):
                    wildcardTar = os.path.join(outputDir, 'GC_WC.tar.gz')
                    try:
                        tarfile.TarFile.open(wildcardTar, 'r:gz').extractall(outputDir)
                        os.unlink(wildcardTar)
                    except Exception:
                        self._log.error('Can\'t unpack output files contained in %s', wildcardTar)
            yield (currentJobNum, line.strip())
            currentJobNum = None
        else:
            # Non-path line: remember the job number announced by this id line
            currentJobNum = jobNumMap.get(self._createId(line), currentJobNum)
    retCode = proc.status(timeout=0, terminate=True)
    activity.finish()
    if retCode != 0:
        if 'Keyboard interrupt raised by user' in proc.stderr.read(timeout=0):
            utils.removeFiles([jobs, basePath])
            raise StopIteration
        else:
            self._log.log_process(proc, files={'jobs': SafeFile(jobs).read()})
        self._log.error('Trying to recover from error ...')
        # Hand back whatever directories were produced, without job number attribution
        for dirName in os.listdir(basePath):
            yield (None, os.path.join(basePath, dirName))
    # return unretrievable jobs
    for jobNum in todo:
        yield (jobNum, None)
    utils.removeFiles([jobs, basePath])