def __init__(self, config, name):
	"""Initialize a CMSSW task on top of SCRAMTask.

	Sets CMS-specific dataset defaults, validates that the SCRAM project
	area is a CMSSW project, prepares the cmsRun config files and (re)creates
	the project area tarball when needed.

	Raises ConfigError if the project area is not CMSSW or if the deprecated
	top-level 'executable' option is used instead of prolog/epilog.
	"""
	# CMS dataset defaults: DBS3 lookup, event-based splitting, lumi filtering
	config.set('se input timeout', '0:30')
	config.set('dataset provider', 'DBS3Provider')
	config.set('dataset splitter', 'EventBoundarySplitter')
	config.set('dataset processor', 'LumiDataProcessor', '+=')
	config.set('partition processor', 'TFCPartitionProcessor LocationPartitionProcessor MetaPartitionProcessor ' +
		'LFNPartitionProcessor LumiPartitionProcessor CMSSWPartitionProcessor')
	# Report 'cmsRun' as application name in the dashboard section
	dash_config = config.changeView(viewClass = 'SimpleConfigView', setSections = ['dashboard'])
	dash_config.set('application', 'cmsRun')
	SCRAMTask.__init__(self, config, name)
	if self._scramProject != 'CMSSW':
		raise ConfigError('Project area contains no CMSSW project')
	# Remember the release area the project was originally built against (if any)
	self._oldReleaseTop = None
	if self._projectArea:
		self._oldReleaseTop = self._parse_scram_file(os.path.join(self._projectArea, '.SCRAM', self._scramArch, 'Environment')).get('RELEASETOP', None)
	self.updateErrorDict(utils.pathShare('gc-run.cmssw.sh', pkg = 'grid_control_cms'))
	# Tarball can be stored on an SE instead of being shipped in the sandbox
	self._projectAreaTarballSE = config.getBool(['se runtime', 'se project area'], True)
	self._projectAreaTarball = config.getWorkPath('cmssw-project-area.tar.gz')
	# Prolog / Epilog script support - warn about old syntax
	self.prolog = TaskExecutableWrapper(config, 'prolog', '')
	self.epilog = TaskExecutableWrapper(config, 'epilog', '')
	if config.getPaths('executable', []) != []:
		raise ConfigError('Prefix executable and argument options with either prolog or epilog!')
	self.arguments = config.get('arguments', '')
	# Get cmssw config files and check their existance
	# Check that for dataset jobs the necessary placeholders are in the config file
	if self._dataSplitter is None:
		self.eventsPerJob = config.get('events per job', '0') # this can be a variable like @USER_EVENTS@!
	fragment = config.getPath('instrumentation fragment', utils.pathShare('fragmentForCMSSW.py', pkg = 'grid_control_cms'))
	self.configFiles = self._processConfigFiles(config, list(self._getConfigFiles(config)), fragment,
		autoPrepare = config.getBool('instrumentation', True),
		mustPrepare = (self._dataSplitter is not None))
	# Create project area tarball
	if self._projectArea and not os.path.exists(self._projectAreaTarball):
		config.setState(True, 'init', detail = 'sandbox')
	# Information about search order for software environment
	self.searchLoc = self._getCMSSWPaths(config)
	if config.getState('init', detail = 'sandbox'):
		if os.path.exists(self._projectAreaTarball):
			# Ask before regenerating an existing tarball; keep it if the user declines
			if not utils.getUserBool('CMSSW tarball already exists! Do you want to regenerate it?', True):
				return
		# Generate CMSSW tarball
		if self._projectArea:
			utils.genTarball(self._projectAreaTarball, utils.matchFiles(self._projectArea, self._projectAreaPattern))
		if self._projectAreaTarballSE:
			config.setState(True, 'init', detail = 'storage')
def __init__(self, config, name):
	"""Set up the host backend: submit via the bundled script, query via 'ps', cancel via 'kill'."""
	host_submit_script = utils.pathShare('gc-host.sh')
	ps_binary = utils.resolveInstallPath('ps')
	kill_binary = utils.resolveInstallPath('kill')
	LocalWMS.__init__(self, config, name,
		submitExec = host_submit_script,
		statusExec = ps_binary,
		cancelExec = kill_binary)
def getSBInFiles(self):
	"""Return the sandbox input file list: base/prolog/epilog files, cmsRun configs, optional project tarball and the run script."""
	sb_files = DataTask.getSBInFiles(self) + self.prolog.getSBInFiles() + self.epilog.getSBInFiles()
	for config_file in self.configFiles:
		sb_files.append(utils.Result(pathAbs = config_file, pathRel = os.path.basename(config_file)))
	# Ship the project area tarball in the sandbox unless it is stored on an SE
	if len(self.projectArea) and not self._projectAreaTarballSE:
		sb_files.append(utils.Result(pathAbs = self._projectAreaTarball,
			pathRel = os.path.basename(self._projectAreaTarball)))
	run_script = utils.pathShare('gc-run.cmssw.sh', pkg = 'grid_control_cms')
	sb_files.append(utils.Result(pathAbs = run_script, pathRel = 'gc-run.cmssw.sh'))
	return sb_files
def _submitJob(self, jobNum, module):
	"""Submit a single job to the local batch system.

	Creates a per-job sandbox directory, stages input files into it, writes
	the job configuration and invokes the submit executable.

	Returns (jobNum, wmsId or None, {'sandbox': sandbox}); raises
	BackendError if the sandbox directory cannot be created.
	"""
	activity = utils.ActivityLog('submitting jobs')
	# Pre-assign sandbox so the error message below references a valid path even
	# when tempfile.mkdtemp itself raises (previously this was a NameError).
	sandbox = self.sandPath
	try:
		sandbox = tempfile.mkdtemp('', '%s.%04d.' % (module.taskID, jobNum), self.sandPath)
	except Exception:
		raise BackendError('Unable to create sandbox directory "%s"!' % sandbox)
	sbPrefix = sandbox.replace(self.sandPath, '').lstrip('/')
	def translateTarget(d, s, t):
		# Retarget transfer destination into the per-job sandbox subdirectory
		return (d, s, os.path.join(sbPrefix, t))
	self.smSBIn.doTransfer(ismap(translateTarget, self._getSandboxFilesIn(module)))

	self._writeJobConfig(os.path.join(sandbox, '_jobconfig.sh'), jobNum, module, {
		'GC_SANDBOX': sandbox, 'GC_SCRATCH_SEARCH': str.join(' ', self.scratchPath)})
	reqs = self.brokerSite.brokerAdd(module.getRequirements(jobNum), WMS.SITES)
	reqs = dict(self.brokerQueue.brokerAdd(reqs, WMS.QUEUES))
	if (self.memory > 0) and (reqs.get(WMS.MEMORY, 0) < self.memory):
		reqs[WMS.MEMORY] = self.memory # local jobs need higher (more realistic) memory requirements

	(stdout, stderr) = (os.path.join(sandbox, 'gc.stdout'), os.path.join(sandbox, 'gc.stderr'))
	jobName = module.getDescription(jobNum).jobName
	proc = utils.LoggedProcess(self.submitExec, '%s %s "%s" %s' % (self.submitOpts,
		self.getSubmitArguments(jobNum, jobName, reqs, sandbox, stdout, stderr),
		utils.pathShare('gc-local.sh'), self.getJobArguments(jobNum, sandbox)))
	retCode = proc.wait()
	wmsIdText = proc.getOutput().strip().strip('\n')
	try:
		wmsId = self.parseSubmitOutput(wmsIdText)
	except Exception:
		wmsId = None
	del activity

	if retCode != 0:
		self._log.warning('%s failed:', self.submitExec)
	elif wmsId is None:
		self._log.warning('%s did not yield job id:\n%s', self.submitExec, wmsIdText)
	if wmsId:
		wmsId = self._createId(wmsId)
		# Touch a marker file named after the wms id - close the handle to avoid an fd leak
		open(os.path.join(sandbox, wmsId), 'w').close()
	else:
		proc.logError(self.errorLog)
	return (jobNum, utils.QM(wmsId, wmsId, None), {'sandbox': sandbox})
def __init__(self, config, wmsName):
	"""Base initialization for batch-system backends.

	Announces the backend, materializes the patched run library in the work
	directory, creates output/fail paths and wires up the access token and
	the sandbox/SE storage managers.
	"""
	WMS.__init__(self, config, wmsName)
	# Announce which batch system implementation is driving this backend
	if self.wmsName != self.__class__.__name__.upper():
		utils.vprint('Using batch system: %s (%s)' % (self.__class__.__name__, self.wmsName), -1)
	else:
		utils.vprint('Using batch system: %s' % self.wmsName, -1)
	self.errorLog = config.getWorkPath('error.tar')
	self._runlib = config.getWorkPath('gc-run.lib')
	# Create the run library once in the work dir, patching in the current version string
	if not os.path.exists(self._runlib):
		fp = SafeFile(self._runlib, 'w')
		content = SafeFile(utils.pathShare('gc-run.lib')).read()
		fp.write(content.replace('__GC_VERSION__', __import__('grid_control').__version__))
		fp.close()
	self._outputPath = config.getWorkPath('output')
	utils.ensureDirExists(self._outputPath, 'output directory')
	self._failPath = config.getWorkPath('fail')
	# Initialise access token, broker and storage manager
	self._token = config.getCompositePlugin(['proxy', 'access token'], 'TrivialAccessToken',
		'MultiAccessToken', cls = AccessToken, inherit = True, tags = [self])
	# UI -> SE -> WN
	self.smSEIn = config.getPlugin('se input manager', 'SEStorageManager', cls = StorageManager,
		tags = [self], pargs = ('se', 'se input', 'SE_INPUT'))
	self.smSBIn = config.getPlugin('sb input manager', 'LocalSBStorageManager', cls = StorageManager,
		tags = [self], pargs = ('sandbox', 'sandbox', 'SB_INPUT'))
	# UI <- SE <- WN
	self.smSEOut = config.getPlugin('se output manager', 'SEStorageManager', cls = StorageManager,
		tags = [self], pargs = ('se', 'se output', 'SE_OUTPUT'))
	self.smSBOut = None
	self.fileNamesEnvironment = config.getBool("file names environment", True, onChange = None)
def run(args=None, intro=True):
	"""Top-level grid-control entry point.

	Parses the configuration, builds the workflow and runs it; all
	exceptions are routed through gc_excepthook and mapped to a non-zero
	exit code.

	args  -- command line arguments (defaults to sys.argv[1:])
	intro -- show logo and version banner unless GC_DISABLE_INTRO is set
	"""
	# display the 'grid-control' logo and version
	if intro and not os.environ.get('GC_DISABLE_INTRO'):
		sys.stdout.write(SafeFile(utils.pathShare('logo.txt'), 'r').read())
		sys.stdout.write('Revision: %s\n' % utils.getVersion())
	pyver = (sys.version_info[0], sys.version_info[1])
	if pyver < (2, 3):
		utils.deprecated('This python version (%d.%d) is not supported anymore!' % pyver)
	Activity.root = Activity('Running grid-control', name='root') # top level activity instance

	# main try... except block to catch exceptions and show error message
	try:
		config = gc_create_config(args or sys.argv[1:], useDefaultFiles=True)
		workflow = gc_create_workflow(config)
		try:
			sys.exit(workflow.run())
		finally:
			sys.stdout.write('\n')
	except SystemExit: # avoid getting caught for Python < 2.5
		raise
	except Exception: # coverage overrides sys.excepthook
		gc_excepthook(*sys.exc_info())
		# os.EX_SOFTWARE exists only on Unix - fall back to its conventional value (70)
		sys.exit(getattr(os, 'EX_SOFTWARE', 70))
def __init__(self, config, name):
	"""Set up a ROOT-based user task: resolve the ROOT installation and detect bundled executables."""
	# Determine ROOT path from previous settings / environment / config file
	rootsys_fallback = os.environ.get('ROOTSYS', '')
	self._rootpath = config.get('root path', rootsys_fallback, persistent=True,
		onChange=changeInitNeeded('sandbox'))
	if not self._rootpath:
		raise ConfigError('Either set environment variable "ROOTSYS" or set option "root path"!')
	utils.vprint('Using the following ROOT path: %s' % self._rootpath, -1)

	# Special handling for executables bundled with ROOT
	self._executable = config.get('executable', onChange=changeInitNeeded('sandbox'))
	bundled_candidate = os.path.join(self._rootpath, 'bin', self._executable.lstrip('/'))
	self.builtIn = os.path.exists(bundled_candidate)
	if self.builtIn:
		config.set('send executable', 'False') # store resolved built-in executable path?

	# Apply default handling from UserTask
	UserTask.__init__(self, config, name)
	self.updateErrorDict(utils.pathShare('gc-run.root.sh'))

	# Collect lib files needed by executable
	self.libFiles = []
def getSBInFiles(self):
	"""Return the sandbox input file list: base/prolog/epilog files, cmsRun configs, optional project tarball and the run script."""
	result = SCRAMTask.getSBInFiles(self) + self.prolog.getSBInFiles() + self.epilog.getSBInFiles()
	for config_file in self.configFiles:
		result.append(utils.Result(pathAbs = config_file, pathRel = os.path.basename(config_file)))
	# Ship the project area tarball in the sandbox unless it is stored on an SE
	if self._projectArea and not self._projectAreaTarballSE:
		result.append(utils.Result(pathAbs = self._projectAreaTarball,
			pathRel = os.path.basename(self._projectAreaTarball)))
	run_script = utils.pathShare('gc-run.cmssw.sh', pkg = 'grid_control_cms')
	result.append(utils.Result(pathAbs = run_script, pathRel = 'gc-run.cmssw.sh'))
	return result
def __init__(self, config, name):
	"""Initialize a CMSSW task (DataTask-based variant).

	Sets CMS dataset defaults, configures the SCRAM environment, prepares
	the cmsRun config files and (re)creates the project area tarball when
	necessary.

	Raises ConfigError if the deprecated top-level 'executable' option is
	used instead of prolog/epilog.
	"""
	# CMS dataset defaults: DBS3 lookup, event-based splitting, lumi filtering
	config.set('se input timeout', '0:30')
	config.set('dataset provider', 'DBS3Provider')
	config.set('dataset splitter', 'EventBoundarySplitter')
	config.set('partition processor', 'CMSPartitionProcessor LocationPartitionProcessor LumiPartitionProcessor')
	config.set('dataset processor', 'LumiDataProcessor', '+=')
	DataTask.__init__(self, config, name)
	self.updateErrorDict(utils.pathShare('gc-run.cmssw.sh', pkg = 'grid_control_cms'))
	# SCRAM settings
	self._configureSCRAMSettings(config)
	self.useReqs = config.getBool('software requirements', True, onChange = None)
	# Tarball can be stored on an SE instead of being shipped in the sandbox
	self._projectAreaTarballSE = config.getBool(['se project area', 'se runtime'], True)
	self._projectAreaTarball = config.getWorkPath('cmssw-project-area.tar.gz')
	# Information about search order for software environment
	self.searchLoc = self._getCMSSWPaths(config)
	# Prolog / Epilog script support - warn about old syntax
	self.prolog = TaskExecutableWrapper(config, 'prolog', '')
	self.epilog = TaskExecutableWrapper(config, 'epilog', '')
	if config.getPaths('executable', []) != []:
		raise ConfigError('Prefix executable and argument options with either prolog or epilog!')
	self.arguments = config.get('arguments', '')
	# Get cmssw config files and check their existance
	# Check that for dataset jobs the necessary placeholders are in the config file
	if self.dataSplitter is None:
		self.eventsPerJob = config.get('events per job', '0')
	fragment = config.getPath('instrumentation fragment', utils.pathShare('fragmentForCMSSW.py', pkg = 'grid_control_cms'))
	self.configFiles = self._processConfigFiles(config, list(self._getConfigFiles(config)), fragment,
		autoPrepare = config.getBool('instrumentation', True),
		mustPrepare = (self.dataSplitter is not None))
	# Create project area tarball
	if not os.path.exists(self._projectAreaTarball):
		config.setState(True, 'init', detail = 'sandbox')
	if config.getState('init', detail = 'sandbox'):
		if os.path.exists(self._projectAreaTarball):
			# Ask before regenerating an existing tarball; keep it if the user declines
			if not utils.getUserBool('CMSSW tarball already exists! Do you want to regenerate it?', True):
				return
		# Generate CMSSW tarball
		if self.projectArea:
			utils.genTarball(self._projectAreaTarball, utils.matchFiles(self.projectArea, self.pattern))
		if self._projectAreaTarballSE:
			config.setState(True, 'init', detail = 'storage')
def __init__(self, config, name):
	"""Set up the host backend with its submit script and job check / cancel executors."""
	submit_script = utils.pathShare('gc-host.sh')
	check_executor = CheckJobsMissingState(config, Host_CheckJobs(config))
	cancel_executor = Host_CancelJobs(config)
	LocalWMS.__init__(self, config, name,
		submitExec=submit_script,
		checkExecutor=check_executor,
		cancelExecutor=cancel_executor)
def __init__(self, config, name, checkExecutor, cancelExecutor):
	"""Base initialization for batch-system backends (executor-based variant).

	checkExecutor / cancelExecutor -- backend-specific job status / cancel
	executors; both are set up with this backend's logger.
	"""
	WMS.__init__(self, config, name)
	for executor in [checkExecutor, cancelExecutor]:
		executor.setup(self._log)
	(self._check_executor, self._cancel_executor) = (checkExecutor, cancelExecutor)
	# Announce which batch system implementation is driving this backend
	if self._name != self.__class__.__name__.upper():
		self._log.info('Using batch system: %s (%s)', self.__class__.__name__, self._name)
	else:
		self._log.info('Using batch system: %s', self._name)
	self.errorLog = config.getWorkPath('error.tar')
	self._runlib = config.getWorkPath('gc-run.lib')
	# Create the run library once in the work dir, patching in the current version string
	if not os.path.exists(self._runlib):
		fp = SafeFile(self._runlib, 'w')
		content = SafeFile(utils.pathShare('gc-run.lib')).read()
		fp.write(content.replace('__GC_VERSION__', __import__('grid_control').__version__))
		fp.close()
	self._outputPath = config.getWorkPath('output')
	self._filecachePath = config.getWorkPath('files')
	utils.ensureDirExists(self._outputPath, 'output directory')
	self._failPath = config.getWorkPath('fail')
	# Initialise access token and storage managers
	# UI -> SE -> WN
	self.smSEIn = config.getPlugin('se input manager', 'SEStorageManager', cls=StorageManager,
		tags=[self], pargs=('se', 'se input', 'SE_INPUT'))
	self.smSBIn = config.getPlugin('sb input manager', 'LocalSBStorageManager', cls=StorageManager,
		tags=[self], pargs=('sandbox', 'sandbox', 'SB_INPUT'))
	# UI <- SE <- WN
	self.smSEOut = config.getPlugin('se output manager', 'SEStorageManager', cls=StorageManager,
		tags=[self], pargs=('se', 'se output', 'SE_OUTPUT'))
	self.smSBOut = None
	self._token = config.getCompositePlugin(['proxy', 'access token'], 'TrivialAccessToken',
		'MultiAccessToken', cls=AccessToken, inherit=True, tags=[self])
def se_runcmd(cmd, varDict, *urls):
	"""Build a LocalProcess that sources gc-run.lib and runs *cmd* on the given storage URLs.

	varDict entries are exported into the shell environment before the
	command runs; 'dir://' URL prefixes are rewritten to 'file://'.
	"""
	run_lib = utils.pathShare('gc-run.lib')
	quoted_urls = []
	for url in urls:
		quoted_urls.append('"%s"' % ensurePrefix(url).replace('dir://', 'file://'))
	url_args = ' '.join(quoted_urls)
	exports = ' '.join('export %s="%s";' % (key, varDict[key]) for key in varDict)
	shell_cmd = '. %s || exit 99; %s %s %s' % (run_lib, exports, cmd, url_args)
	return LocalProcess('/bin/bash', '-c', shell_cmd)
def __init__(self, config, wmsName):
	"""Base initialization for batch-system backends.

	Announces the backend, materializes the patched run library in the work
	directory, creates output/fail paths and wires up the access token and
	the sandbox/SE storage managers.
	"""
	WMS.__init__(self, config, wmsName)
	# Announce which batch system implementation is driving this backend
	if self.wmsName != self.__class__.__name__.upper():
		utils.vprint('Using batch system: %s (%s)' % (self.__class__.__name__, self.wmsName), -1)
	else:
		utils.vprint('Using batch system: %s' % self.wmsName, -1)
	self.errorLog = config.getWorkPath('error.tar')
	self._runlib = config.getWorkPath('gc-run.lib')
	# Create the run library once in the work dir, patching in the current version string
	if not os.path.exists(self._runlib):
		fp = SafeFile(self._runlib, 'w')
		content = SafeFile(utils.pathShare('gc-run.lib')).read()
		fp.write(content.replace('__GC_VERSION__', __import__('grid_control').__version__))
		fp.close()
	self._outputPath = config.getWorkPath('output')
	utils.ensureDirExists(self._outputPath, 'output directory')
	self._failPath = config.getWorkPath('fail')
	# Initialise access token, broker and storage manager
	self._token = config.getCompositePlugin(['proxy', 'access token'], 'TrivialAccessToken',
		'MultiAccessToken', cls=AccessToken, inherit=True, tags=[self])
	# UI -> SE -> WN
	self.smSEIn = config.getPlugin('se input manager', 'SEStorageManager', cls=StorageManager,
		tags=[self], pargs=('se', 'se input', 'SE_INPUT'))
	self.smSBIn = config.getPlugin('sb input manager', 'LocalSBStorageManager', cls=StorageManager,
		tags=[self], pargs=('sandbox', 'sandbox', 'SB_INPUT'))
	# UI <- SE <- WN
	self.smSEOut = config.getPlugin('se output manager', 'SEStorageManager', cls=StorageManager,
		tags=[self], pargs=('se', 'se output', 'SE_OUTPUT'))
	self.smSBOut = None
	self.fileNamesEnvironment = config.getBool("file names environment", True, onChange=None)
def gc_cmd_line_parser(cmd_line_args):
	"""Parse the grid-control command line and configure preliminary logging.

	Returns (opts, args) where args holds exactly one config file path;
	exits the process for --help, missing or surplus arguments.
	"""
	parser = Options(usage = '%s [OPTIONS] <config file>', add_help_option = False)
	parser.addBool(None, ' ', 'debug', default = False)
	parser.addBool(None, ' ', 'help-conf', default = False)
	parser.addBool(None, ' ', 'help-confmin', default = False)
	parser.addBool(None, 'c', 'continuous', default = False)
	parser.addBool(None, 'h', 'help', default = False)
	parser.addBool(None, 'i', 'init', default = False)
	parser.addBool(None, 'q', 'resync', default = False)
	parser.addBool(None, 's', 'no-submission', default = True, dest = 'submission')
	parser.addBool(None, 'G', 'gui', default = False, dest = 'gui_ansi')
	parser.addBool(None, 'W', 'webserver', default = False, dest = 'gui_cp')
	parser.addAccu(None, 'v', 'verbose')
	parser.addList(None, 'l', 'logging')
	parser.addList(None, 'o', 'override')
	parser.addText(None, ' ', 'action')
	parser.addText(None, 'd', 'delete')
	parser.addText(None, 'J', 'job-selector')
	parser.addText(None, 'm', 'max-retry')
	parser.addText(None, ' ', 'reset')
	# Deprecated options - refer to new report script instead
	for (sopt, lopt) in [('-r', 'report'), ('-R', 'site-report'), ('-T', 'time-report'),
			('-M', 'task-report'), ('-D', 'detail-report'), ('', 'help-vars')]:
		parser.addBool(None, sopt, lopt, default = False, dest = 'old_report')
	(opts, args, _) = parser.parse(args = cmd_line_args)
	opts.gui = None
	if opts.gui_ansi:
		opts.gui = 'ANSIGUI'
	elif opts.gui_cp:
		opts.gui = 'CPWebserver'
	opts.continuous = opts.continuous or None # either True or None
	# Display help
	if opts.help:
		# Read help text via try/finally so the file handle is not leaked
		help_fp = open(utils.pathShare('help.txt'), 'r')
		try:
			help_msg = help_fp.read()
		finally:
			help_fp.close()
		sys.stderr.write('%s\n\n%s\n' % (parser.usage(), help_msg))
		sys.exit(os.EX_USAGE)
	# Require single config file argument
	if len(args) == 0:
		utils.exitWithUsage(parser.usage(), 'Config file not specified!')
	elif len(args) > 1:
		utils.exitWithUsage(parser.usage(), 'Invalid command line arguments: %r' % cmd_line_args)
	# Warn about deprecated report options
	if opts.old_report:
		utils.deprecated('Please use the more versatile report tool in the scripts directory!')
	# Configure preliminary logging
	utils.verbosity(opts.verbose)
	logging.getLogger().setLevel(max(1, logging.DEFAULT - opts.verbose))
	if opts.debug: # Setup initial debug handler before it is reconfigured by logging_setup
		handler = logging.StreamHandler(sys.stdout)
		handler.setFormatter(ExceptionFormatter(showCodeContext = 2, showVariables = 1, showFileStack = 1))
		logging.getLogger('exception').addHandler(handler)
	return (opts, args)
def parse_cmd_line(cmd_line_args):
	"""Parse the grid-control command line and configure preliminary logging.

	Returns (opts, args) where args holds exactly one config file path;
	exits the process for --help, missing or surplus arguments.
	"""
	parser = Options(usage='%s [OPTIONS] <config file>', add_help_option=False)
	parser.addBool(None, ' ', 'debug', default=False)
	parser.addBool(None, ' ', 'help-conf', default=False)
	parser.addBool(None, ' ', 'help-confmin', default=False)
	parser.addBool(None, 'c', 'continuous', default=False)
	parser.addBool(None, 'h', 'help', default=False)
	parser.addBool(None, 'i', 'init', default=False)
	parser.addBool(None, 'q', 'resync', default=False)
	parser.addBool(None, ' ', 'createwd', default=False)
	parser.addBool(None, 's', 'no-submission', default=True, dest='submission')
	parser.addBool(None, 'G', 'gui', default=False, dest='gui_ansi')
	parser.addAccu(None, 'v', 'verbose')
	parser.addList(None, 'l', 'logging')
	parser.addList(None, 'o', 'override')
	parser.addText(None, ' ', 'action')
	parser.addText(None, 'd', 'delete')
	parser.addText(None, 'J', 'job-selector')
	parser.addText(None, 'm', 'max-retry')
	parser.addText(None, ' ', 'reset')
	# Deprecated options - refer to new report script instead
	for (sopt, lopt) in [('-r', 'report'), ('-R', 'site-report'), ('-T', 'time-report'),
			('-M', 'task-report'), ('-D', 'detail-report'), ('', 'help-vars')]:
		parser.addBool(None, sopt, lopt, default=False, dest='old_report')
	(opts, args, _) = parser.parse(args=cmd_line_args)
	opts.gui = None
	if opts.gui_ansi:
		opts.gui = 'ANSIGUI'
	opts.continuous = opts.continuous or None # either True or None
	# Display help
	if opts.help:
		# Read help text via try/finally so the file handle is not leaked
		help_fp = open(utils.pathShare('help.txt'), 'r')
		try:
			help_msg = help_fp.read()
		finally:
			help_fp.close()
		utils.exitWithUsage(parser.usage(), help_msg, show_help=False)
	# Require single config file argument
	if len(args) == 0:
		utils.exitWithUsage(parser.usage(), 'Config file not specified!')
	elif len(args) > 1:
		utils.exitWithUsage(parser.usage(), 'Invalid command line arguments: %r' % cmd_line_args)
	# Warn about deprecated report options
	if opts.old_report:
		utils.deprecated('Please use the more versatile report tool in the scripts directory!')
	# Configure preliminary logging
	logging.getLogger().setLevel(max(1, logging.DEFAULT - opts.verbose))
	return (opts, args)
def __init__(self, config, name):
	"""Initialize the common task settings.

	Sets up job requirements, the persistent task ID/date, storage limits,
	sandbox input/output file lists and the parameter source machinery.
	"""
	NamedPlugin.__init__(self, config, name)
	initSandbox = changeInitNeeded('sandbox')
	self._varCheck = validNoVar(config)

	# Task requirements
	jobs_config = config.changeView(viewClass = 'TaggedConfigView', addSections = ['jobs'], addTags = [self]) # Move this into parameter manager?
	self.wallTime = jobs_config.getTime('wall time', onChange = None)
	# cpu time defaults to the wall time limit
	self.cpuTime = jobs_config.getTime('cpu time', self.wallTime, onChange = None)
	self.cpus = jobs_config.getInt('cpus', 1, onChange = None)
	self.memory = jobs_config.getInt('memory', -1, onChange = None)
	self.nodeTimeout = jobs_config.getTime('node timeout', -1, onChange = initSandbox)

	# Compute / get task ID - persisted so reruns reuse the same ID
	self.taskID = config.get('task id', 'GC' + md5_hex(str(time()))[:12], persistent = True)
	self.taskDate = config.get('task date', strftime('%Y-%m-%d'), persistent = True, onChange = initSandbox)
	self.taskConfigName = config.getConfigName()
	self._job_name_generator = config.getPlugin('job name generator', 'DefaultJobName', cls = JobNamePlugin, pargs = (self,))

	# Storage setup
	storage_config = config.changeView(viewClass = 'TaggedConfigView',
		setClasses = None, setNames = None, addSections = ['storage'], addTags = [self])
	self.taskVariables = {
		# Space limits
		'SCRATCH_UL': storage_config.getInt('scratch space used', 5000, onChange = initSandbox),
		'SCRATCH_LL': storage_config.getInt('scratch space left', 1, onChange = initSandbox),
		'LANDINGZONE_UL': storage_config.getInt('landing zone space used', 100, onChange = initSandbox),
		'LANDINGZONE_LL': storage_config.getInt('landing zone space left', 1, onChange = initSandbox),
	}
	storage_config.set('se output pattern', 'job_@GC_JOB_ID@_@X@')
	self.seMinSize = storage_config.getInt('se min size', -1, onChange = initSandbox)

	self.sbInputFiles = config.getPaths('input files', [], onChange = initSandbox)
	self.sbOutputFiles = config.getList('output files', [], onChange = initSandbox)
	self.gzipOut = config.getBool('gzip output', True, onChange = initSandbox)
	self._subst_files = config.getList('subst files', [], onChange = initSandbox)
	self.dependencies = lmap(str.lower, config.getList('depends', [], onChange = initSandbox))

	# Get error messages from gc-run.lib comments
	self.errorDict = {}
	self.updateErrorDict(utils.pathShare('gc-run.lib'))

	# Init parameter source manager
	psrc_repository = {}
	self._setupJobParameters(config, psrc_repository)
	self._pfactory = config.getPlugin('internal parameter factory', 'BasicParameterFactory',
		cls = ParameterFactory, pargs = (psrc_repository,), tags = [self], inherit = True)
	self.source = config.getPlugin('parameter adapter', 'TrackedParameterAdapter',
		cls = ParameterAdapter, pargs = (self._pfactory.getSource(),))
def __init__(self, config, name):
	"""Initialize the common task settings (older variant).

	Sets up job requirements, the persistent task ID/date, storage limits,
	sandbox input/output file lists and the parameter source machinery.
	"""
	NamedPlugin.__init__(self, config, name)
	initSandbox = changeInitNeeded('sandbox')
	self._varCheck = validNoVar(config)

	# Task requirements
	jobs_config = config.changeView(viewClass = 'TaggedConfigView', addSections = ['jobs'], addTags = [self]) # Move this into parameter manager?
	self.wallTime = jobs_config.getTime('wall time', onChange = None)
	# cpu time defaults to the wall time limit
	self.cpuTime = jobs_config.getTime('cpu time', self.wallTime, onChange = None)
	self.cpus = jobs_config.getInt('cpus', 1, onChange = None)
	self.memory = jobs_config.getInt('memory', -1, onChange = None)
	self.nodeTimeout = jobs_config.getTime('node timeout', -1, onChange = initSandbox)

	# Compute / get task ID - persisted so reruns reuse the same ID
	self.taskID = config.get('task id', 'GC' + md5_hex(str(time()))[:12], persistent = True)
	self.taskDate = config.get('task date', strftime('%Y-%m-%d'), persistent = True, onChange = initSandbox)
	self.taskConfigName = config.getConfigName()
	self._job_name_generator = config.getPlugin('job name generator', 'DefaultJobName', cls = JobNamePlugin, pargs = (self,))

	# Storage setup
	storage_config = config.changeView(viewClass = 'TaggedConfigView',
		setClasses = None, setNames = None, addSections = ['storage'], addTags = [self])
	self.taskVariables = {
		# Space limits
		'SCRATCH_UL': storage_config.getInt('scratch space used', 5000, onChange = initSandbox),
		'SCRATCH_LL': storage_config.getInt('scratch space left', 1, onChange = initSandbox),
		'LANDINGZONE_UL': storage_config.getInt('landing zone space used', 100, onChange = initSandbox),
		'LANDINGZONE_LL': storage_config.getInt('landing zone space left', 1, onChange = initSandbox),
	}
	storage_config.set('se output pattern', 'job_@GC_JOB_ID@_@X@')
	self.seMinSize = storage_config.getInt('se min size', -1, onChange = initSandbox)

	self.sbInputFiles = config.getPaths('input files', [], onChange = initSandbox)
	self.sbOutputFiles = config.getList('output files', [], onChange = initSandbox)
	self.gzipOut = config.getBool('gzip output', True, onChange = initSandbox)
	self.substFiles = config.getList('subst files', [], onChange = initSandbox)
	self.dependencies = lmap(str.lower, config.getList('depends', [], onChange = initSandbox))

	# Get error messages from gc-run.lib comments
	self.errorDict = {}
	self.updateErrorDict(utils.pathShare('gc-run.lib'))

	# Init parameter source manager
	self._setupJobParameters(config)
	self._pfactory = config.getPlugin('internal parameter factory', 'BasicParameterFactory',
		cls = ParameterFactory, tags = [self], inherit = True)
	self.source = config.getPlugin('parameter adapter', 'TrackedParameterAdapter',
		cls = ParameterAdapter, pargs = (self._pfactory.getSource(),))
def _submitJob(self, jobNum, module):
	"""Submit a single job to the local batch system (older variant).

	Creates a per-job sandbox directory, stages input files into it, writes
	the job configuration and invokes the submit executable.

	Returns (jobNum, wmsId or None, {'sandbox': sandbox}); raises
	BackendError if the sandbox directory cannot be created.
	"""
	activity = utils.ActivityLog('submitting jobs')

	try:
		sandbox = self.sandPath # defined here for exception message in case os.mkdir fails
		if not os.path.exists(self.sandPath):
			os.mkdir(self.sandPath)
		sandbox = tempfile.mkdtemp('', '%s.%04d.' % (module.taskID, jobNum), self.sandPath)
	except Exception:
		raise BackendError('Unable to create sandbox directory "%s"!' % sandbox)
	sbPrefix = sandbox.replace(self.sandPath, '').lstrip('/')
	def translateTarget(d, s, t):
		# Retarget transfer destination into the per-job sandbox subdirectory
		return (d, s, os.path.join(sbPrefix, t))
	self.smSBIn.doTransfer(ismap(translateTarget, self._getSandboxFilesIn(module)))

	cfgPath = os.path.join(sandbox, '_jobconfig.sh')
	self._writeJobConfig(cfgPath, jobNum, module, {'GC_SANDBOX': sandbox,
		'GC_SCRATCH_SEARCH': str.join(' ', self.scratchPath)})
	reqs = self.brokerSite.brokerAdd(module.getRequirements(jobNum), WMS.SITES)
	reqs = dict(self.brokerQueue.brokerAdd(reqs, WMS.QUEUES))
	if (self.memory > 0) and (reqs.get(WMS.MEMORY, 0) < self.memory):
		reqs[WMS.MEMORY] = self.memory # local jobs need higher (more realistic) memory requirements

	(stdout, stderr) = (os.path.join(sandbox, 'gc.stdout'), os.path.join(sandbox, 'gc.stderr'))
	jobName = module.getDescription(jobNum).jobName
	proc = utils.LoggedProcess(self.submitExec, '%s %s "%s" %s' % (self.submitOpts,
		self.getSubmitArguments(jobNum, jobName, reqs, sandbox, stdout, stderr),
		utils.pathShare('gc-local.sh'), self.getJobArguments(jobNum, sandbox)))
	retCode = proc.wait()
	wmsIdText = proc.getOutput().strip().strip('\n')
	try:
		wmsId = self.parseSubmitOutput(wmsIdText)
	except Exception:
		wmsId = None
	del activity

	if retCode != 0:
		utils.eprint('WARNING: %s failed:' % self.submitExec)
	elif wmsId is None:
		utils.eprint('WARNING: %s did not yield job id:\n%s' % (self.submitExec, wmsIdText))
	if wmsId:
		wmsId = self._createId(wmsId)
		# Touch a marker file named after the wms id - close the handle to avoid an fd leak
		open(os.path.join(sandbox, wmsId), 'w').close()
	else:
		proc.logError(self.errorLog)
	return (jobNum, utils.QM(wmsId, wmsId, None), {'sandbox': sandbox})
def __init__(self, config, name):
	"""Initialize the common task settings (oldest variant).

	Sets up job requirements, the persistent task ID/date, storage limits,
	sandbox input/output file lists and the parameter source machinery.
	"""
	NamedPlugin.__init__(self, config, name)
	initSandbox = changeInitNeeded('sandbox')

	# Task requirements
	configJobs = config.changeView(viewClass = TaggedConfigView, addSections = ['jobs'], addTags = [self]) # Move this into parameter manager?
	self.wallTime = configJobs.getTime('wall time', onChange = None)
	# cpu time defaults to the wall time limit
	self.cpuTime = configJobs.getTime('cpu time', self.wallTime, onChange = None)
	self.cpus = configJobs.getInt('cpus', 1, onChange = None)
	self.memory = configJobs.getInt('memory', -1, onChange = None)
	self.nodeTimeout = configJobs.getTime('node timeout', -1, onChange = initSandbox)

	# Compute / get task ID - persisted so reruns reuse the same ID
	self.taskID = config.get('task id', 'GC' + md5(str(time())).hexdigest()[:12], persistent = True)
	self.taskDate = config.get('task date', strftime('%Y-%m-%d'), persistent = True, onChange = initSandbox)
	self.taskConfigName = config.getConfigName()

	# Storage setup
	configStorage = config.changeView(viewClass = TaggedConfigView,
		setClasses = None, setNames = None, addSections = ['storage'], addTags = [self])
	self.taskVariables = {
		# Space limits
		'SCRATCH_UL': configStorage.getInt('scratch space used', 5000, onChange = initSandbox),
		'SCRATCH_LL': configStorage.getInt('scratch space left', 1, onChange = initSandbox),
		'LANDINGZONE_UL': configStorage.getInt('landing zone space used', 100, onChange = initSandbox),
		'LANDINGZONE_LL': configStorage.getInt('landing zone space left', 1, onChange = initSandbox),
	}
	configStorage.set('se output pattern', 'job_@GC_JOB_ID@_@X@')
	self.seMinSize = configStorage.getInt('se min size', -1, onChange = initSandbox)

	self.sbInputFiles = config.getPaths('input files', [], onChange = initSandbox)
	self.sbOutputFiles = config.getList('output files', [], onChange = initSandbox)
	self.gzipOut = config.getBool('gzip output', True, onChange = initSandbox)
	self.substFiles = config.getList('subst files', [], onChange = initSandbox)
	# NOTE: plain map() - returns a list on Python 2, an iterator on Python 3
	self.dependencies = map(str.lower, config.getList('depends', [], onChange = initSandbox))

	# Get error messages from gc-run.lib comments
	# NOTE(review): assumes updateErrorDict returns something dict() accepts - verify;
	# newer variants initialize self.errorDict = {} before calling updateErrorDict
	self.errorDict = dict(self.updateErrorDict(utils.pathShare('gc-run.lib')))

	# Init parameter source manager
	pm = config.getPlugin('parameter factory', 'SimpleParameterFactory', cls = ParameterFactory, inherit = True).getInstance()
	configParam = config.changeView(viewClass = TaggedConfigView, addSections = ['parameters'], addTags = [self])
	self.setupJobParameters(configParam, pm)
	self.source = pm.getSource(configParam)
def _getSandboxFiles(self, task, monitor, smList):
	"""Assemble the complete sandbox input file list for a job.

	Combines monitor files, per-dependency environment scripts, the task's
	(wildcard-expanded) input files and two generated virtual files with the
	exported task configuration and the variable name mapping.
	"""
	# Prepare all input files
	depList = set(ichain(imap(lambda x: x.getDependencies(), [task] + smList)))
	# Search every installed package's share directory for env.<dep>.sh scripts
	depPaths = lmap(lambda pkg: utils.pathShare('', pkg=pkg), os.listdir(utils.pathPKG()))
	depFiles = lmap(lambda dep: utils.resolvePath('env.%s.sh' % dep, depPaths), depList)
	# Later entries win on key collisions: task config overrides monitor config
	taskEnv = utils.mergeDicts(imap(lambda x: x.getTaskConfig(), [monitor, task] + smList))
	taskEnv.update({'GC_DEPFILES': str.join(' ', depList),
		'GC_USERNAME': self._token.getUsername(), 'GC_WMS_NAME': self.wmsName})
	taskConfig = sorted(utils.DictFormat(escapeString=True).format(taskEnv, format='export %s%s%s\n'))
	# Start with an identity mapping for the monitor variables, then apply task overrides
	varMappingDict = dict(izip(monitor.getTaskConfig().keys(), monitor.getTaskConfig().keys()))
	varMappingDict.update(task.getVarMapping())
	varMapping = sorted(utils.DictFormat(delimeter=' ').format(varMappingDict, format='%s%s%s\n'))
	# Resolve wildcards in task input files
	def getTaskFiles():
		for f in task.getSBInFiles():
			matched = glob.glob(f.pathAbs)
			if matched != []:
				for match in matched:
					yield match
			else:
				# keep the literal path if the glob matched nothing
				yield f.pathAbs
	return lchain([monitor.getFiles(), depFiles, getTaskFiles(),
		[VirtualFile('_config.sh', taskConfig), VirtualFile('_varmap.dat', varMapping)]])
def _submitJob(self, jobNum, module):
	"""Submit a single job to the local batch system (Activity-based variant).

	Creates a per-job sandbox directory, stages input files into it, writes
	the job configuration and invokes the submit executable.

	Returns (jobNum, gcID or None, {'sandbox': sandbox}); raises
	BackendError if the sandbox directory cannot be created.
	"""
	activity = Activity('submitting job %d' % jobNum)
	# Pre-assign sandbox so the error message below references a valid path even
	# when tempfile.mkdtemp itself raises (previously this was a NameError).
	sandbox = self._sandbox_helper.get_path()
	try:
		sandbox = tempfile.mkdtemp('', '%s.%04d.' % (module.taskID, jobNum), self._sandbox_helper.get_path())
	except Exception:
		raise BackendError('Unable to create sandbox directory "%s"!' % sandbox)
	sbPrefix = sandbox.replace(self._sandbox_helper.get_path(), '').lstrip('/')
	def translateTarget(d, s, t):
		# Retarget transfer destination into the per-job sandbox subdirectory
		return (d, s, os.path.join(sbPrefix, t))
	self.smSBIn.doTransfer(ismap(translateTarget, self._getSandboxFilesIn(module)))

	self._writeJobConfig(os.path.join(sandbox, '_jobconfig.sh'), jobNum, module, {
		'GC_SANDBOX': sandbox, 'GC_SCRATCH_SEARCH': str.join(' ', self.scratchPath)})
	reqs = self.brokerSite.brokerAdd(module.getRequirements(jobNum), WMS.SITES)
	reqs = dict(self.brokerQueue.brokerAdd(reqs, WMS.QUEUES))
	if (self.memory > 0) and (reqs.get(WMS.MEMORY, 0) < self.memory):
		reqs[WMS.MEMORY] = self.memory # local jobs need higher (more realistic) memory requirements

	(stdout, stderr) = (os.path.join(sandbox, 'gc.stdout'), os.path.join(sandbox, 'gc.stderr'))
	jobName = module.getDescription(jobNum).jobName
	proc = LoggedProcess(self.submitExec, '%s %s "%s" %s' % (self.submitOpts,
		self.getSubmitArguments(jobNum, jobName, reqs, sandbox, stdout, stderr),
		utils.pathShare('gc-local.sh'), self.getJobArguments(jobNum, sandbox)))
	retCode = proc.wait()
	gcIDText = proc.getOutput().strip().strip('\n')
	try:
		gcID = self.parseSubmitOutput(gcIDText)
	except Exception:
		gcID = None
	activity.finish()

	if retCode != 0:
		self._log.warning('%s failed:', self.submitExec)
	elif gcID is None:
		self._log.warning('%s did not yield job id:\n%s', self.submitExec, gcIDText)
	if gcID:
		gcID = self._createId(gcID)
		# Touch a marker file named after the gc id - close the handle to avoid an fd leak
		open(os.path.join(sandbox, gcID), 'w').close()
	else:
		proc.logError(self.errorLog)
	return (jobNum, utils.QM(gcID, gcID, None), {'sandbox': sandbox})
def parse_cmd_line(cmd_line_args):
    """Parse the grid-control command line and return (opts, args).

    Exits with a usage message when --help is given, when no config file
    is supplied or when more than one positional argument is present.
    """
    parser = Options(usage='%s [OPTIONS] <config file>', add_help_option=False)
    # Plain boolean switches (all default to False)
    for (short_opt, long_opt) in [(' ', 'debug'), (' ', 'help-conf'), (' ', 'help-confmin'),
            ('c', 'continuous'), ('h', 'help'), ('i', 'init'), ('q', 'resync')]:
        parser.addBool(None, short_opt, long_opt, default=False)
    # Switches with non-default destinations / defaults
    parser.addBool(None, 's', 'no-submission', default=True, dest='submission')
    parser.addBool(None, 'G', 'gui', default=False, dest='gui_ansi')
    parser.addAccu(None, 'v', 'verbose')
    for (short_opt, long_opt) in [('l', 'logging'), ('o', 'override')]:
        parser.addList(None, short_opt, long_opt)
    for (short_opt, long_opt) in [(' ', 'action'), ('d', 'delete'), ('J', 'job-selector'),
            ('m', 'max-retry'), (' ', 'reset')]:
        parser.addText(None, short_opt, long_opt)
    # Deprecated options - refer to new report script instead
    for (sopt, lopt) in [('-r', 'report'), ('-R', 'site-report'), ('-T', 'time-report'),
            ('-M', 'task-report'), ('-D', 'detail-report'), ('', 'help-vars')]:
        parser.addBool(None, sopt, lopt, default=False, dest='old_report')
    (opts, args, _) = parser.parse(args=cmd_line_args)
    opts.gui = None
    if opts.gui_ansi:
        opts.gui = 'ANSIGUI'
    opts.continuous = opts.continuous or None  # either True or None
    # Display help
    if opts.help:
        utils.exitWithUsage(parser.usage(), open(utils.pathShare('help.txt'), 'r').read(), show_help=False)
    # Require a single config file argument
    if len(args) == 0:
        utils.exitWithUsage(parser.usage(), 'Config file not specified!')
    elif len(args) > 1:
        utils.exitWithUsage(parser.usage(), 'Invalid command line arguments: %r' % cmd_line_args)
    # Warn about deprecated report options
    if opts.old_report:
        utils.deprecated('Please use the more versatile report tool in the scripts directory!')
    # Configure preliminary logging
    logging.getLogger().setLevel(max(1, logging.DEFAULT - opts.verbose))
    return (opts, args)
def matchSites(self, endpoint):
    """Query the given WMS endpoint for matching sites.

    Returns the list of matched site names; returns [] and records a
    failure when the list-match process does not finish within the timeout.
    """
    activity = Activity('Discovering available WMS services - testing %s' % endpoint)
    args = ['-a']
    if endpoint:
        args += ['-e', endpoint]
    args.append(utils.pathShare('null.jdl'))
    proc = LocalProcess(self._exeGliteWMSJobListMatch, *args)
    matches = []
    for line in proc.stdout.iter(timeout=3):
        if line.startswith(' - '):
            matches.append(line[3:].strip())
    activity.finish()
    if proc.status(timeout=0) is None:
        # Process still running after the read timeout - count this as a failure
        self.wms_timeout[endpoint] = self.wms_timeout.get(endpoint, 0) + 1
        if self.wms_timeout.get(endpoint, 0) > 10:  # remove endpoints after 10 failures
            self.wms_all.remove(endpoint)
        return []
    return matches
def matchSites(self, endpoint):
    """Query the given WMS endpoint for matching sites (threaded variant).

    The list-match process is read in a helper thread that is joined with
    a 3 second timeout; on timeout the process is killed and the endpoint
    failure counter incremented.
    """
    matches = []
    argString = '-a'
    if endpoint:
        argString = argString + ' -e %s' % endpoint
    proc = utils.LoggedProcess(self._exeGliteWMSJobListMatch,
        argString + ' %s' % utils.pathShare('null.jdl'))

    def collectMatches():
        # TODO: integrate timeout into loggedprocess
        for line in proc.iter():
            if line.startswith(' - '):
                matches.append(line[3:].strip())

    worker = utils.gcStartThread('Matching jobs with WMS %s' % endpoint, collectMatches)
    worker.join(timeout=3)
    if worker.isAlive():
        # Timed out - kill the process and record the failure
        proc.kill()
        worker.join()
        self.wms_timeout[endpoint] = self.wms_timeout.get(endpoint, 0) + 1
        if self.wms_timeout.get(endpoint, 0) > 10:  # remove endpoints after 10 failures
            self.wms_all.remove(endpoint)
        return []
    return matches
def getExecAndTansfers(self, module):
    """Resolve the grid-control wrapper executable and the transfer file list.

    For remote pools (SSH/GSISSH) paths are rebased onto the remote work
    directory; otherwise the local source paths are used directly. A docker
    universe overrides the executable with the relative './gc-run.sh'.
    (Note: the method name typo 'Tansfers' is part of the public interface.)
    """
    gcExec, transferFiles = '', []
    if self.remoteType in (PoolType.SSH, PoolType.GSISSH):
        for (_, _, target) in self._getSandboxFilesIn(module):
            remotePath = os.path.join(self.getWorkdirPath(), target)
            if 'gc-run.sh' in target:
                gcExec = remotePath
            else:
                transferFiles.append(remotePath)
    else:
        for (_, source, _) in self._getSandboxFilesIn(module):
            if 'gc-run.sh' in source:
                gcExec = source
            else:
                transferFiles.append(source)
    if self.settings["jdl"]["Universe"].lower() == "docker":
        gcExec = "./gc-run.sh"
        transferFiles.append(utils.pathShare('gc-run.sh'))
    return (gcExec, transferFiles)
def __init__(self, config, name):
    """Initialize a ROOT task: locate ROOT and the executable to run."""
    # Determine ROOT path from previous settings / environment / config file
    self._rootpath = config.get('root path', os.environ.get('ROOTSYS', ''),
        persistent=True, onChange=changeInitNeeded('sandbox'))
    if not self._rootpath:
        raise ConfigError('Either set environment variable "ROOTSYS" or set option "root path"!')
    utils.vprint('Using the following ROOT path: %s' % self._rootpath, -1)
    # Special handling for executables bundled with ROOT
    self._executable = config.get('executable', onChange=changeInitNeeded('sandbox'))
    bundledPath = os.path.join(self._rootpath, 'bin', self._executable.lstrip('/'))
    self.builtIn = os.path.exists(bundledPath)
    if self.builtIn:
        config.set('send executable', 'False')
        # store resolved built-in executable path? (open question from original author)
    # Apply default handling from UserTask
    UserTask.__init__(self, config, name)
    self.updateErrorDict(utils.pathShare('gc-run.root.sh'))
    # Collect lib files needed by executable
    self.libFiles = []
def run():
    """Entry point: install signal handlers, show the banner and run the workflow."""
    # set up signal handlers for interrupts and debug session requests
    signal.signal(signal.SIGINT, handle_abort_interrupt)
    signal.signal(signal.SIGURG, handle_debug_interrupt)
    # display the 'grid-control' logo and version
    sys.stdout.write(open(utils.pathShare('logo.txt'), 'r').read())
    sys.stdout.write('Revision: %s\n' % utils.getVersion())
    version = (sys.version_info[0], sys.version_info[1])
    if version < (2, 3):
        utils.deprecated('This python version (%d.%d) is not supported anymore!' % version)
    # main try... except block to catch exceptions and show error message
    try:
        workflow = gc_create_workflow(gc_create_config(sys.argv[1:]))
        sys.exit(workflow.run())
    except SystemExit:
        pass
    except Exception:  # coverage overrides sys.excepthook
        gc_excepthook(*sys.exc_info())
def run():
    """Entry point: install signal handlers, show the banner and run the workflow."""
    # set up signal handlers for interrupts and debug session requests
    for (signum, sighandler) in [(signal.SIGINT, handle_abort_interrupt),
            (signal.SIGURG, handle_debug_interrupt)]:
        signal.signal(signum, sighandler)
    # display the 'grid-control' logo and version
    sys.stdout.write(open(utils.pathShare('logo.txt'), 'r').read())
    sys.stdout.write('Revision: %s\n' % utils.getVersion())
    pyver = (sys.version_info[0], sys.version_info[1])
    if pyver < (2, 3):
        utils.deprecated('This python version (%d.%d) is not supported anymore!' % pyver)
    # main try... except block to catch exceptions and show error message
    try:
        config = gc_create_config(sys.argv[1:])
        workflow = gc_create_workflow(config)
        sys.exit(workflow.run())
    except SystemExit:
        pass
    except Exception:  # coverage overrides sys.excepthook
        gc_excepthook(*sys.exc_info())
def _getSandboxFiles(self, module, monitor, smList):
    """Collect all input sandbox files for a job (legacy proxy-based variant)."""
    # Environment scripts for all declared package dependencies
    depList = set(itertools.chain(*map(lambda x: x.getDependencies(), [module] + smList)))
    depPaths = map(lambda pkg: utils.pathShare('', pkg=pkg), os.listdir(utils.pathGC('packages')))
    depFiles = map(lambda dep: utils.resolvePath('env.%s.sh' % dep, depPaths), depList)
    # Merge task environments and append the grid-control specific variables
    envDicts = list(itertools.chain(map(lambda x: x.getTaskConfig(), [monitor, module] + smList)))
    envDicts.append({'GC_DEPFILES': str.join(' ', depList),
        'GC_USERNAME': self.proxy.getUsername(), 'GC_WMS_NAME': self.wmsName})
    taskConfig = sorted(utils.DictFormat(escapeString=True).format(
        utils.mergeDicts(envDicts), format='export %s%s%s\n'))
    # Identity mapping for monitor variables, extended by the module mapping
    varMappingDict = dict(zip(monitor.getTaskConfig().keys(), monitor.getTaskConfig().keys()))
    varMappingDict.update(module.getVarMapping())
    varMapping = sorted(utils.DictFormat(delimeter=' ').format(varMappingDict, format='%s%s%s\n'))

    def iterModuleFiles():
        # Expand wildcards in module input files; literal fallback on no match
        for fn in module.getSBInFiles():
            hits = glob.glob(fn)
            if hits != []:
                for hit in hits:
                    yield hit
            else:
                yield fn

    return list(itertools.chain(monitor.getFiles(), depFiles, iterModuleFiles(),
        [utils.VirtualFile('_config.sh', taskConfig), utils.VirtualFile('_varmap.dat', varMapping)]))
def run(args = None, intro = True):
    """Entry point with optional argument list and intro banner suppression."""
    # display the 'grid-control' logo and version (unless disabled)
    if intro and not os.environ.get('GC_DISABLE_INTRO'):
        sys.stdout.write(SafeFile(utils.pathShare('logo.txt'), 'r').read())
        sys.stdout.write('Revision: %s\n' % utils.getVersion())
    versionTuple = (sys.version_info[0], sys.version_info[1])
    if versionTuple < (2, 3):
        utils.deprecated('This python version (%d.%d) is not supported anymore!' % versionTuple)
    Activity.root = Activity('Running grid-control', name = 'root')  # top level activity instance
    # main try... except block to catch exceptions and show error message
    try:
        config = gc_create_config(args or sys.argv[1:], useDefaultFiles = True)
        workflow = gc_create_workflow(config)
        try:
            sys.exit(workflow.run())
        finally:
            sys.stdout.write('\n')
    except SystemExit:
        # avoid getting caught for Python < 2.5
        raise
    except Exception:  # coverage overrides sys.excepthook
        gc_excepthook(*sys.exc_info())
        sys.exit(os.EX_SOFTWARE)
def _getSandboxFiles(self, task, monitor, smList):
    """Assemble the sandbox input file list (variant using self._name)."""
    # Environment scripts for all declared package dependencies
    depList = set(ichain(imap(lambda src: src.getDependencies(), [task] + smList)))
    depPaths = lmap(lambda pkg: utils.pathShare('', pkg = pkg), os.listdir(utils.pathPKG()))
    depFiles = lmap(lambda dep: utils.resolvePath('env.%s.sh' % dep, depPaths), depList)
    # Combined task environment exported via '_config.sh'
    taskEnv = utils.mergeDicts(imap(lambda src: src.getTaskConfig(), [monitor, task] + smList))
    taskEnv.update({'GC_DEPFILES': str.join(' ', depList),
        'GC_USERNAME': self._token.getUsername(), 'GC_WMS_NAME': self._name})
    taskConfig = sorted(utils.DictFormat(escapeString = True).format(taskEnv, format = 'export %s%s%s\n'))
    # Identity mapping for monitor variables, extended by the task mapping
    varMappingDict = dict(izip(monitor.getTaskConfig().keys(), monitor.getTaskConfig().keys()))
    varMappingDict.update(task.getVarMapping())
    varMapping = sorted(utils.DictFormat(delimeter = ' ').format(varMappingDict, format = '%s%s%s\n'))

    def _expandSBInFiles():
        # Wildcard expansion with literal fallback
        for sbFile in task.getSBInFiles():
            hits = glob.glob(sbFile.pathAbs)
            if hits != []:
                for hit in hits:
                    yield hit
            else:
                yield sbFile.pathAbs

    return lchain([monitor.getFiles(), depFiles, _expandSBInFiles(),
        [VirtualFile('_config.sh', taskConfig), VirtualFile('_varmap.dat', varMapping)]])
def __init__(self, config, name, checkExecutor, cancelExecutor):
    # Shared initialization for batch-system style WMS backends.
    #
    # config          - backend configuration view
    # name            - backend instance name
    # checkExecutor / cancelExecutor - executors for job status query and
    #     cancellation; both are hooked up to this backend's logger here
    WMS.__init__(self, config, name)
    for executor in [checkExecutor, cancelExecutor]:
        executor.setup(self._log)
    (self._check_executor, self._cancel_executor) = (checkExecutor, cancelExecutor)
    # Log which batch system implementation is active (show the class name
    # separately only when it differs from the configured instance name)
    if self._name != self.__class__.__name__.upper():
        self._log.info('Using batch system: %s (%s)', self.__class__.__name__, self._name)
    else:
        self._log.info('Using batch system: %s', self._name)
    self.errorLog = config.getWorkPath('error.tar')
    self._runlib = config.getWorkPath('gc-run.lib')
    # Materialize the runtime library in the work directory once, substituting
    # the grid-control version into the '__GC_VERSION__' template placeholder
    if not os.path.exists(self._runlib):
        fp = SafeFile(self._runlib, 'w')
        content = SafeFile(utils.pathShare('gc-run.lib')).read()
        fp.write(content.replace('__GC_VERSION__', __import__('grid_control').__version__))
        fp.close()
    # Work directory layout: output / file cache / failed jobs
    self._outputPath = config.getWorkPath('output')
    self._filecachePath = config.getWorkPath('files')
    utils.ensureDirExists(self._outputPath, 'output directory')
    self._failPath = config.getWorkPath('fail')
    # Initialise access token and storage managers
    # UI -> SE -> WN
    self.smSEIn = config.getPlugin('se input manager', 'SEStorageManager', cls = StorageManager,
        tags = [self], pargs = ('se', 'se input', 'SE_INPUT'))
    self.smSBIn = config.getPlugin('sb input manager', 'LocalSBStorageManager', cls = StorageManager,
        tags = [self], pargs = ('sandbox', 'sandbox', 'SB_INPUT'))
    # UI <- SE <- WN
    self.smSEOut = config.getPlugin('se output manager', 'SEStorageManager', cls = StorageManager,
        tags = [self], pargs = ('se', 'se output', 'SE_OUTPUT'))
    self.smSBOut = None
    self._token = config.getCompositePlugin(['proxy', 'access token'], 'TrivialAccessToken',
        'MultiAccessToken', cls = AccessToken, inherit = True, tags = [self])
def getFiles(self):
    """Yield the dashboard wrapper script followed by the DashboardAPI files."""
    yield pathShare('mon.dashboard.sh', pkg='grid_control_cms')
    apiFiles = ('DashboardAPI.py', 'Logger.py', 'apmon.py', 'report.py')
    for apiFile in apiFiles:
        yield pathShare('..', 'DashboardAPI', apiFile, pkg='grid_control_cms')
def getScript(self):
    """Yield the dashboard monitoring wrapper script."""
    script = pathShare('mon.dashboard.sh', pkg='grid_control_cms')
    yield script
def getFiles(self):
    """Yield the DashboardAPI support files shipped with grid_control_cms."""
    apiFiles = ['DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'report.py']
    for apiFile in apiFiles:
        yield utils.pathShare('..', 'DashboardAPI', apiFile, pkg = 'grid_control_cms')
def getScript(self):
    """Yield the dashboard monitoring wrapper script."""
    wrapper = utils.pathShare('mon.dashboard.sh', pkg = 'grid_control_cms')
    yield wrapper
def _getSandboxFilesIn(self, task):
    """Return (description, source path, target name) tuples for the input sandbox."""
    sandboxFiles = []
    sandboxFiles.append(('GC Runtime', utils.pathShare('gc-run.sh'), 'gc-run.sh'))
    sandboxFiles.append(('GC Runtime library', self._runlib, 'gc-run.lib'))
    sandboxFiles.append(('GC Sandbox', self._getSandboxName(task), 'gc-sandbox.tar.gz'))
    return sandboxFiles
def __init__(self, config, name):
    # CMSSW task initialization: preset the CMS-specific dataset defaults,
    # configure SCRAM, collect/instrument cmssw config files and (re)build
    # the project area tarball when needed.
    config.set('se input timeout', '0:30')
    config.set('dataset provider', 'DBS3Provider')
    config.set('dataset splitter', 'EventBoundarySplitter')
    config.set('partition processor',
        'CMSPartitionProcessor LocationPartitionProcessor LumiPartitionProcessor')
    config.set('dataset processor', 'LumiDataProcessor', '+=')
    DataTask.__init__(self, config, name)
    self.updateErrorDict(utils.pathShare('gc-run.cmssw.sh', pkg='grid_control_cms'))
    # SCRAM settings
    self._configureSCRAMSettings(config)
    self.useReqs = config.getBool('software requirements', True, onChange=None)
    self._projectAreaTarballSE = config.getBool(['se project area', 'se runtime'], True)
    self._projectAreaTarball = config.getWorkPath('cmssw-project-area.tar.gz')
    # Information about search order for software environment
    self.searchLoc = self._getCMSSWPaths(config)
    # Prolog / Epilog script support - warn about old syntax
    self.prolog = TaskExecutableWrapper(config, 'prolog', '')
    self.epilog = TaskExecutableWrapper(config, 'epilog', '')
    if config.getPaths('executable', []) != []:
        raise ConfigError('Prefix executable and argument options with either prolog or epilog!')
    self.arguments = config.get('arguments', '')
    # Get cmssw config files and check their existance
    # Check that for dataset jobs the necessary placeholders are in the config file
    if self.dataSplitter is None:
        # NOTE(review): in a sibling variant this default can be a variable
        # like @USER_EVENTS@ - confirm whether that applies here too
        self.eventsPerJob = config.get('events per job', '0')
    fragment = config.getPath('instrumentation fragment',
        utils.pathShare('fragmentForCMSSW.py', pkg='grid_control_cms'))
    self.configFiles = self._processConfigFiles(config, list(self._getConfigFiles(config)),
        fragment, autoPrepare=config.getBool('instrumentation', True),
        mustPrepare=(self.dataSplitter is not None))
    # Create project area tarball
    # NOTE(review): a sibling variant additionally guards this with
    # 'if self.projectArea' - confirm whether the guard is needed here
    if not os.path.exists(self._projectAreaTarball):
        config.setState(True, 'init', detail='sandbox')
    if config.getState('init', detail='sandbox'):
        if os.path.exists(self._projectAreaTarball):
            if not utils.getUserBool('CMSSW tarball already exists! Do you want to regenerate it?', True):
                return
        # Generate CMSSW tarball
        if self.projectArea:
            utils.genTarball(self._projectAreaTarball,
                utils.matchFiles(self.projectArea, self.pattern))
        if self._projectAreaTarballSE:
            config.setState(True, 'init', detail='storage')
# Startup sequence: signal handling, banner output and command line parsing.
# NOTE(review): the 'global' statement implies this is a function body whose
# 'def' line lies outside this chunk - confirm the enclosing scope.
global log, handler
log = None

# set up signal handler for interrupts; the first Ctrl-C requests a graceful
# abort and restores the previously installed handler for a forced exit
def interrupt(sig, frame):
    global log, handler
    utils.abort(True)
    log = utils.ActivityLog('Quitting grid-control! (This can take a few seconds...)')
    signal.signal(signal.SIGINT, handler)
# keep the previous SIGINT handler so 'interrupt' can restore it
handler = signal.signal(signal.SIGINT, interrupt)
# set up signal handler for debug session requests
signal.signal(signal.SIGURG, debugInterruptHandler)
# display the 'grid-control' logo and version
utils.vprint(open(utils.pathShare('logo.txt'), 'r').read(), -1)
utils.vprint('Revision: %s' % utils.getVersion(), -1)
pyver = sys.version_info[0] + sys.version_info[1] / 10.0
if pyver < 2.3:
    utils.deprecated('This python version (%.1f) is not supported anymore!' % pyver)

# Command line parsing (optparse-based legacy interface)
usage = 'Syntax: %s [OPTIONS] <config file>\n' % sys.argv[0]
parser = optparse.OptionParser(add_help_option=False)
parser.add_option('-h', '--help', dest='help', default=False, action='store_true')
parser.add_option('', '--help-conf', dest='help_cfg', default=False, action='store_true')
parser.add_option('', '--help-confmin', dest='help_scfg', default=False, action='store_true')
parser.add_option('-i', '--init', dest='init', default=False, action='store_true')
parser.add_option('-q', '--resync', dest='resync', default=False, action='store_true')
parser.add_option('', '--debug', dest='debug', default=False, action='store_true')
parser.add_option('-s', '--no-submission', dest='submission', default=True, action='store_false')
parser.add_option('-c', '--continuous', dest='continuous', default=None, action='store_true')
def getSBInFiles(self):
    """Return the user task sandbox files plus libraries and the ROOT wrapper."""
    wrapper = utils.Result(pathAbs=utils.pathShare('gc-run.root.sh'), pathRel='gc-run.root.sh')
    return UserTask.getSBInFiles(self) + self.libFiles + [wrapper]
def getFiles(self):
    """Yield the dashboard wrapper script and each DashboardAPI file."""
    yield pathShare('mon.dashboard.sh', pkg = 'grid_control_cms')
    for apiFn in ['DashboardAPI.py', 'Logger.py', 'apmon.py', 'report.py']:
        yield pathShare('..', 'DashboardAPI', apiFn, pkg = 'grid_control_cms')
def se_runcmd(cmd, varDict, *urls):
    """Build a LocalProcess that runs 'cmd' via gc-run.lib.

    varDict entries are exported into the shell environment and each URL is
    quoted, with 'dir://' rewritten to 'file://'.
    """
    runLib = utils.pathShare('gc-run.lib')
    urlArgs = str.join(' ', imap(lambda u: '"%s"' % ensurePrefix(u).replace('dir://', 'file://'), urls))
    exports = str.join(' ', imap(lambda key: 'export %s="%s";' % (key, varDict[key]), varDict))
    shellCmd = '. %s || exit 99; %s %s %s' % (runLib, exports, cmd, urlArgs)
    return LocalProcess('/bin/bash', '-c', shellCmd)
def getSBInFiles(self):
    """Return the user task sandbox files plus libraries and the ROOT wrapper."""
    extra = [utils.Result(pathAbs = utils.pathShare('gc-run.root.sh'), pathRel = 'gc-run.root.sh')]
    return UserTask.getSBInFiles(self) + self.libFiles + extra
def __init__(self, config, name):
    """Local 'host' backend: submit via gc-host.sh, check/cancel via Host_* executors."""
    submitScript = utils.pathShare('gc-host.sh')
    LocalWMS.__init__(self, config, name,
        submitExec = submitScript,
        checkExecutor = CheckJobsMissingState(config, Host_CheckJobs(config)),
        cancelExecutor = Host_CancelJobs(config))
def getSBInFiles(self):
    """Return all sandbox input files: base task files, cmssw configs,
    prolog/epilog files, optionally the runtime tarball and the CMSSW wrapper."""
    sbFiles = DataTask.getSBInFiles(self) + self.configFiles
    sbFiles += self.prolog.getSBInFiles() + self.epilog.getSBInFiles()
    # Ship the runtime tarball directly unless it is staged via the SE
    if len(self.projectArea) and not self.seRuntime:
        sbFiles.append(self.runtimePath)
    sbFiles.append(utils.pathShare('gc-run.cmssw.sh', pkg = 'grid_control_cms'))
    return sbFiles
def __init__(self, config, name):
    """Legacy local 'host' backend using external ps/kill for status and cancel."""
    submitScript = utils.pathShare('gc-host.sh')
    LocalWMS.__init__(self, config, name,
        submitExec = submitScript,
        statusExec = utils.resolveInstallPath('ps'),
        cancelExec = utils.resolveInstallPath('kill'))
def __init__(self, config, name):
    # Legacy CMSSW task initialization: dataset defaults, SCRAM project /
    # project area discovery, lumi filter, config file collection and
    # instrumentation, and runtime tarball generation.
    config.set('se input timeout', '0:30', override = False)
    config.set('dataset provider', 'DBS3Provider', override = False)
    config.set('dataset splitter', 'EventBoundarySplitter', override = False)
    DataTask.__init__(self, config, name)
    # NOTE(review): if updateErrorDict returns None (as in sibling variants
    # where it is called for its side effect), dict(None) raises TypeError -
    # confirm its return value in this code base
    self.errorDict.update(dict(self.updateErrorDict(utils.pathShare('gc-run.cmssw.sh', pkg = 'grid_control_cms'))))
    # SCRAM info: either an explicit (PROJECT, VERSION) pair or a local project area
    scramProject = config.getList('scram project', [])
    if len(scramProject):
        self.projectArea = config.getPath('project area', '')
        if len(self.projectArea):
            raise ConfigError('Cannot specify both SCRAM project and project area')
        if len(scramProject) != 2:
            raise ConfigError('SCRAM project needs exactly 2 arguments: PROJECT VERSION')
    else:
        self.projectArea = config.getPath('project area')
    # This works in tandem with provider_dbsv2.py !
    self.selectedLumis = parseLumiFilter(config.get('lumi filter', ''))
    self.useReqs = config.getBool('software requirements', True, onChange = None)
    self.seRuntime = config.getBool('se runtime', False)
    self.runtimePath = config.getWorkPath('runtime.tar.gz')
    if len(self.projectArea):
        # Default file selection pattern for the project area tarball
        defaultPattern = '-.* -config bin lib python module */data *.xml *.sql *.cf[if] *.py -*/.git -*/.svn -*/CVS -*/work.*'
        self.pattern = config.getList('area files', defaultPattern.split())
        if os.path.exists(self.projectArea):
            utils.vprint('Project area found in: %s' % self.projectArea, -1)
        else:
            raise ConfigError('Specified config area %r does not exist!' % self.projectArea)
        scramPath = os.path.join(self.projectArea, '.SCRAM')
        # try to open it
        # NOTE(review): bare 'except:' also swallows KeyboardInterrupt /
        # SystemExit - should be narrowed to Exception
        try:
            fp = open(os.path.join(scramPath, 'Environment'), 'r')
            self.scramEnv = utils.DictFormat().parse(fp, keyParser = {None: str})
        except:
            raise ConfigError('Project area file %s/.SCRAM/Environment cannot be parsed!' % self.projectArea)
        for key in ['SCRAM_PROJECTNAME', 'SCRAM_PROJECTVERSION']:
            if key not in self.scramEnv:
                raise ConfigError('Installed program in project area not recognized.')
        # NOTE(review): relies on Python 2 'filter' returning a list
        # ('archs + [noDefault]' concatenation) - not Python 3 compatible
        archs = filter(lambda x: os.path.isdir(os.path.join(scramPath, x)) and not x.startswith('.'), os.listdir(scramPath))
        self.scramArch = config.get('scram arch', (archs + [noDefault])[0])
        try:
            fp = open(os.path.join(scramPath, self.scramArch, 'Environment'), 'r')
            self.scramEnv.update(utils.DictFormat().parse(fp, keyParser = {None: str}))
        except:
            raise ConfigError('Project area file .SCRAM/%s/Environment cannot be parsed!' % self.scramArch)
    else:
        self.scramEnv = {'SCRAM_PROJECTNAME': scramProject[0], 'SCRAM_PROJECTVERSION': scramProject[1]}
        self.scramArch = config.get('scram arch')
    self.scramVersion = config.get('scram version', 'scramv1')
    if self.scramEnv['SCRAM_PROJECTNAME'] != 'CMSSW':
        raise ConfigError('Project area not a valid CMSSW project area.')
    # Information about search order for software environment
    self.searchLoc = []
    if config.getState('sandbox'):
        userPath = config.get('cmssw dir', '')
        if userPath != '':
            self.searchLoc.append(('CMSSW_DIR_USER', userPath))
        if self.scramEnv.get('RELEASETOP', None):
            projPath = os.path.normpath('%s/../../../../' % self.scramEnv['RELEASETOP'])
            self.searchLoc.append(('CMSSW_DIR_PRO', projPath))
    if len(self.searchLoc):
        utils.vprint('Local jobs will try to use the CMSSW software located here:', -1)
        for i, loc in enumerate(self.searchLoc):
            key, value = loc
            utils.vprint(' %i) %s' % (i + 1, value), -1)
    # Prolog / Epilog script support - warn about old syntax
    self.prolog = TaskExecutableWrapper(config, 'prolog', '')
    self.epilog = TaskExecutableWrapper(config, 'epilog', '')
    if config.getPaths('executable', []) != []:
        raise ConfigError('Prefix executable and argument options with either prolog or epilog!')
    self.arguments = config.get('arguments', '')
    # Get cmssw config files and check their existance
    self.configFiles = []
    cfgDefault = QM(self.prolog.isActive() or self.epilog.isActive(), [], noDefault)
    for cfgFile in config.getPaths('config file', cfgDefault, mustExist = False):
        newPath = config.getWorkPath(os.path.basename(cfgFile))
        if not os.path.exists(newPath):
            if not os.path.exists(cfgFile):
                raise ConfigError('Config file %r not found.' % cfgFile)
            shutil.copyfile(cfgFile, newPath)
        self.configFiles.append(newPath)
    # Check that for dataset jobs the necessary placeholders are in the config file
    self.prepare = config.getBool('prepare config', False)
    fragment = config.getPath('instrumentation fragment', os.path.join('packages', 'grid_control_cms', 'share', 'fragmentForCMSSW.py'))
    if self.dataSplitter != None:
        if config.getState('sandbox'):
            if len(self.configFiles) > 0:
                self.instrumentCfgQueue(self.configFiles, fragment, mustPrepare = True)
    else:
        self.eventsPerJob = config.get('events per job', '0')
        if config.getState(detail = 'sandbox') and self.prepare:
            self.instrumentCfgQueue(self.configFiles, fragment)
    # (Re)build the runtime tarball when missing or when an init was requested
    if not os.path.exists(config.getWorkPath('runtime.tar.gz')):
        config.setState(True, detail = 'sandbox')
    if config.getState(detail = 'sandbox'):
        if os.path.exists(config.getWorkPath('runtime.tar.gz')):
            if not utils.getUserBool('Runtime already exists! Do you want to regenerate CMSSW tarball?', True):
                return
        # Generate runtime tarball (and move to SE)
        if self.projectArea:
            utils.genTarball(config.getWorkPath('runtime.tar.gz'), utils.matchFiles(self.projectArea, self.pattern))
        if self.seRuntime:
            config.setState(True, detail = 'storage')