Example #1
0
    def _parseParameterTuple(self, varName, tupleValue, tupleType, varType,
                             varIndex):
        """Parse the tuple-valued parameter string *tupleValue* and return the
        list of entries at position *varIndex*, each parsed as *varType*.

        Raises ConfigError for unknown tuple types and unparsable entries."""
        if tupleType == 'tuple':
            # user-configurable entry delimiter ('delimeter' spelling kept for
            # backwards compatibility with existing config files)
            tupleDelimeter = self.get(self._getParameterOption(varName),
                                      'delimeter', ',')
            tupleStrings = lmap(
                str.strip,
                utils.split_advanced(tupleValue, lambda tok: tok in ' \n',
                                     lambda tok: False))
            tupleList = lmap(lambda t: parseTuple(t, tupleDelimeter),
                             tupleStrings)
        elif tupleType == 'binning':
            # consecutive pairs of whitespace separated values
            tupleList = lzip(tupleValue.split(), tupleValue.split()[1:])
        else:
            # was: fell through with 'tupleList' unbound -> NameError below
            raise ConfigError('[Variable: %s] Invalid tuple type %r!' %
                              (varName, tupleType))

        result = []
        for tupleEntry in tupleList:
            try:
                tmp = self._parseParameter(varName, tupleEntry[varIndex],
                                           varType)
            except Exception:
                # was: '%r' % repr(...) (double repr) and referenced
                # 'tupleStrings', which is unbound for tupleType == 'binning'
                raise ConfigError('Unable to parse %r' % (tupleEntry,))
            if isinstance(tmp, list):
                if len(tmp) != 1:
                    raise ConfigError(
                        '[Variable: %s] Tuple entry (%s) expands to multiple variable entries (%s)!'
                        % (varName, tupleEntry[varIndex], tmp))
                result.append(tmp[0])
            else:
                result.append(tmp)
        return result
Example #2
0
	def __init__(self, remoteType="", **kwargs):
		# Initialize a remote process handler of the requested connection type,
		# resolved via the class-level RPHType enum / RPHTemplate mapping.
		self._log = logging.getLogger('backend.condor')
		self.cmd=False
		# pick requested remote connection
		try:
			self.remoteType = self.RPHType.str2enum(remoteType)
			# command templates still contain %(...)s placeholders at this point
			self.cmd = self.RPHTemplate[self.remoteType]["command"]
			self.copy = self.RPHTemplate[self.remoteType]["copy"]
			self.path = self.RPHTemplate[self.remoteType]["path"]
			self.argFormat = self.RPHTemplate[self.remoteType]["argFormat"]
		except Exception:
			raise ConfigError("Request to initialize RemoteProcessHandler of unknown type: %s" % remoteType)
		# destination should be of type: [user@]host
		if self.remoteType==self.RPHType.SSH or self.remoteType==self.RPHType.GSISSH:
			# fill in the remote host placeholder first - KeyError if 'host' is missing
			try:
				self.cmd = self.cmd % { "rhost" : kwargs["host"] }
				self.copy = self.copy % { "rhost" : kwargs["host"] }
				self.host = kwargs["host"]
			except Exception:
				raise ConfigError("Request to initialize RemoteProcessHandler of type %s without remote host." % self.RPHType.enum2str(self.remoteType))
		# add default arguments for all commands
		self.cmd = self.cmd % { "cmdargs" : kwargs.get("cmdargs",""), "args" : kwargs.get("args","") }
		self.copy = self.copy % { "cpargs" : kwargs.get("cpargs",""), "args" : kwargs.get("args","") }
		# test connection once
		proc = LoggedProcess(self.cmd % { "cmd" : "exit"})
		ret = proc.getAll()[0]
		if ret != 0:
			raise CondorProcessError('Validation of remote connection failed!', proc)
		self._log.log(logging.INFO2, 'Remote interface initialized:\n\tCmd: %s\n\tCp : %s', self.cmd, self.copy)
Example #3
0
    def _processConfigFiles(self, config, cfgFiles, fragment_path, autoPrepare,
                            mustPrepare):
        # Back up every uninitialized config file, optionally instrumenting it
        uninitialized = self._cfgFindUninitialized(config, cfgFiles,
                                                   autoPrepare, mustPrepare)
        for (cfg, cfg_new, doPrepare) in uninitialized:
            prompt = 'Do you want to prepare %s for running over the dataset?' % cfg
            prepare = doPrepare and (autoPrepare or
                                     utils.getUserBool(prompt, True))
            if prepare:
                self._cfgStore(cfg, cfg_new, fragment_path)
            else:
                self._cfgStore(cfg, cfg_new)

        # Verify that every config file reached the work directory and
        # carries the required instrumentation
        result = []
        for cfg in cfgFiles:
            cfg_new = config.getWorkPath(os.path.basename(cfg))
            if not os.path.exists(cfg_new):
                raise ConfigError(
                    'Config file %r was not copied to the work directory!' %
                    cfg)
            isInstrumented = self._cfgIsInstrumented(cfg_new)
            if not isInstrumented:
                if mustPrepare:
                    neededStr = str.join(', ', imap(lambda x: '@%s@' % x,
                                                    self.neededVars()))
                    raise ConfigError(
                        'Config file %r must use %s to work properly!' %
                        (cfg, neededStr))
                if autoPrepare:
                    self._log.warning('Config file %r was not instrumented!',
                                      cfg)
            result.append(cfg_new)
        return result
Example #4
0
    def _process_config_file_list(self, config, config_file_list,
                                  fragment_path, auto_prepare, must_prepare):
        # Back up every uninitialized config file, optionally instrumenting it
        for cfg_info in self._config_find_uninitialized(config,
                                                        config_file_list,
                                                        auto_prepare,
                                                        must_prepare):
            (cfg, cfg_new, do_prepare) = cfg_info
            ask_user_msg = 'Do you want to prepare %s for running over the dataset?' % cfg
            if do_prepare and (auto_prepare
                               or self._uii.prompt_bool(ask_user_msg, True)):
                self._config_store_backup(cfg, cfg_new, fragment_path)
            else:
                self._config_store_backup(cfg, cfg_new)

        # Verify that every config file reached the work directory and
        # carries the required instrumentation
        result = []
        for cfg in config_file_list:
            cfg_new = config.get_work_path(os.path.basename(cfg))
            if not os.path.exists(cfg_new):
                raise ConfigError(
                    'Config file %r was not copied to the work directory!' %
                    cfg)
            is_instrumented = self._config_is_instrumented(cfg_new)
            if not is_instrumented:
                if must_prepare:
                    needed_str = str.join(', ', imap(lambda x: '@%s@' % x,
                                                     sorted(self._needed_vn_set)))
                    raise ConfigError(
                        'Config file %r must use %s to work properly!' %
                        (cfg, needed_str))
                if auto_prepare:
                    self._log.warning('Config file %r was not instrumented!',
                                      cfg)
            result.append(cfg_new)
        return result
Example #5
0
def parseLumiFilter(lumiexpr):
    """Parse a lumi filter expression into a merged list of lumi ranges.

    The expression is a comma separated list of either lumi range strings
    or JSON lumi filter file names (with an optional '|<run filter>'
    suffix). Returns None for an empty expression; raises ConfigError when
    a file or expression cannot be processed.
    """
    if lumiexpr == '':
        return None

    lumis = []
    from grid_control.config import ConfigError
    for token in imap(str.strip, lumiexpr.split(',')):
        token = lmap(str.strip, token.split('|'))
        if True in imap(str.isalpha,
                        token[0].lower().replace('min', '').replace('max',
                                                                    '')):
            # token contains letters (beyond min/max) -> treat as JSON file
            if len(token) == 1:
                token.append('')
            try:
                json_fn = os.path.normpath(
                    os.path.expandvars(os.path.expanduser(token[0].strip())))
                json_fp = open(json_fn)
                try:
                    lumis.extend(parseLumiFromJSON(json_fp.read(), token[1]))
                finally:
                    # was: file handle leaked if parseLumiFromJSON raised
                    json_fp.close()
            except Exception:
                raise ConfigError(
                    'Could not process lumi filter file: %r (filter: %r)' %
                    tuple(token))
        else:
            try:
                lumis.append(parseLumiFromString(token[0]))
            except Exception:
                raise ConfigError(
                    'Could not process lumi filter expression:\n\t%s' %
                    token[0])
    return mergeLumi(lumis)
def lookupConfigParser(pconfig, outputKey, lookupKeys):
    # Resolve the output/lookup parameter keys into plain key names
    def collectKeys(src):
        keys = []
        src.fillParameterKeys(keys)
        return keys

    outputKey = collectKeys(outputKey)[0]
    if lookupKeys is None:
        lookupKeys = [pconfig.get('default lookup')]
    else:
        lookupKeys = collectKeys(lookupKeys)
    if (not lookupKeys) or (lookupKeys == ['']):
        raise ConfigError('Lookup parameter not defined!')

    # Determine the matcher used for each lookup key - a single matcher
    # specification is applied to all keys
    defaultMatcher = pconfig.get('', 'default matcher', 'equal')
    matchstrList = pconfig.get(outputKey.lstrip('!'), 'matcher',
                               defaultMatcher).lower().splitlines()
    if len(matchstrList) != len(lookupKeys):
        if len(matchstrList) != 1:
            raise ConfigError(
                'Match-functions (length %d) and match-keys (length %d) do not match!'
                % (len(matchstrList), len(lookupKeys)))
        matchstrList = matchstrList * len(lookupKeys)
    matchfun = [Matcher.createInstance(matcherName, pconfig, outputKey)
                for matcherName in matchstrList]

    # Retrieve the lookup dictionary - add empty entries unless disabled
    (content, order) = pconfig.getParameter(outputKey.lstrip('!'))
    if not pconfig.getBool(outputKey.lstrip('!'), 'empty set', False):
        for k in content:
            if not content[k]:
                content[k].append('')
    return (outputKey, lookupKeys, matchfun, (content, order))
Example #7
0
def parse_lumi_filter(lumi_str):
    # Parse a comma separated list of lumi ranges / JSON lumi filter files
    if lumi_str == '':
        return None

    run_lumi_range_list = []
    from grid_control.config import ConfigError
    for token in imap(str.strip, lumi_str.split(',')):
        token = lmap(str.strip, token.split('|'))
        letters_only = token[0].lower().replace('min', '').replace('max', '')
        if True in imap(str.isalpha, letters_only):
            # token contains letters (beyond min/max) -> JSON filter file
            while len(token) < 2:
                token.append('')
            try:
                json_fn = os.path.expandvars(
                    os.path.expanduser(token[0].strip()))
                json_fn = os.path.normpath(json_fn)
                run_lumi_range_list.extend(
                    parse_lumi_from_json(
                        SafeFile(json_fn).read_close(), token[1]))
            except Exception:
                raise ConfigError(
                    'Could not process lumi filter file: %r (filter: %r)' %
                    tuple(token))
        else:
            try:
                run_lumi_range_list.append(parse_lumi_from_str(token[0]))
            except Exception:
                raise ConfigError(
                    'Could not process lumi filter expression:\n\t%s' %
                    token[0])
    return merge_lumi_list(run_lumi_range_list)
Example #8
0
def frange(start, end = None, num = None, steps = None, format = '%g'):
	"""Return a list of formatted numbers starting at *start*.

	The sequence is defined by any consistent combination of the final
	value *end*, the number of values *num* and the step size *steps*;
	each value is rendered with the format string *format*.

	Raises:
		ConfigError: if the sequence is under- or overdetermined.
	"""
	if (end is None) and (num is None):
		raise ConfigError('frange: No exit condition!')
	if (end is not None) and (num is not None) and (steps is not None):
		raise ConfigError('frange: Overdetermined parameters!')
	if (end is not None) and (num is not None) and (steps is None):
		if num == 1:  # single-element sequence is just [start]
			steps = 0  # was: ZeroDivisionError for num == 1
		else:
			steps = float(end - start) / (num - 1)
	if (end is not None) and (num is None):
		steps = steps or 1
		num = int(1 + (end - start) / steps)
	if steps is None:  # only num given - default to unit steps
		steps = 1
	# was: (steps or 1), which silently turned a valid computed step size of
	# 0 (eg. frange(5, 5, num=3)) into 1
	return [format % (start + steps * idx) for idx in range(num)]
Example #9
0
	def __init__(self, config, name):
		# Configure SCRAM settings either from an explicit 'scram project'
		# option or by inspecting a local project area directory.
		DataTask.__init__(self, config, name)
		config.set('area files matcher mode', 'ShellStyleMatcher')

		# SCRAM settings
		scram_arch_default = unspecified
		scram_project = config.get_list('scram project', [])
		if scram_project:  # manual scram setup
			if len(scram_project) != 2:
				raise ConfigError('%r needs exactly 2 arguments: <PROJECT> <VERSION>' % 'scram project')
			self._project_area = None
			self._project_area_selector_list = None
			self._scram_project = scram_project[0]
			self._scram_project_version = scram_project[1]
			# ensure project area is not used
			if 'project area' in config.get_option_list():
				raise ConfigError('Cannot specify both %r and %r' % ('scram project', 'project area'))

		else:  # scram setup used from project area
			self._project_area = config.get_dn('project area')
			self._always_matcher = Matcher.create_instance('AlwaysMatcher', config, [''])
			self._project_area_base_fn = config.get_bool('area files basename', True,
				on_change=TriggerInit('sandbox'))
			# file selection pattern for packing the project area ('-' prefix excludes)
			self._project_area_matcher = config.get_matcher('area files',
				'-.* -config bin lib python module data *.xml *.sql *.db *.cfi *.cff *.py -CVS -work.* *.pcm',
				default_matcher='BlackWhiteMatcher', on_change=TriggerInit('sandbox'))
			self._log.info('Project area found in: %s', self._project_area)

			# try to determine scram settings from environment settings
			scram_path = os.path.join(self._project_area, '.SCRAM')
			scram_env = self._parse_scram_file(os.path.join(scram_path, 'Environment'))
			try:
				self._scram_project = scram_env['SCRAM_PROJECTNAME']
				self._scram_project_version = scram_env['SCRAM_PROJECTVERSION']
			except:
				raise ConfigError('Installed program in project area not recognized.')

			# use the alphabetically last architecture directory as default
			def filter_arch_dir(dn):
				return os.path.isdir(os.path.join(scram_path, dn))
			for arch_dir in sorted(ifilter(filter_arch_dir, os.listdir(scram_path))):
				scram_arch_default = arch_dir

		self._scram_version = config.get('scram version', 'scramv1')
		self._scram_arch = config.get('scram arch', scram_arch_default)

		# translate scram settings into job requirements (eg. VO-cms-<arch>)
		self._scram_req_list = []
		if config.get_bool('scram arch requirements', True, on_change=None):
			self._scram_req_list.append((WMS.SOFTWARE, 'VO-cms-%s' % self._scram_arch))
		if config.get_bool('scram project requirements', False, on_change=None):
			self._scram_req_list.append((WMS.SOFTWARE, 'VO-cms-%s' % self._scram_project))
		if config.get_bool('scram project version requirements', False, on_change=None):
			self._scram_req_list.append((WMS.SOFTWARE, 'VO-cms-%s' % self._scram_project_version))
Example #10
0
	def __init__(self, config, name):
		# Set up a CMSSW task: CMS dataset defaults, SCRAM initialization,
		# cmsRun config file handling and the project area tarball.
		config.set('se input timeout', '0:30')
		config.set('dataset provider', 'DBS3Provider')
		config.set('dataset splitter', 'EventBoundarySplitter')
		config.set('dataset processor', 'LumiDataProcessor', '+=')
		config.set('partition processor', 'TFCPartitionProcessor LocationPartitionProcessor MetaPartitionProcessor ' +
			'LFNPartitionProcessor LumiPartitionProcessor CMSSWPartitionProcessor')
		dash_config = config.changeView(viewClass = 'SimpleConfigView', setSections = ['dashboard'])
		dash_config.set('application', 'cmsRun')
		SCRAMTask.__init__(self, config, name)
		if self._scramProject != 'CMSSW':
			raise ConfigError('Project area contains no CMSSW project')

		# remember the release top of the local project area (if any)
		self._oldReleaseTop = None
		if self._projectArea:
			self._oldReleaseTop = self._parse_scram_file(os.path.join(self._projectArea, '.SCRAM', self._scramArch, 'Environment')).get('RELEASETOP', None)

		self.updateErrorDict(utils.pathShare('gc-run.cmssw.sh', pkg = 'grid_control_cms'))

		# transfer the project area tarball via SE runtime or sandbox
		self._projectAreaTarballSE = config.getBool(['se runtime', 'se project area'], True)
		self._projectAreaTarball = config.getWorkPath('cmssw-project-area.tar.gz')

		# Prolog / Epilog script support - warn about old syntax
		self.prolog = TaskExecutableWrapper(config, 'prolog', '')
		self.epilog = TaskExecutableWrapper(config, 'epilog', '')
		if config.getPaths('executable', []) != []:
			raise ConfigError('Prefix executable and argument options with either prolog or epilog!')
		self.arguments = config.get('arguments', '')

		# Get cmssw config files and check their existance
		# Check that for dataset jobs the necessary placeholders are in the config file
		if self._dataSplitter is None:
			self.eventsPerJob = config.get('events per job', '0') # this can be a variable like @USER_EVENTS@!
		fragment = config.getPath('instrumentation fragment', utils.pathShare('fragmentForCMSSW.py', pkg = 'grid_control_cms'))
		self.configFiles = self._processConfigFiles(config, list(self._getConfigFiles(config)), fragment,
			autoPrepare = config.getBool('instrumentation', True),
			mustPrepare = (self._dataSplitter is not None))

		# Create project area tarball
		if self._projectArea and not os.path.exists(self._projectAreaTarball):
			config.setState(True, 'init', detail = 'sandbox')
		# Information about search order for software environment
		self.searchLoc = self._getCMSSWPaths(config)
		if config.getState('init', detail = 'sandbox'):
			if os.path.exists(self._projectAreaTarball):
				if not utils.getUserBool('CMSSW tarball already exists! Do you want to regenerate it?', True):
					return
			# Generate CMSSW tarball
			if self._projectArea:
				utils.genTarball(self._projectAreaTarball, utils.matchFiles(self._projectArea, self._projectAreaPattern))
			if self._projectAreaTarballSE:
				config.setState(True, 'init', detail = 'storage')
Example #11
0
    def __init__(self, config, name):
        # Configure nickname based mappings for config files, constants and
        # lumi filters before delegating to the CMSSW constructor.
        self._name = name  # needed for changeView calls before the constructor
        head = [('DATASETNICK', 'Nickname')]

        # Mapping between nickname and config files:
        self._nmCfg = config.getLookup(
            'nickname config', {},
            defaultMatcher='regex',
            parser=lambda x: lmap(str.strip, x.split(',')),
            strfun=lambda x: str.join(',', x))
        if not self._nmCfg.empty():
            if 'config file' in config.getOptions():
                raise ConfigError(
                    "Please use 'nickname config' instead of 'config file'")
            # collect all config files referenced by any nickname
            allConfigFiles = sorted(
                set(utils.flatten(self._nmCfg.get_values())))
            config.set('config file', str.join('\n', allConfigFiles))
            head.append((1, 'Config file'))

        # Mapping between nickname and constants - only display - work is handled by the 'normal' parameter factory
        nmCName = config.getList('nickname constants', [], onChange=None)
        param_config = config.changeView(viewClass='TaggedConfigView',
                                         setClasses=None,
                                         setNames=None,
                                         addSections=['parameters'])
        param_config.set('constants', str.join(' ', nmCName), '+=')
        for cName in nmCName:
            # each constant is looked up via the dataset nickname
            param_config.set(cName + ' matcher', 'regex')
            param_config.set(cName + ' lookup', 'DATASETNICK')
            head.append((cName, cName))

        # Mapping between nickname and lumi filter - only display - work is handled by the 'normal' lumi filter
        if ('lumi filter' in config.getOptions()) and ('nickname lumi filter'
                                                       in config.getOptions()):
            raise ConfigError('Please use "lumi filter" exclusively')
        config.set('lumi filter matcher', 'regex')
        config.set(
            'lumi filter',
            strDictLong(
                config.getDict('nickname lumi filter', {}, onChange=None)))
        self._nmLumi = config.getLookup('lumi filter', {},
                                        parser=parseLumiFilter,
                                        strfun=strLumi,
                                        onChange=None)
        if not self._nmLumi.empty():
            head.append((2, 'Lumi filter'))

        CMSSW.__init__(self, config, name)
        self._displaySetup(config.getWorkPath('datacache.dat'), head)
Example #12
0
 def do_transfer(self, desc_source_target_list):
     # Copy each (description, source, target) entry to every storage path
     for (desc, source, target) in desc_source_target_list:
         if not self._storage_paths:
             raise ConfigError(
                 "%s can't be transferred because '%s path wasn't set" %
                 (desc, self._storage_channel))
         for (idx, se_path) in enumerate(set(self._storage_paths)):
             activity = Activity('Copy %s to SE %d ' % (desc, idx + 1))
             copy_proc = se_copy(source, os.path.join(se_path, target),
                                 self._storage_force)
             copy_proc.status(timeout=5 * 60, terminate=True)
             activity.finish()
             if copy_proc.status(timeout=0) != 0:
                 # report the failure and let the user decide whether to abort
                 self._log.info('Copy %s to SE %d failed', desc, idx + 1)
                 self._log.log_process(copy_proc)
                 self._log.critical(
                     'Unable to copy %s! You can try to copy it manually.',
                     desc)
                 msg = 'Is %s (%s) available on SE %s?' % (desc, source,
                                                           se_path)
                 if not UserInputInterface().prompt_bool(msg, False):
                     raise StorageError('%s is missing on SE %s!' %
                                        (desc, se_path))
             else:
                 self._log.info('Copy %s to SE %d finished', desc, idx + 1)
Example #13
0
    def __init__(self, **kwargs):
        # Set up ssh/scp commands and (optionally) a shared ssh control link
        ProcessHandler.__init__(self, **kwargs)
        ssh_default_args = ' -vvv -o BatchMode=yes -o ForwardX11=no'
        self._shell_cmd = resolve_install_path('ssh') + ssh_default_args
        self._copy_cmd = resolve_install_path('scp') + ssh_default_args + ' -r'
        # state of the shared ssh control master connection
        self._ssh_link_id = 0
        self._ssh_link_args = ''
        self._ssh_link_timestamp = 0
        self._ssh_link_fail_count = 0
        self._ssh_link_master_proc = None
        try:
            self._remote_host = kwargs['remote_host']  # destination: [user@]host
        except Exception:
            raise ConfigError(
                'Request to initialize SSH-Type RemoteProcessHandler without remote host.'
            )

        try:
            self._ssh_link_base = os.path.abspath(kwargs['sshLink'])
            # older ssh/gsissh puts a maximum length limit on control paths, use a different one
            if len(self._ssh_link_base) >= 107:
                self._ssh_link_base = os.path.expanduser(
                    '~/.ssh/%s' % os.path.basename(self._ssh_link_base))
            self._ssh_link = self._ssh_link_base
            _ssh_link_secure(self._ssh_link, init_dn=True)
            self._get_ssh_link()
        except KeyError:
            # no 'sshLink' given - run without a shared control socket
            clear_current_exception()
            self._ssh_link = False

        # test connection once
        proc_test = self.logged_execute('exit')
        if proc_test.wait() != 0:
            raise CondorProcessError('Failed to validate remote connection.',
                                     proc_test)
Example #14
0
 def doTransfer(self, listDescSourceTarget):
     # Copy each (description, source, target) entry to every storage path
     for (desc, source, target) in listDescSourceTarget:
         if not self.smPaths:
             raise ConfigError(
                 "%s can't be transferred because '%s path wasn't set" %
                 (desc, self.smOptPrefix))
         for (idx, sePath) in enumerate(set(self.smPaths)):
             utils.vprint('Copy %s to SE %d ' % (desc, idx + 1),
                          -1,
                          newline=False)
             sys.stdout.flush()
             copyProc = se_copy(source, os.path.join(sePath, target),
                                self.smForce)
             if copyProc.status(timeout=5 * 60, terminate=True) != 0:
                 # report the failure and let the user decide whether to abort
                 utils.vprint('failed', -1)
                 utils.eprint(copyProc.stderr.read(timeout=0))
                 utils.eprint(
                     'Unable to copy %s! You can try to copy it manually.' %
                     desc)
                 userMsg = 'Is %s (%s) available on SE %s?' % (desc, source,
                                                               sePath)
                 if not utils.getUserBool(userMsg, False):
                     raise StorageError('%s is missing on SE %s!' %
                                        (desc, sePath))
             else:
                 utils.vprint('finished', -1)
Example #15
0
	def __init__(self, **kwargs):
		# Set up ssh commands and (optionally) a shared ssh control socket
		ProcessHandler.__init__(self, **kwargs)
		self.__initcommands(**kwargs)
		self.defaultArgs="-vvv -o BatchMode=yes -o ForwardX11=no " + kwargs.get("defaultArgs","")
		self.socketArgs=""
		self.socketEnforce=kwargs.get("sshLinkEnforce",True)
		try:
			self.remoteHost = kwargs["remoteHost"]  # destination: [user@]host
		except Exception:
			raise ConfigError("Request to initialize SSH-Type RemoteProcessHandler without remote host.")
		try:
			self.sshLinkBase=os.path.abspath(kwargs["sshLink"])
			# older ssh/gsissh puts a maximum length limit on control paths, use a different one
			if ( len(self.sshLinkBase)>= 107):
				self.sshLinkBase=os.path.expanduser("~/.ssh/%s"%os.path.basename(self.sshLinkBase))
			self.sshLink=self.sshLinkBase
			self._secureSSHLink(initDirectory=True)
			self._socketHandler()
		except KeyError:
			# no 'sshLink' given - run without a shared control socket
			self.sshLink=False
		# test connection once
		testProcess = self.LoggedExecute( "exit" )
		if testProcess.wait() != 0:
			testProcess.getError()
			raise CondorProcessError('Failed to validate remote connection.', testProcess)
Example #16
0
    def substitute_variables(self,
                             name,
                             inp,
                             jobnum=None,
                             additional_var_dict=None,
                             check=True):
        # Build the full substitution dictionary visible to this job
        additional_var_dict = additional_var_dict or {}
        merged_var_dict = dict_union(additional_var_dict,
                                     self._get_const_job_env())
        if jobnum is not None:
            merged_var_dict.update(self.get_job_dict(jobnum))

        def _substitute(value):
            # the alias iterator is rebuilt per pass - ichain is one-shot
            alias_iter = ichain([
                self.get_var_alias_map().items(),
                izip(additional_var_dict, additional_var_dict)
            ])
            return replace_with_dict(value, merged_var_dict, alias_iter)

        # two passes resolve variables that reference other variables
        result = _substitute(_substitute(str(inp)))
        if check and self._var_checker.check(result):
            raise ConfigError('%s references unknown variables: %s' %
                              (name, result))
        return result
Example #17
0
	def __init__(self, config, datasetExpr, datasetNick = None):
		# Expected expression format: /local/path/to/file|events[@SE1,SE2]
		DataProvider.__init__(self, config, datasetExpr, datasetNick)
		(path, events, seStr) = utils.optSplit(datasetExpr, '|@')
		(self._path, self._events) = (path, events)
		self._selist = parseList(seStr, ',') or None
		if (not path) or (not events):
			raise ConfigError('Invalid dataset expression!\nCorrect: /local/path/to/file|events[@SE1,SE2]')
Example #18
0
 def doTransfer(self, listDescSourceTarget):
     # Copy each (description, source, target) entry to every storage path
     for (desc, source, target) in listDescSourceTarget:
         if not self.smPaths:
             raise ConfigError(
                 "%s can't be transferred because '%s path wasn't set" %
                 (desc, self.smOptPrefix))
         for (idx, sePath) in enumerate(set(self.smPaths)):
             activity = Activity('Copy %s to SE %d ' % (desc, idx + 1))
             copyProc = se_copy(source, os.path.join(sePath, target),
                                self.smForce)
             copyProc.status(timeout=5 * 60, terminate=True)
             activity.finish()
             if copyProc.status(timeout=0) != 0:
                 # report the failure and let the user decide whether to abort
                 self._log.info('Copy %s to SE %d failed', desc, idx + 1)
                 self._log.critical(copyProc.stderr.read(timeout=0))
                 self._log.critical(
                     'Unable to copy %s! You can try to copy it manually.',
                     desc)
                 userMsg = 'Is %s (%s) available on SE %s?' % (desc, source,
                                                               sePath)
                 if not utils.getUserBool(userMsg, False):
                     raise StorageError('%s is missing on SE %s!' %
                                        (desc, sePath))
             else:
                 self._log.info('Copy %s to SE %d finished', desc, idx + 1)
Example #19
0
 def getSubmitArguments(self, jobNum, jobName, reqs, sandbox, stdout,
                        stderr):
     # Format seconds as HH:MM:SS for the batch system
     def fmtTime(s):
         return '%02d:%02d:%02d' % (s / 3600, (s / 60) % 60, s % 60)
     # map abstract requirements to batch system resource options
     reqMap = {
         WMS.MEMORY: ('h_vmem', lambda m: '%dM' % m),
         WMS.WALLTIME: ('s_rt', fmtTime),
         WMS.CPUTIME: ('h_cpu', fmtTime)
     }
     # Restart jobs = no
     params = ' -r n -notify'
     if self._project:
         params += ' -P %s' % self._project
     # Job requirements
     queue = reqs.get(WMS.QUEUES, [''])[0]
     nodes = reqs.get(WMS.SITES)
     if nodes and not queue:
         raise ConfigError(
             'Please also specify queue when selecting nodes!')
     elif nodes:
         nodeList = imap(lambda node: '%s@%s' % (queue, node), nodes)
         params += ' -q %s' % str.join(',', nodeList)
     elif queue:
         params += ' -q %s' % queue
     return params + PBSGECommon.getCommonSubmitArguments(
         self, jobNum, jobName, reqs, sandbox, stdout, stderr, reqMap)
Example #20
0
    def __init__(self, config, name):
        # Determine ROOT path from previous settings / environment / config file
        self._rootpath = config.get('root path',
                                    os.environ.get('ROOTSYS', ''),
                                    persistent=True,
                                    onChange=changeInitNeeded('sandbox'))
        if not self._rootpath:
            raise ConfigError(
                'Either set environment variable "ROOTSYS" or set option "root path"!'
            )
        utils.vprint('Using the following ROOT path: %s' % self._rootpath, -1)

        # Special handling for executables bundled with ROOT
        self._executable = config.get('executable',
                                      onChange=changeInitNeeded('sandbox'))
        exeFull = os.path.join(self._rootpath, 'bin',
                               self._executable.lstrip('/'))
        # executables found in the ROOT 'bin' directory are not sent with the job
        self.builtIn = os.path.exists(exeFull)
        if self.builtIn:
            config.set('send executable', 'False')
            # store resolved built-in executable path?

        # Apply default handling from UserTask
        UserTask.__init__(self, config, name)
        self.updateErrorDict(utils.pathShare('gc-run.root.sh'))

        # Collect lib files needed by executable
        self.libFiles = []
Example #21
0
	def fill_parameter_content(self, pnum, result):
		# Store the single lookup result for this parameter in *result*
		output_tuple = self._helper.lookup(result)
		if output_tuple is None:
			return  # no matching lookup entry - leave result untouched
		if len(output_tuple) != 1:
			raise ConfigError("%s can't handle multiple lookup parameter sets!" % self.__class__.__name__)
		if output_tuple[0] is not None:
			result[self._output_vn] = output_tuple[0]
Example #22
0
	def __init__(self, config, name):
		# Configure SCRAM settings either from an explicit 'scram project'
		# option or by inspecting a local project area directory.
		DataTask.__init__(self, config, name)

		# SCRAM settings
		scramArchDefault = noDefault
		scramProject = config.getList('scram project', [])
		if scramProject: # manual scram setup
			if len(scramProject) != 2:
				raise ConfigError('%r needs exactly 2 arguments: <PROJECT> <VERSION>' % 'scram project')
			self._projectArea = None
			self._projectAreaPattern = None
			self._scramProject = scramProject[0]
			self._scramProjectVersion = scramProject[1]
			# ensure project area is not used
			if 'project area' in config.getOptions():
				raise ConfigError('Cannot specify both %r and %r' % ('scram project', 'project area'))

		else: # scram setup used from project area
			self._projectArea = config.getPath('project area')
			# file selection pattern for packing the project area ('-' prefix excludes)
			self._projectAreaPattern = config.getList('area files', ['-.*', '-config', 'bin', 'lib', 'python', 'module',
				'*/data', '*.xml', '*.sql', '*.db', '*.cf[if]', '*.py', '-*/.git', '-*/.svn', '-*/CVS', '-*/work.*'])
			logging.getLogger('user').info('Project area found in: %s', self._projectArea)

			# try to determine scram settings from environment settings
			scramPath = os.path.join(self._projectArea, '.SCRAM')
			scramEnv = self._parse_scram_file(os.path.join(scramPath, 'Environment'))
			try:
				self._scramProject = scramEnv['SCRAM_PROJECTNAME']
				self._scramProjectVersion = scramEnv['SCRAM_PROJECTVERSION']
			except:
				raise ConfigError('Installed program in project area not recognized.')

			# use the alphabetically last architecture directory as default
			for arch_dir in sorted(ifilter(lambda dn: os.path.isdir(os.path.join(scramPath, dn)), os.listdir(scramPath))):
				scramArchDefault = arch_dir

		self._scramVersion = config.get('scram version', 'scramv1')
		self._scramArch = config.get('scram arch', scramArchDefault)

		# translate scram settings into job requirements (eg. VO-cms-<arch>)
		self._scramReqs = []
		if config.getBool('scram arch requirements', True, onChange = None):
			self._scramReqs.append((WMS.SOFTWARE, 'VO-cms-%s' % self._scramArch))
		if config.getBool('scram project requirements', False, onChange = None):
			self._scramReqs.append((WMS.SOFTWARE, 'VO-cms-%s' % self._scramProject))
		if config.getBool('scram project version requirements', False, onChange = None):
			self._scramReqs.append((WMS.SOFTWARE, 'VO-cms-%s' % self._scramProjectVersion))
Example #23
0
def _get_lookup_args(pconfig, output_user, lookup_user_list):
    """Resolve lookup configuration for a derived parameter.

    Returns a tuple (output_vn, lookup_vn_list, lookup_matcher_list,
    lookup_dict, lookup_order) built from the user-supplied output and
    lookup specifications and the parameter config.
    """
    def _vn_list(src):
        # Transform output and lookup input: eg. key('A', 'B') -> ['A', 'B']
        meta_list = []
        src.fill_parameter_metadata(meta_list)
        return [meta.value for meta in meta_list]

    if isinstance(output_user, str):
        output_vn = output_user
    else:
        output_vn = _vn_list(output_user)[0]
    if isinstance(lookup_user_list, str):
        lookup_vn_list = lookup_user_list.split()
    elif lookup_user_list is None:
        # no lookup information given - query config for default lookup variable
        lookup_vn_list = [pconfig.get('default lookup')]
    else:
        lookup_vn_list = _vn_list(lookup_user_list)
    if (not lookup_vn_list) or (lookup_vn_list == ['']):
        raise ConfigError('Lookup parameter not defined!')

    # configure lookup matcher - one matcher name per lookup variable
    matcher_fallback = pconfig.get('', 'default matcher', 'equal')
    matcher_name_list = pconfig.get(output_vn, 'matcher', matcher_fallback).lower().splitlines()
    if len(matcher_name_list) == 1:
        # single matcher given - extend to same length as lookup_vn_list
        matcher_name_list = matcher_name_list * len(lookup_vn_list)
    elif len(matcher_name_list) != len(lookup_vn_list):
        raise ConfigError(
            'Match-functions (length %d) and match-keys (length %d) do not match!'
            % (len(matcher_name_list), len(lookup_vn_list)))
    lookup_matcher_list = [Matcher.create_instance(matcher_name, pconfig, output_vn)
                           for matcher_name in matcher_name_list]

    # configure lookup dictionary - keys without entries map to a single '' entry
    (lookup_dict, lookup_order) = pconfig.get_parameter(output_vn)
    if not pconfig.get_bool(output_vn, 'empty set', False):
        for lookup_key in lookup_dict:
            if not lookup_dict[lookup_key]:
                lookup_dict[lookup_key].append('')
    return (output_vn, lookup_vn_list, lookup_matcher_list, lookup_dict,
            lookup_order)
Example #24
0
	def _parse_scram_file(self, fn):
		# Read a SCRAM bookkeeping file and return its content as a dictionary;
		# any read or parse failure is reported as a configuration error.
		try:
			with open(fn, 'r') as fp:
				return utils.DictFormat().parse(fp, keyParser = {None: str})
		except Exception:
			raise ConfigError('Project area file %s cannot be parsed!' % fn)
Example #25
0
 def _getConfigFiles(self, config):
     # Yield the configured config files, verifying that each one exists.
     # With an active prolog/epilog, config files are optional (default []).
     if self.prolog.isActive() or self.epilog.isActive():
         cfgDefault = []
     else:
         cfgDefault = noDefault
     for cfgFile in config.getPaths('config file',
                                    cfgDefault,
                                    mustExist=False):
         if os.path.exists(cfgFile):
             yield cfgFile
         else:
             raise ConfigError('Config file %r not found.' % cfgFile)
 def fillParameterInfo(self, pNum, result):
     # Look up the value matching the current parameter set and store it
     # under self._key; exactly one lookup result is supported.
     matched = self._matcher.lookup(result)
     if matched is None:
         return
     if len(matched) != 1:
         raise ConfigError(
             "%s can't handle multiple lookup parameter sets!" %
             self.__class__.__name__)
     value = matched[0]
     if value is not None:
         result[self._key] = value
Example #27
0
    def __init__(self, config):
        """Enable dashboard monitoring and resolve the user's hypernews name."""
        config.set('jobs', 'monitor', 'dashboard', override=False)
        config.set('grid', 'sites', '-samtest -cmsprodhi', append=True)

        # Map the grid certificate DN to the CMS hypernews account name
        cric = CRIC()
        token = AccessToken.create_instance('VomsProxy', create_config(),
                                            'token')
        self._hn_name = cric.dn_to_username(token.get_fq_user_name())
        if not self._hn_name:
            raise ConfigError('Unable to map grid certificate to hn name!')
Example #28
0
	def substVars(self, inp, jobNum = None, addDict = None, check = True):
		# Perform a two-pass substitution of task/job variables in inp.
		# addDict entries take precedence and are also mapped onto themselves.
		addDict = addDict or {}
		allVars = utils.mergeDicts([addDict, self.getTaskConfig()])
		if jobNum is not None:
			allVars.update(self.getJobConfig(jobNum))
		def _apply(value):
			# the variable mapping iterator is rebuilt for every pass on purpose
			mapping = ichain([self.getVarMapping().items(), izip(addDict, addDict)])
			return utils.replaceDict(value, allVars, mapping)
		result = _apply(_apply(str(inp)))
		if check and self._varCheck.check(result):
			raise ConfigError("'%s' contains invalid variable specifiers: '%s'" % (inp, result))
		return result
Example #29
0
 def _setup(self, setup_key, setup_mod):
     # Split a '<delim>:<start>:<end>' setup key into its components plus an
     # optional value-modifier function; returns None when no key is given.
     if not setup_key:
         return None
     (delim, ds, de) = utils.optSplit(setup_key, '::')
     modifier = identity
     if setup_mod and (setup_mod.strip() != 'value'):
         # NOTE(review): evaluates a config-supplied expression - trusted input only
         try:
             modifier = eval('lambda value: ' + setup_mod)  # pylint:disable=eval-used
         except Exception:
             raise ConfigError('Unable to parse delimeter modifier %r' %
                               setup_mod)
     return (delim, parseStr(ds, int), parseStr(de, int), modifier)
Example #30
0
	def __init__(self, jobDB, task, jobs = None, configString = ''):
		# Report grouping backend information by the hierarchy levels listed
		# in configString (optionally prefixed with the 'history' keyword).
		Report.__init__(self, jobDB, task, jobs, configString)
		self._levelMap = {'wms': 2, 'endpoint': 3, 'site': 4, 'queue': 5}
		self._useHistory = ('history' in configString)
		levelNames = configString.replace('history', '').split()
		self._idxList = [self._levelMap[levelName.lower()] for levelName in levelNames]
		self._idxList.reverse()
		if not self._idxList:
			raise ConfigError('Backend report was not configured!')
		self._stateMap = [(None, 'WAITING'), (Job.RUNNING, 'RUNNING'),
			(Job.FAILED, 'FAILED'), (Job.SUCCESS, 'SUCCESS')]