def fill(self, container):
    combinedEntry = container.getEntry('cmdargs', lambda entry: entry.section == 'global')
    newCmdLine = self._cmd_line_args
    if combinedEntry:
        newCmdLine = combinedEntry.value.split() + self._cmd_line_args
    (opts, _) = parse_cmd_line(newCmdLine)

    def setConfigFromOpt(section, option, value):
        if value is not None:
            self._addEntry(container, section, option, str(value), '<cmdline>')

    cmd_line_config_map = {
        'state!': {'#init': opts.init, '#resync': opts.resync,
            '#display config': opts.help_conf, '#display minimal config': opts.help_confmin},
        'action': {'delete': opts.delete, 'reset': opts.reset},
        'global': {'gui': opts.gui, 'submission': opts.submission},
        'jobs': {'max retry': opts.max_retry, 'selected': opts.job_selector},
        'logging': {'debug mode': opts.debug},
    }
    for section in cmd_line_config_map:
        for (option, value) in cmd_line_config_map[section].items():
            setConfigFromOpt(section, option, value)
    for entry in opts.logging:
        tmp = entry.replace(':', '=').split('=')
        if len(tmp) == 1:
            tmp.append('DEBUG')
        setConfigFromOpt('logging', tmp[0] + ' level', tmp[1])
    if opts.action is not None:
        setConfigFromOpt('workflow', 'action', opts.action.replace(',', ' '))
    if opts.continuous:
        setConfigFromOpt('workflow', 'duration', -1)
    Plugin.createInstance('StringConfigFiller', opts.override).fill(container)

def fill(self, container):
    combinedEntry = container.getEntry('cmdargs', lambda entry: entry.section == 'global')
    newCmdLine = self._cmd_line_args
    if combinedEntry:
        newCmdLine = combinedEntry.value.split() + self._cmd_line_args
    (opts, _) = gc_cmd_line_parser(newCmdLine)

    def setConfigFromOpt(section, option, value):
        if value is not None:
            self._addEntry(container, section, option, str(value), '<cmdline>')

    cmd_line_config_map = {
        'state!': {'#init': opts.init, '#resync': opts.resync,
            '#display config': opts.help_conf, '#display minimal config': opts.help_confmin},
        'action': {'delete': opts.delete, 'reset': opts.reset},
        'global': {'gui': opts.gui, 'submission': opts.submission},
        'jobs': {'max retry': opts.max_retry, 'selected': opts.job_selector},
        'logging': {'debug mode': opts.debug},
    }
    for section in cmd_line_config_map:
        for (option, value) in cmd_line_config_map[section].items():
            setConfigFromOpt(section, option, value)
    for entry in opts.logging:
        tmp = entry.replace(':', '=').split('=')
        if len(tmp) == 1:
            tmp.append('DEBUG')
        setConfigFromOpt('logging', tmp[0] + ' level', tmp[1])
    if opts.action is not None:
        setConfigFromOpt('workflow', 'action', opts.action.replace(',', ' '))
    if opts.continuous:
        setConfigFromOpt('workflow', 'duration', -1)
    Plugin.createInstance('StringConfigFiller', opts.override).fill(container)

def fill(self, container):
    combined_entry = container.get_entry('cmdargs', lambda entry: entry.section == 'global')
    new_cmd_line = self._cmd_line_args
    if combined_entry:
        new_cmd_line = combined_entry.value.split() + self._cmd_line_args
    (opts, _) = _parse_cmd_line(new_cmd_line)
    if opts.debug_console:
        handle_debug_interrupt()
    if opts.debug_trace:
        debug_trace_kwargs = {}
        for key_value in opts.debug_trace:
            debug_trace_kwargs[key_value.split('=')[0]] = key_value.split('=')[1]
        DebugInterface().set_trace(**debug_trace_kwargs)

    def _set_config_from_opt(section, option, value):
        if value is not None:
            self._add_entry(container, section, option, str(value), '<cmdline>')  # pylint:disable=no-member

    cmd_line_config_map = {
        'state!': {'#init': opts.init, '#resync': opts.resync,
            '#display config': opts.help_conf, '#display minimal config': opts.help_confmin},
        'action': {'delete': opts.delete, 'cancel': opts.cancel, 'reset': opts.reset},
        'global': {'gui': opts.gui, 'submission': opts.submission},
        'jobs': {'jobs': opts.jobs, 'max retry': opts.max_retry, 'selected': opts.job_selector},
        'logging': {'debug mode': opts.debug},
    }
    for section in cmd_line_config_map:
        for (option, value) in cmd_line_config_map[section].items():
            _set_config_from_opt(section, option, value)
    for (logger_name, logger_level) in parse_logging_args(opts.logging):
        _set_config_from_opt('logging', logger_name + ' level', logger_level)
    if opts.action is not None:
        _set_config_from_opt('workflow', 'action', opts.action.replace(',', ' '))
    if opts.continuous:
        _set_config_from_opt('workflow', 'duration', -1)
    if opts.override:
        Plugin.create_instance('StringConfigFiller', opts.override).fill(container)

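# Hedged sketch (an assumption, not code from the listing): parse_logging_args is used by
# the newer fill() above but is not shown here. Based on the equivalent inline loop in the
# older fill() variants, it plausibly maps entries such as 'jobs:INFO' or plain 'jobs'
# (level defaulting to DEBUG) to (logger_name, level) pairs:
def parse_logging_args_sketch(logging_args):
    for entry in logging_args:
        tmp = entry.replace(':', '=').split('=')
        if len(tmp) == 1:
            tmp.append('DEBUG')  # no explicit level given - assume DEBUG
        yield (tmp[0], tmp[1])
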
def __init__(self, config, datasetExpr, datasetNick=None):
    ds_config = config.changeView(viewClass='TaggedConfigView', addNames=[md5_hex(datasetExpr)])
    if os.path.isdir(datasetExpr):
        scan_pipeline = ['OutputDirsFromWork']
        ds_config.set('source directory', datasetExpr)
        datasetExpr = os.path.join(datasetExpr, 'work.conf')
    else:
        scan_pipeline = ['OutputDirsFromConfig', 'MetadataFromTask']
        datasetExpr, selector = utils.optSplit(datasetExpr, '%')
        ds_config.set('source config', datasetExpr)
        ds_config.set('source job selector', selector)
    ext_config = create_config(datasetExpr)
    ext_task_name = ext_config.changeView(setSections=['global']).get(['module', 'task'])
    if 'ParaMod' in ext_task_name:  # handle old config files
        ext_task_name = ext_config.changeView(setSections=['ParaMod']).get('module')
    ext_task_cls = Plugin.getClass(ext_task_name)
    for ext_task_cls in Plugin.getClass(ext_task_name).iterClassBases():
        try:
            scan_holder = GCProviderSetup.getClass('GCProviderSetup_' + ext_task_cls.__name__)
        except PluginError:
            continue
        scan_pipeline += scan_holder.scan_pipeline
        break
    ScanProviderBase.__init__(self, ds_config, datasetExpr, datasetNick, scan_pipeline)

def create(cls, pconfig=None, name='subspace', factory='SimpleParameterFactory'):  # pylint:disable=arguments-differ
    try:
        ParameterFactory = Plugin.getClass('ParameterFactory')
        config = pconfig.getConfig(viewClass='SimpleConfigView', addSections=[name])
        return SubSpaceParameterSource(name, ParameterFactory.createInstance(factory, config))
    except:
        raise ParameterError('Unable to create subspace %r using factory %r' % (name, factory))

def _get_plugin_factory_list(self, option, default=unspecified, cls=Plugin,
        require_plugin=True, single_plugin=False, desc='plugin factories',
        bind_args=None, bind_kwargs=None, **kwargs):
    bind_kwargs = dict(bind_kwargs or {})
    bind_kwargs.setdefault('config', self)
    if isinstance(cls, str):
        cls = Plugin.get_class(cls)

    def _bind_plugins(value):
        obj_list = list(cls.bind(value, *(bind_args or []), **bind_kwargs))
        if single_plugin and len(obj_list) > 1:
            raise ConfigError('This option only allows to specify a single plugin!')
        if require_plugin and not obj_list:
            raise ConfigError('This option requires to specify a valid plugin!')
        return obj_list
    return self._get_internal(desc,
        obj2str=lambda value: str.join('\n', imap(lambda obj: obj.get_bind_value(), value)),
        str2obj=_bind_plugins, def2obj=_bind_plugins,
        option=option, default_obj=default, **kwargs)

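# Observation on the two variants in this listing: the older _getPluginFactories() further
# below passes config/inherit/tags to cls.bind() directly, while _get_plugin_factory_list()
# above routes any extra binding arguments through bind_args/bind_kwargs (with 'config'
# added by default) - how callers actually use bind_kwargs is not shown here.
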
def get_plugin_list(pname, inherit_prefix=False):
    alias_dict = {}
    inherit_map = {}
    cls = Plugin.get_class(pname)
    for entry in cls.get_class_info_list():
        depth = entry.pop('depth', 0)
        (alias, name) = entry.popitem()
        alias_dict.setdefault(name, []).append((depth, alias))

    def _process_child_map(mapping, prefix=''):
        for cls_name in mapping:
            inherit_map[cls_name] = _process_child_map(mapping[cls_name], prefix + '-' + cls_name)
        return prefix
    _process_child_map(cls.get_class_children(), pname)
    alias_dict.pop(pname, None)
    table_list = []
    for name in alias_dict:
        # sorted by length of name and depth
        by_len_depth = sorted(alias_dict[name], key=lambda d_a: (-len(d_a[1]), d_a[0]))
        # sorted by depth and name
        by_depth_name = sorted(alias_dict[name], key=lambda d_a: (d_a[0], d_a[1]))
        new_name = by_len_depth.pop()[1]
        depth = min(imap(lambda d_a: d_a[0], alias_dict[name]))
        alias_list = lmap(lambda d_a: d_a[1], by_depth_name)
        alias_list.remove(new_name)
        if inherit_prefix:
            new_name = ' | ' * (inherit_map[name].count('-') - 1) + new_name
        entry = {'Name': new_name, 'Alias': str.join(', ', alias_list),
            'Depth': '%02d' % depth, 'Inherit': inherit_map.get(name, '')}
        table_list.append(entry)
    return table_list

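# Hedged usage sketch for get_plugin_list() above - the row keys ('Depth', 'Name', 'Alias')
# come from the function itself; the plugin name 'ParameterSource' and the output format
# are illustrative assumptions:
def print_plugin_list_sketch(pname='ParameterSource'):
    for row in get_plugin_list(pname, inherit_prefix=True):
        print('%s  %-40s  %s' % (row['Depth'], row['Name'], row['Alias']))
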
def _getPluginFactories(self, option, default=noDefault, cls=Plugin, tags=None, inherit=False,
        requirePlugin=True, singlePlugin=False, desc='plugin factories', **kwargs):
    if isinstance(cls, str):
        cls = Plugin.getClass(cls)

    def str2obj(value):
        objList = list(cls.bind(value, config=self, inherit=inherit, tags=tags or []))
        if singlePlugin and len(objList) > 1:
            raise ConfigError('This option only allows to specify a single plugin!')
        if requirePlugin and not objList:
            raise ConfigError('This option requires to specify a valid plugin!')
        return objList
    obj2str = lambda value: str.join('\n', imap(lambda obj: obj.bindValue(), value))
    return self._getInternal(desc, obj2str, str2obj, str2obj, option, default, **kwargs)

def _useAvailableDataSource(self, source):
    DataParameterSource = Plugin.getClass('DataParameterSource')
    if DataParameterSource.datasetsAvailable and not DataParameterSource.datasetsUsed:
        if source is not None:
            return ParameterSource.createInstance('CrossParameterSource', DataParameterSource.create(), source)
        return DataParameterSource.create()
    return source

def create_psrc(cls, pconfig, repository, *args):  # pylint:disable=arguments-differ
    name = 'subspace'
    factory = 'SimpleParameterFactory'
    if len(args) == 1:
        name = args[0]
    elif len(args) == 2:
        (factory, name) = (args[0], args[1])
    try:
        config = pconfig.get_config(view_class='SimpleConfigView', add_sections=[name])
        pfactory = Plugin.get_class('ParameterFactory').create_instance(factory, config)
        return SubSpaceParameterSource(name, pfactory, repository)
    except:
        raise ParameterError('Unable to create subspace %r using factory %r' % (name, factory))

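# Argument convention in create_psrc() above: a single positional argument gives only the
# subspace name, two arguments give (factory, name); otherwise the defaults name='subspace'
# and factory='SimpleParameterFactory' apply.
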
def __init__(self, config, datasource_name, dataset_expr, dataset_nick=None, dataset_proc=None):
    ds_config = config.change_view(view_class='TaggedConfigView', add_names=[md5_hex(dataset_expr)])
    if os.path.isdir(dataset_expr):
        scanner_list = ['OutputDirsFromWork']
        ds_config.set('source directory', dataset_expr)
        dataset_expr = os.path.join(dataset_expr, 'work.conf')
    else:
        scanner_list = ['OutputDirsFromConfig', 'MetadataFromTask']
        dataset_expr, selector = split_opt(dataset_expr, '%')
        ds_config.set('source config', dataset_expr)
        ds_config.set('source job selector', selector)
    ext_config = create_config(dataset_expr)
    ext_task_name = ext_config.change_view(set_sections=['global']).get(['module', 'task'])
    ext_task_cls = Plugin.get_class(ext_task_name)
    for ext_task_cls in Plugin.get_class(ext_task_name).iter_class_bases():
        scan_setup_name = 'GCProviderSetup_' + ext_task_cls.__name__
        scan_setup_cls = GCProviderSetup.get_class(scan_setup_name, ignore_missing=True)
        if scan_setup_cls:
            scanner_list += scan_setup_cls.scanner_list
            break
    ScanProviderBase.__init__(self, ds_config, datasource_name, dataset_expr,
        dataset_nick, dataset_proc, scanner_list)

def initGC(args):
    if len(args) > 0:
        config = getConfig(args[0])
        userSelector = None
        if len(args) != 1:
            userSelector = JobSelector.create(args[1])
        return (config, Plugin.createInstance('TextFileJobDB', config, jobSelector=userSelector))
    sys.stderr.write('Syntax: %s <config file> [<job id>, ...]\n\n' % sys.argv[0])
    sys.exit(os.EX_USAGE)

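# Hedged usage sketch for initGC(): a script entry point that forwards its command line
# arguments (config file plus optional job selector). The __main__ guard and any further
# processing of the returned jobDB are assumptions, not shown above.
if __name__ == '__main__':
    (config, jobDB) = initGC(sys.argv[1:])
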
def getPluginList(pluginName):
    aliasDict = {}
    for entry in Plugin.getClass(pluginName).getClassList():
        depth = entry.pop('depth', 0)
        (alias, name) = entry.popitem()
        aliasDict.setdefault(name, []).append((depth, alias))
    aliasDict.pop(pluginName)
    tableList = []
    for name in aliasDict:
        # sorted by length of name and depth
        by_len_depth = sorted(aliasDict[name], key=lambda d_a: (-len(d_a[1]), d_a[0]))
        # sorted by depth and name
        by_depth_name = sorted(aliasDict[name], key=lambda d_a: (d_a[0], d_a[1]))
        new_name = by_len_depth.pop()[1]
        aliasList = lmap(lambda d_a: d_a[1], by_depth_name)
        aliasList.remove(new_name)
        entry = {'Name': new_name, 'Alias': str.join(', ', aliasList)}
        if ('Multi' not in name) and ('Base' not in name):
            tableList.append(entry)
    return tableList

def __init__(self):
    self._log = logging.getLogger('parameters.source')
    Plugin.__init__(self)

import re

try:
    from hpfwk import Plugin
    NickNameProducer = Plugin.getClass('NickNameProducer')
except:
    from grid_control import datasets  # FIXME: this line should be unnecessary
    from grid_control.datasets import NickNameProducer


def lookup(data, keyMap, default=None):
    for key in keyMap:
        if key in data:
            return keyMap[key]
    return (default, '')[default == None]


def lookupBegin(data, keyMap, default=None):
    print(data)
    for key in keyMap:
        if data.startswith(key):
            return keyMap[key]
    return (default, '')[default == None]


def addPart(data):
    return ('_%s' % data, '')[data == '']


def parseCuts(data):
    tmp = re.findall('([0-9]+)to([0-9]+)', data.lower())
    if tmp:
        return tmp[0]
    return filter(lambda x: int(x) > 0, re.findall('pt[-_]*([0-9]+)', data.lower()))

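# Worked examples for the helpers above (dataset names are illustrative; the results follow
# directly from the regular expressions in parseCuts and the tuple-indexing trick in addPart,
# assuming Python 2 list semantics for filter):
#   parseCuts('QCD_Pt_15to30')  -> ('15', '30')  via '([0-9]+)to([0-9]+)'
#   parseCuts('QCD_Pt_80')      -> ['80']        via 'pt[-_]*([0-9]+)'
#   addPart('ext')              -> '_ext'
#   addPart('')                 -> ''
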
def __init__(self):
    Plugin.__init__(self)
    self._resyncInfo = None
    self._resyncTime = -1  # Default - always resync
    self._resyncLast = None

class MyNick(Plugin.getClass('NickNameProducer')):
    def getName(self, oldnick, dataset, block):
        if oldnick:
            return oldnick + '_changed'
        return 'newnick'

def getSource(self, config):
    DataParameterSource = Plugin.getClass('DataParameterSource')
    source = self._getRawSource(ParameterSource.createInstance('RNGParameterSource'))
    if DataParameterSource.datasetsAvailable and not DataParameterSource.datasetsUsed:
        source = ParameterSource.createInstance('CrossParameterSource', DataParameterSource.create(), source)
    return ParameterAdapter.createInstance(self.adapter, config, source)
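
# Note on getSource() above and _useAvailableDataSource() earlier: both apply the same
# convention - while DataParameterSource reports datasetsAvailable and not yet datasetsUsed,
# the user-defined parameter source is combined with the dataset source via a
# 'CrossParameterSource'; otherwise the source is passed through unchanged.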