def fill(self, container):
    # Read every configured file and store its options in the given config
    # container, expanding very basic %(option)s interpolations per section.
    searchPaths = []
    for configFile in self._configFiles:
        configContent = {}
        # Parse the file (following includes) into configContent; the return
        # value is the list of directories that were involved in the search
        searchPaths.extend(
            self._fillContentWithIncludes(configFile, [os.getcwd()], configContent))
        # Store config settings
        for section in configContent:
            # Allow very basic substitutions with %(option)s syntax
            def getOptValue(option, value, source):
                # drop the source component - only (option, value) pairs
                # are needed to build the substitution dictionary
                return (option, value)
            # Substitution values come from the 'default' section, overridden
            # by the options of the current section (later entries win)
            substDict = dict(
                ichain([
                    ismap(getOptValue, configContent.get('default', [])),
                    ismap(getOptValue, configContent.get(section, []))
                ]))
            for (option, value, source) in configContent[section]:
                # Protection for non-interpolation "%" in value:
                # hide every '%' as '\x01', restore only the '%(' needed for
                # interpolation, apply the substitution, then map the
                # remaining '\x01' placeholders back to literal '%'
                try:
                    value = (
                        value.replace('%', '\x01').replace('\x01(', '%(')
                        % substDict).replace('\x01', '%')
                except Exception:
                    # re-raise as a config-level error with full context
                    raise ConfigError(
                        'Unable to interpolate value %r with %r' % (value, substDict))
                self._addEntry(container, section, option, value, source)
    # Publish the collected search directories as additional plugin paths
    searchString = str.join(' ', UniqueList(searchPaths))
    if self._addSearchPath:
        self._addEntry(container, 'global', 'plugin paths+', searchString,
            str.join(',', self._configFiles))
def _build_blocks(self, map_key2fm_list, map_key2name, map_key2metadata_dict):
    # Yield one named dataset block per key, assembled from the stored
    # file metadata lists (fm entry layout: url, metadata_dict, entries,
    # location_list, obj_dict).
    for block_key in sorted(map_key2fm_list):
        fm_entry_list = map_key2fm_list[block_key]
        block = {
            DataProvider.Dataset: map_key2name[block_key[:1]],
            DataProvider.BlockName: map_key2name[block_key[:2]],
        }
        # Merge the per-file location lists; stays None when no file
        # carried any location information at all
        merged_locations = None
        for per_file_locations in imap(itemgetter(3), fm_entry_list):
            if per_file_locations is None:
                continue
            if merged_locations is None:
                merged_locations = []
            merged_locations.extend(per_file_locations)
        if merged_locations is not None:
            block[DataProvider.Locations] = list(UniqueList(merged_locations))
        # use first file [0] to get the initial metadata_dict [1]
        metadata_key_list = list(fm_entry_list[0][1].keys())
        block[DataProvider.Metadata] = metadata_key_list

        def _fm2file_info(url, metadata_dict, entries, location_list, obj_dict):
            # translate one file metadata entry into a data provider file entry
            return {
                DataProvider.URL: url,
                DataProvider.NEntries: -1 if entries is None else entries,
                DataProvider.Metadata: lmap(metadata_dict.get, metadata_key_list),
            }
        block[DataProvider.FileList] = lsmap(_fm2file_info, fm_entry_list)
        yield block
def resolve_paths(path, search_path_list=None, must_exist=True, exception_type=PathError):
    # Expand the given path and resolve wildcards against the file system.
    # Absolute paths are globbed directly; relative paths are looked up in
    # each search directory. Returns the sorted matches, or [path] for a
    # non-existing path when must_exist is False; otherwise raises.
    path = clean_path(path)  # replace $VAR, ~user, \ separators
    matches = []
    if os.path.isabs(path):
        # Resolve wildcards for existing files
        matches.extend(sorted(glob.glob(path)))
        if matches:
            return matches
        if must_exist:
            raise exception_type('Could not find file "%s"' % path)
        return [path]  # Return non-existing, absolute path
    # search relative path in search directories
    search_path_list = search_path_list or []
    for search_dn in UniqueList(search_path_list):
        candidate = clean_path(os.path.join(search_dn, path))
        matches.extend(sorted(glob.glob(candidate)))
    if matches:
        return matches
    if must_exist:
        raise exception_type(
            'Could not find file "%s" in \n\t%s' % (path, str.join('\n\t', search_path_list)))
    return [path]  # Return non-existing, relative path
def logging_create_handlers(config, logger_name):
    # Configure general setup of loggers - destinations, level and propagation
    target_name = logger_name.lower().replace('exception', 'abort').replace('root', '')
    logger = logging.getLogger(target_name)

    def _create_handler(handler_str):
        # instantiate the output handler selected by the config string
        if handler_str == 'stdout':
            return StdoutStreamHandler()
        if handler_str == 'stderr':
            return StderrStreamHandler()
        if handler_str == 'file':
            return logging.FileHandler(config.get(logger_name + ' file', on_change=None), 'w')
        if handler_str == 'debug_file':
            return GCLogHandler(config.get_fn_list(logger_name + ' debug file',
                get_debug_file_candidates(), on_change=None, must_exist=False), 'w')
        raise Exception('Unknown handler %s for logger %s' % (handler_str, logger_name))

    # Setup handlers
    handler_list = config.get_list(logger_name + ' handler', [], on_change=None)
    if not handler_list:
        # no explicit handlers configured - (re)configure the existing ones
        for existing_handler in logger.handlers:
            logging_configure_handler(config, logger_name, '', existing_handler)
    else:
        # remove any standard handlers before installing the configured ones
        for existing_handler in list(logger.handlers):
            logger.removeHandler(existing_handler)
    for handler_str in UniqueList(handler_list):  # add only unique output handlers
        new_handler = _create_handler(handler_str)
        logger.addHandler(logging_configure_handler(config, logger_name, handler_str, new_handler))
        logger.propagate = False
    # Set propagate status (default False once explicit handlers were added)
    logger.propagate = config.get_bool(logger_name + ' propagate',
        bool(logger.propagate), on_change=None)
    # Set logging level
    logger.setLevel(config.get_enum(logger_name + ' level',
        LogLevelEnum, logger.level, on_change=None))
def _buildBlocks(self, protoBlocks, hashNameDictDS, hashNameDictB):
    # Return named dataset: yield one block dictionary per (dataset hash,
    # block hash) pair, translating the raw proto entries
    # (path, metadata, events, seList, objStore) into DataProvider entries.
    for hashDS in sorted(protoBlocks):
        for hashB in sorted(protoBlocks[hashDS]):
            # Merge the per-file storage element lists; stays None when no
            # file carried any location information at all
            blockSEList = None
            for seList in ifilter(
                    lambda s: s is not None,
                    imap(lambda x: x[3], protoBlocks[hashDS][hashB])):
                blockSEList = blockSEList or []
                blockSEList.extend(seList)
            if blockSEList is not None:
                blockSEList = list(UniqueList(blockSEList))
            # Metadata keys are taken from the first file's metadata dict;
            # NOTE(review): assumes all files in a block share the same keys
            metaKeys = protoBlocks[hashDS][hashB][0][1].keys()

            def fnProps(path, metadata, events, seList, objStore):
                # translate one proto file entry into a data provider file entry
                if events is None:
                    events = -1  # -1 marks an unknown number of entries
                return {
                    DataProvider.URL: path,
                    DataProvider.NEntries: events,
                    DataProvider.Metadata: lmap(metadata.get, metaKeys)
                }
            yield {
                DataProvider.Dataset: hashNameDictDS[hashDS],
                DataProvider.BlockName: hashNameDictB[hashB][1],
                DataProvider.Locations: blockSEList,
                DataProvider.Metadata: list(metaKeys),
                DataProvider.FileList: lsmap(fnProps, protoBlocks[hashDS][hashB])
            }
def __init__(self, filler=None, configFilePath=None):
    # Set up the configuration: fill the current container via the given
    # filler, create the config view, determine the work directory and load
    # previously stored settings from the work directory (if present).
    def getName(prefix=''):
        # Derive a display name from the config file name, e.g. 'work.myconf'
        if configFilePath:
            return ('%s.%s' % (prefix, utils.getRootName(configFilePath))).strip('.')
        elif prefix:
            return prefix
        return 'unnamed'
    pathMain = os.getcwd()
    if configFilePath:
        # Use the config file's directory as the main path instead of cwd
        pathMain = os.path.dirname(
            utils.resolvePath(configFilePath, searchPaths=[os.getcwd()],
                ErrorClass=ConfigError))
    # Init config containers
    self._curContainer = ConfigContainer('current')
    if filler:  # Read in the current configuration ...
        filler.fill(self._curContainer)
    logging.getLogger('config.stored').propagate = False
    # Container for the settings stored from a previous run; disabled until
    # those settings have actually been loaded (see below)
    oldContainer = ConfigContainer('stored')
    oldContainer.enabled = False
    # Create config view and temporary config interface
    self._view = SimpleConfigView(getName(), oldContainer, self._curContainer)
    self._view.pathDict['search_paths'] = UniqueList(
        [os.getcwd(), pathMain])
    # Determine work directory using config interface with "global" scope
    tmpInterface = SimpleConfigInterface(
        self._view.getView(setSections=['global']))
    wdBase = tmpInterface.getPath('workdir base', pathMain, mustExist=False)
    pathWork = tmpInterface.getPath('workdir',
        os.path.join(wdBase, getName('work')), mustExist=False)
    self._view.pathDict[
        '<WORKDIR>'] = pathWork  # tmpInterface still has undefined work directory
    # Set dynamic plugin search path
    sys.path.extend(tmpInterface.getPaths('plugin paths', [os.getcwd()]))
    # Determine and load stored config settings
    self._flatCfgPath = os.path.join(pathWork, 'current.conf')  # Minimal config file
    self._oldCfgPath = os.path.join(
        pathWork, 'work.conf')  # Config file with saved settings
    if os.path.exists(self._oldCfgPath):
        # Restore the settings of the previous run and freeze them
        GeneralFileConfigFiller([self._oldCfgPath]).fill(oldContainer)
        CompatConfigFiller(os.path.join(pathWork, 'task.dat')).fill(oldContainer)
        oldContainer.enabled = True
        oldContainer.setReadOnly()
    # Get persistent variables - only possible after oldContainer was enabled
    self._view.setConfigName(
        tmpInterface.get('config id', getName(), persistent=True))
def resolve_install_path(path):
    # Locate the given file name in $PATH and return the first match that
    # is executable; raises PathError when no executable match exists.
    search_dn_list = UniqueList(os.environ['PATH'].split(os.pathsep))
    found = resolve_paths(path, search_dn_list, True, PathError)
    # filter executable files
    executables = lfilter(lambda fn: os.access(fn, os.X_OK), found)
    if not executables:
        raise PathError('Files matching %s:\n\t%s\nare not executable!' % (
            path, str.join('\n\t', found)))
    return executables[0]
def __init__(self, filler=None, config_file_path=None, load_old_config=True, path_base=None):
    # Set up the configuration: fill the current container via the given
    # filler, create the config view, determine the work directory and -
    # when requested - load the settings stored by a previous run.
    def _get_name(prefix=''):
        # Derive a display name from the config file name, e.g. 'work.myconf'
        if config_file_path:
            return ('%s.%s' % (prefix, get_file_name(config_file_path))).strip('.')
        elif prefix:
            return prefix
        return 'unnamed'
    try:
        config_dn = os.getcwd()
    except Exception:
        # os.getcwd raises when the process cwd has been removed
        raise ConfigError('The current directory does not exist!')
    if config_file_path:
        config_dn = os.path.dirname(resolve_path(config_file_path,
            search_path_list=[os.getcwd()], exception_type=ConfigError))
    if path_base:
        # explicit base path overrides the config file location
        config_dn = os.path.dirname(resolve_path(path_base, search_path_list=[os.getcwd()]))
    # Init config containers
    self._container_cur = ConfigContainer('current')
    if filler:  # Read in the current configuration ...
        filler.fill(self._container_cur)
    self._container_cur.resolve()  # resolve interpolations
    logging.getLogger('config.stored').propagate = False
    # Container for the settings stored by a previous run; disabled until
    # those settings have actually been loaded (see below)
    container_old = ConfigContainer('stored')
    container_old.enabled = False
    # Create config view and temporary config interface
    self._view = SimpleConfigView(_get_name(), container_old, self._container_cur)
    self._view.config_vault['path:search'] = UniqueList([os.getcwd(), config_dn])
    # Determine work directory using config interface with "global" scope
    tmp_config = SimpleConfigInterface(self._view.get_view(set_sections=['global']))
    work_dn_base = tmp_config.get_dn('workdir base', config_dn, must_exist=False)
    work_dn_default = os.path.join(work_dn_base, _get_name('work'))
    work_dn = tmp_config.get_dn('workdir', work_dn_default, must_exist=False)
    self._view.config_vault['path:work_dn'] = work_dn  # tmp_config still has undefined work dir
    # Set dynamic plugin search path
    sys.path.extend(tmp_config.get_dn_list('plugin paths', [os.getcwd()]))
    # Determine and load stored config settings
    self._config_path_min = os.path.join(work_dn, 'current.conf')  # Minimal config file
    self._config_path_old = os.path.join(work_dn, 'work.conf')  # Config file with saved settings
    if load_old_config:
        if os.path.exists(self._config_path_old):
            GeneralFileConfigFiller([self._config_path_old]).fill(container_old)
        old_setting_file = os.path.join(work_dn, 'task.dat')
        if os.path.exists(old_setting_file):
            # import settings stored by older versions of the software
            ConfigFiller.create_instance('CompatConfigFiller',
                old_setting_file).fill(container_old)
        container_old.enabled = True
        container_old.protect()
    # Get persistent variables - only possible after container_old was enabled
    self._view.set_config_name(tmp_config.get('config id', _get_name(), persistent=True))
def fill(self, container):
    # Parse every configured file and transfer each option entry into the
    # given config container; finally publish the searched directories as
    # additional plugin paths (when enabled).
    collected_paths = []
    for config_file in self._configFiles:
        file_content = {}
        included_paths = self._fillContentWithIncludes(config_file, [os.getcwd()], file_content)
        collected_paths.extend(included_paths)
        # Store config settings
        for section in file_content:
            for (option, value, source) in file_content[section]:
                self._addEntry(container, section, option, value, source)
    search_str = str.join(' ', UniqueList(collected_paths))
    if self._addSearchPath:
        self._addEntry(container, 'global', 'plugin paths+', search_str,
            str.join(',', self._configFiles))
def fill(self, container):
    # Read all configured files and feed their settings into the container;
    # the directories searched while following includes are published as
    # additional plugin paths (when enabled).
    path_accum = []
    for config_fn in self._config_fn_list:
        section_map = {}
        path_accum.extend(self._fill_content_deep(config_fn, [os.getcwd()], section_map))
        # Store config settings
        for section in section_map:
            for (option, value, source) in section_map[section]:
                self._add_entry(container, section, option, value, source)
    search_path_str = str.join(' ', UniqueList(path_accum))
    if self._add_search_path:
        source_str = str.join(',', self._config_fn_list)
        self._add_entry(container, 'global', 'plugin paths+', search_path_str, source_str)
def require_storage(self, se_list):
    # Merge the given storage element list into the deduplicated requirements
    additional = se_list or []
    self.storage = UniqueList(self.storage + additional)
def require_software(self, value):
    # Register an additional software requirement; None values are ignored
    if value is None:
        return
    self.software = UniqueList(self.software + [value])