def _parse_attributes(self, c):
    """
    Parse a (previously validated) theme file into urwid AttrSpec
    attributes for internal use.

    :param c: config object for theme file
    :type c: `configobj.ConfigObj`
    :raises: `ConfigError`
    """
    attributes = {}
    for sec in c.sections:
        try:
            colours = int(sec)
        except ValueError:
            err_msg = 'section name %s is not a valid colour mode'
            raise ConfigError(err_msg % sec)
        attributes[colours] = {}
        for mode in c[sec].sections:
            attributes[colours][mode] = {}
            for themable in c[sec][mode].sections:
                block = c[sec][mode][themable]
                fg = block['fg']
                if colours == 1:
                    # monochrome mode has no background colours
                    bg = 'default'
                else:
                    bg = block['bg']
                if colours == 256:
                    # fall back to the 16-colour definition when a 256c value
                    # is missing. Fix: the original indexed with the *value*
                    # (c['16'][mode][themable][fg]) instead of the key 'fg',
                    # which raised KeyError whenever the fallback was taken.
                    fg = fg or c['16'][mode][themable]['fg']
                    bg = bg or c['16'][mode][themable]['bg']
                try:
                    att = AttrSpec(fg, bg, colours)
                except AttrSpecError as e:
                    # fix: 'except AttrSpecError, e' is a SyntaxError on Py3
                    raise ConfigError(e)
                attributes[colours][mode][themable] = att
    # NOTE(review): the result dict is built but never returned or stored
    # here -- confirm the caller expects that (possibly truncated source).
def _get_config_item(
    self,
    path: List[str],
    default: Any = None,
    required: bool = True,
) -> Any:
    """Get a config option from a path and option name, specifying whether it is
    required.

    Args:
        path: Keys leading to the option; the last element is the option name
            itself, e.g. ``["matrix", "user_id"]``.
        default: Value to return when the option is absent.
        required: Whether a missing option (with no default) is an error.

    Raises:
        ConfigError: If required is specified and the object is not found
            (and there is no default value provided), this error will be
            raised
    """
    # Fix: work on a copy so the caller's list is not mutated by pop().
    path = list(path)
    option_name = path.pop(-1)
    # Full dotted name (including the option) for consistent error messages;
    # the original omitted the option name in the first error message.
    dotted = ".".join(path + [option_name])

    # Sift through the config dicts specified by `path` to get the one
    # containing our option.
    config_dict = self.config
    for name in path:
        config_dict = config_dict.get(name)
        if not config_dict:
            # Fix: test `default is None` instead of truthiness, so falsy
            # defaults (0, False, "") still count as provided.
            if required and default is None:
                raise ConfigError(f"Config option {dotted} is required")
            config_dict = {}
            break

    # Retrieve the option
    option = config_dict.get(option_name, default)
    if required and option is None:
        raise ConfigError(f"Config option {dotted} is required")

    return option
def __init__(self, filepath):
    """
    Load bot configuration from a YAML file and configure logging.

    Args:
        filepath (str): Path to config file

    Raises:
        ConfigError: if the file is missing or a required matrix.* option
            is absent or malformed.
    """
    if not os.path.isfile(filepath):
        raise ConfigError(f"Config file '{filepath}' does not exist")

    # Load in the config file at the given filepath
    # NOTE(review): yaml.full_load can construct arbitrary Python objects;
    # consider yaml.safe_load if the file may be untrusted.
    with open(filepath) as file_stream:
        config = yaml.full_load(file_stream.read())

    # Logging setup
    formatter = logging.Formatter(
        '%(asctime)s | %(name)s [%(levelname)s] %(message)s')

    log_dict = config.get("logging", {})
    log_level = log_dict.get("level", "INFO")
    logger.setLevel(log_level)

    # Optional file handler (off by default)
    file_logging = log_dict.get("file_logging", {})
    file_logging_enabled = file_logging.get("enabled", False)
    file_logging_filepath = file_logging.get("filepath", "bot.log")
    if file_logging_enabled:
        handler = logging.FileHandler(file_logging_filepath)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    # Console handler (on by default)
    console_logging = log_dict.get("console_logging", {})
    console_logging_enabled = console_logging.get("enabled", True)
    if console_logging_enabled:
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    # Database setup
    database_dict = config.get("database", {})
    # may be None when unset; consumers must handle a missing path
    self.database_filepath = database_dict.get("filepath")

    # Matrix bot account setup
    matrix = config.get("matrix", {})

    self.user_id = matrix.get("user_id")
    if not self.user_id:
        raise ConfigError("matrix.user_id is a required field")
    elif not re.match("@.*:.*", self.user_id):
        raise ConfigError(
            "matrix.user_id must be in the form @name:domain")

    self.access_token = matrix.get("access_token")
    if not self.access_token:
        raise ConfigError("matrix.access_token is a required field")

    self.device_id = matrix.get("device_id", "cribbage bot")

    self.homeserver_url = matrix.get("homeserver_url")
    if not self.homeserver_url:
        raise ConfigError("matrix.homeserver_url is a required field")

    # trailing space so commands are matched as "<prefix> <command>"
    self.command_prefix = config.get("command_prefix", "!c") + " "
def _parse(self, data, entity_class, path):
    """
    Parse entity instances from loaded YAML dictionary.

    :param dict data: contents of loaded YAML configuration file
    :param FreeIPAEntity entity_class: entity class to create instances of
    :param str path: configuration file path
    :raises ConfigError: on empty/non-dict data, duplicate definitions,
        or more than one entity per file
    """
    if not data or not isinstance(data, dict):
        raise ConfigError('Config must be a non-empty dictionary')
    parsed = []
    fname = os.path.relpath(path, self.basepath)
    # fix: dict.items() instead of the Python-2-only iteritems(), so the
    # module also runs under Python 3 (items() works on both).
    for name, attrs in data.items():
        self.lg.debug('Creating entity %s', name)
        # NOTE(review): the flag checked is `self.ignore` while the listing
        # passed is `self.ignored` -- presumably flag + list pair, but
        # confirm both attributes exist on this class.
        if self.ignore and check_ignored(entity_class, name, self.ignored):
            self.lg.debug('Not creating ignored %s %s from %s',
                          entity_class.entity_name, name, fname)
            continue
        entity = entity_class(name, attrs, path)
        if name in self.entities[entity_class.entity_name]:
            raise ConfigError('Duplicit definition of %s' % repr(entity))
        parsed.append(entity)
    # one file must define at most one entity
    if len(parsed) > 1:
        raise ConfigError(
            'More than one entity parsed from %s (%d)'
            % (fname, len(parsed)))
    for entity in parsed:
        self.entities[entity_class.entity_name][entity.name] = entity
def cluster():
    """Executes CLUSTER and ANALYZE SQL commands on the database tables
    **detected_source** and **assoc_source** to re-order the data on the
    disk according to the Q3C spatial index. This should help with
    query performance.

    :raises ConfigError: if no config file is given, the database is
        unreachable, or the SQL commands fail
    """
    try:
        cf = sys.argv[1]
    except IndexError:
        raise ConfigError('Please provide a configuration file.')

    with open(cf, 'r') as stream:
        data = load(stream, Loader=Loader)
    dbname = data['setup']['database name']
    dbusr = data['setup']['database user']

    try:
        conn = psycopg2.connect(host='localhost', database=dbname,
                                user=dbusr)
    except Exception as e:
        # fix: was a bare `except:` that discarded the real cause
        raise ConfigError('Could not connect to database: %s' % e)

    try:
        cur = conn.cursor()
        cur.execute(
            'CLUSTER detected_source_q3c_ang2ipix_idx ON detected_source;')
        cur.execute('ANALYZE detected_source;')
        cur.execute(
            'CLUSTER assoc_source_q3c_ang2ipix_idx ON assoc_source;')
        cur.execute('ANALYZE assoc_source;')
        cur.close()
        print('\ndetected_source and assoc_source tables successfully '
              'clustered and analyzed.')
    except Exception as e:
        # fix: bare `except:` hid the SQL error; keep it in the message
        raise ConfigError('Tables could not be clustered: %s' % e)
    finally:
        # fix: the connection was never closed, leaking it on every call
        conn.close()
def read_config(self, path):
    """Parse alot's config file from path and apply it.

    Merges the new settings into the current config and loads the hooks
    file, key bindings, theme and accounts.

    :param path: path to the user config file
    :type path: str
    :raises: :class:`~alot.settings.errors.ConfigError`
    """
    spec = os.path.join(DEFAULTSPATH, 'alot.rc.spec')
    newconfig = read_config(path, spec, checks={
        'mail_container': mail_container,
        'force_list': force_list,
        'align': align_mode,
        'attrtriple': attr_triple,
        'gpg_key_hint': gpg_key
    })
    self._config.merge(newconfig)

    # hooks are optional: failure to load them is logged, not fatal
    hooks_path = os.path.expanduser(self._config.get('hooksfile'))
    try:
        self.hooks = imp.load_source('hooks', hooks_path)
    except Exception:
        # fix: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt
        logging.debug('unable to load hooks file:%s' % hooks_path)

    if 'bindings' in newconfig:
        newbindings = newconfig['bindings']
        if isinstance(newbindings, Section):
            self._bindings.merge(newbindings)

    # themes
    themestring = newconfig['theme']
    themes_dir = self._config.get('themes_dir')
    if themes_dir:
        themes_dir = os.path.expanduser(themes_dir)
    else:
        # default to $XDG_CONFIG_HOME/alot/themes
        configdir = os.environ.get('XDG_CONFIG_HOME',
                                   os.path.expanduser('~/.config'))
        themes_dir = os.path.join(configdir, 'alot', 'themes')
    logging.debug(themes_dir)

    # if config contains theme string use that
    if themestring:
        if not os.path.isdir(themes_dir):
            err_msg = 'cannot find theme %s: themes_dir %s is missing'
            raise ConfigError(err_msg % (themestring, themes_dir))
        else:
            theme_path = os.path.join(themes_dir, themestring)
            try:
                self._theme = Theme(theme_path)
            except ConfigError as e:
                err_msg = 'Theme file %s failed validation:\n'
                # fix: e.message does not exist on Python 3 exceptions
                raise ConfigError((err_msg % themestring) + str(e))

    # if still no theme is set, resort to default
    if self._theme is None:
        theme_path = os.path.join(DEFAULTSPATH, 'default.theme')
        self._theme = Theme(theme_path)

    self._accounts = self._parse_accounts(self._config)
    self._accountmap = self._account_table(self._accounts)
def __init__(self, filepath):
    """
    Load and validate bot configuration from a YAML file.

    Args:
        filepath (str): Path to config file

    Raises:
        ConfigError: if the file is missing, a required option is absent,
            or storage.store_filepath exists but is not a directory.
    """
    if not os.path.isfile(filepath):
        raise ConfigError(f"Config file '{filepath}' does not exist")

    # Load in the config file at the given filepath
    with open(filepath) as file_stream:
        self.config = yaml.safe_load(file_stream.read())

    # Logging setup
    formatter = logging.Formatter(
        '%(asctime)s | %(name)s [%(levelname)s] %(message)s')

    log_level = self._get_cfg(["logging", "level"], default="INFO")
    logger.setLevel(log_level)

    # Optional file handler (off by default)
    file_logging_enabled = self._get_cfg(
        ["logging", "file_logging", "enabled"], default=False)
    file_logging_filepath = self._get_cfg(
        ["logging", "file_logging", "filepath"], default="bot.log")
    if file_logging_enabled:
        handler = logging.FileHandler(file_logging_filepath)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    # Console handler (on by default)
    console_logging_enabled = self._get_cfg(
        ["logging", "console_logging", "enabled"], default=True)
    if console_logging_enabled:
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    # Storage setup
    self.database_filepath = self._get_cfg(
        ["storage", "database_filepath"], required=True)
    self.store_filepath = self._get_cfg(
        ["storage", "store_filepath"], required=True)

    # Create the store folder if it doesn't exist
    # NOTE(review): os.mkdir does not create intermediate directories;
    # a deeply nested store_filepath would raise FileNotFoundError.
    if not os.path.isdir(self.store_filepath):
        if not os.path.exists(self.store_filepath):
            os.mkdir(self.store_filepath)
        else:
            raise ConfigError(
                f"storage.store_filepath '{self.store_filepath}' is not a directory")

    # Matrix bot account setup
    self.user_id = self._get_cfg(["matrix", "user_id"], required=True)
    if not re.match("@.*:.*", self.user_id):
        raise ConfigError("matrix.user_id must be in the form @name:domain")

    self.user_password = self._get_cfg(
        ["matrix", "user_password"], required=True)
    self.device_id = self._get_cfg(["matrix", "device_id"], required=True)
    self.device_name = self._get_cfg(
        ["matrix", "device_name"], default="nio-template")
    self.homeserver_url = self._get_cfg(
        ["matrix", "homeserver_url"], required=True)
    self.enable_encryption = self._get_cfg(
        ["matrix", "enable_encryption"], default=False)

    # trailing space so commands are matched as "<prefix> <command>"
    self.command_prefix = self._get_cfg(["command_prefix"], default="!c") + " "
def read_config(configpath=None, specpath=None, checks=None):
    """
    get a (validated) config object for given config file path.

    :param configpath: path to config-file
    :type configpath: str
    :param specpath: path to spec-file
    :type specpath: str
    :param checks: custom checks to use for validator.
        see `validate docs <http://www.voidspace.org.uk/python/validate.html>`_
    :type checks: dict str->callable,
    :raises: :class:`~alot.settings.errors.ConfigError`
    :rtype: `configobj.ConfigObj`
    """
    # fix: a mutable default argument ({}) is shared between all calls;
    # use the None sentinel instead.
    if checks is None:
        checks = {}
    try:
        config = ConfigObj(infile=configpath, configspec=specpath,
                           file_error=True, encoding='UTF8')
    except ConfigObjError as e:
        raise ConfigError(e)
    except IOError:
        raise ConfigError('Could not read %s and/or %s'
                          % (configpath, specpath))
    except UnboundLocalError:
        # this works around a bug in configobj
        msg = '%s is malformed. Check for sections without parents..'
        raise ConfigError(msg % configpath)

    if specpath:
        validator = Validator()
        validator.functions.update(checks)
        try:
            results = config.validate(validator, preserve_errors=True)
        except ConfigObjError as e:
            # fix: e.message does not exist on Python 3 exceptions
            raise ConfigError(str(e))
        if results is not True:
            # collect all validation failures into one error message
            error_msg = ''
            for (section_list, key, res) in flatten_errors(config, results):
                if key is not None:
                    if res is False:
                        msg = 'key "%s" in section "%s" is missing.'
                        msg = msg % (key, ', '.join(section_list))
                    else:
                        msg = 'key "%s" in section "%s" failed validation: %s'
                        msg = msg % (key, ', '.join(section_list), res)
                else:
                    msg = 'section "%s" is missing' % '.'.join(section_list)
                error_msg += msg + '\n'
            raise ConfigError(error_msg)
    return config
def _generate_filename(self, entity):
    """Derive and set a unique repo file path for *entity*.

    The path is ``<basepath>/<entity_name>s/<cleaned name>.yaml`` with
    '.', '-' and ' ' in the name replaced by underscores.

    :raises ConfigError: if the entity already has a path or the
        generated filename collides with an existing one
    """
    if entity.path:
        raise ConfigError(
            '%s already has filepath (%s)' % (entity, entity.path))
    # fix: dict.values() instead of the Python-2-only itervalues(),
    # so the module also runs under Python 3
    used_names = [
        os.path.relpath(i.path, self.basepath)
        for i in self.repo_entities[entity.entity_name].values()]
    # normalize characters that are awkward in file names
    clean_name = entity.name
    for char in ('.', '-', ' '):
        clean_name = clean_name.replace(char, '_')
    fname = '%ss/%s.yaml' % (entity.entity_name, clean_name)
    if fname in used_names:
        raise ConfigError('%s filename already used' % fname)
    self.lg.debug('Setting %s file path to %s', entity, fname)
    entity.path = os.path.join(self.basepath, fname)
def _dumps_hosts(self):
    """Render the managed hosts as a hosts-file fragment.

    Returns the header/footer-wrapped block of "<proxy_ip> <name>" lines,
    terminated by a newline. Requires the proxy to be running.
    """
    if not self.proxy.running():
        raise ConfigError('you should run proxy first')
    entries = '\n'.join('%s %s' % (host.proxy_ip, host.name)
                        for host in self.hosts.values())
    lines = [LHC_HOSTS_HEADER, entries, LHC_HOSTS_FOOTER]
    return '\n'.join(lines) + '\n'
def __init__(self, name, data, path=None):
    """
    :param str name: entity name (user login, group name etc.)
    :param dict data: dictionary of entity configuration values
    :param str path: path to file the entity was parsed from;
        if None, indicates creation of entity from FreeIPA
    :raises ConfigError: when schema validation of *data* fails
    """
    super(FreeIPAEntity, self).__init__()
    if not data:  # may be None; we want to ensure dictionary
        data = dict()
    self.name = name
    self.path = path
    # 'metaparams' is stripped out before schema validation/conversion
    self.metaparams = data.pop('metaparams', dict())
    if self.path:  # created from local config
        try:
            self.validation_schema(data)
        except voluptuous.Error as e:
            raise ConfigError('Error validating %s: %s' % (name, e))
        if not path.endswith('.yaml'):  # created from template tool
            # normalize to a .yaml file path with dashes underscored
            path, name = os.path.split(self.path)
            self.path = '%s.yaml' % os.path.join(
                path, name.replace('-', '_'))
        self.data_ipa = self._convert_to_ipa(data)
        self.data_repo = data
    else:  # created from FreeIPA
        self.data_ipa = data
        self.data_repo = self._convert_to_repo(data)
def __init__(self, path):
    """
    :param path: path to theme file
    :type path: str
    :raises: :class:`~alot.settings.errors.ConfigError`
    """
    self._spec = os.path.join(DEFAULTSPATH, 'theme.spec')
    # read and validate the theme file against the spec, with custom
    # validator checks for alignment/width/attribute-triple values
    self._config = read_config(path, self._spec,
                               checks={'align': align_mode,
                                       'widthtuple': width_tuple,
                                       'force_list': force_list,
                                       'attrtriple': attr_triple})
    # supported colour modes
    self._colours = [1, 16, 256]
    # make sure every entry in 'order' lists have their own subsections
    threadline = self._config['search']['threadline']
    for sec in self._config['search']:
        if sec.startswith('threadline'):
            tline = self._config['search'][sec]
            if tline['parts'] is not None:
                # every part listed in 'parts' must be defined either in
                # this threadline section or in the default 'threadline'
                listed = set(tline['parts'])
                here = set(tline.sections)
                indefault = set(threadline.sections)
                diff = listed.difference(here.union(indefault))
                if diff:
                    msg = 'missing threadline parts: %s' % ', '.join(diff)
                    raise ConfigError(msg)
def _get_cfg(
    self,
    path: List[str],
    default: Any = None,
    required: bool = True,
) -> Any:
    """Get a config option from a path and option name, specifying whether it is
    required.

    Args:
        path: Keys leading to the option, e.g. ``["matrix", "user_id"]``.
        default: Value to return when the option is absent.
        required: Whether a missing option (with no default) is an error.

    Raises:
        ConfigError: If required is specified and the object is not found
            (and there is no default value provided), this error will be
            raised
    """
    # Sift through the config until we reach our option
    config = self.config
    for name in path:
        config = config.get(name)

        # If at any point we don't get our expected option...
        if config is None:
            # Fix: the original tested `required or not default`, which
            # raised even for optional options with no default and for
            # required options that *did* have a default -- contradicting
            # the documented contract above.
            if required and default is None:
                raise ConfigError(
                    f"Config option {'.'.join(path)} is required")

            # or return the default value
            return default

    # We found the option. Return it
    return config
def _create_simulators(config: ConfigParser) -> List[Simulator]:
    """Build one Simulator per config-file section.

    Each section must have a ``drivers`` option (comma-separated driver
    names); an option ``<driver>_<key>`` becomes parameter ``key`` of
    that driver.

    :param config: parsed configuration
    :raises ConfigError: when a section has no ``drivers`` option
    """
    simulators = []
    for section in config.sections():
        if not config.has_option(section, 'drivers'):
            msg = f'Config-file section \'{section}\' does not have an option \'drivers\'.'
            raise ConfigError(msg)
        drivers_list = config.get(section, 'drivers').split(',')
        simulator = Simulator()
        for driver_name in drivers_list:
            # Fix: each driver gets its OWN parameter dict. The original
            # shared a single dict between every driver of a section, so
            # one driver's parameters leaked into (and could overwrite)
            # another's. Parameters are also now collected *before*
            # add_driver instead of mutating the dict afterwards.
            driver_parameters: Dict[str, Any] = {}
            prefix = f'{driver_name}_'
            for option in config.options(section):
                # plain prefix test instead of a regex built from the
                # driver name (which broke on regex metacharacters)
                if option.startswith(prefix):
                    driver_parameters[option[len(prefix):]] = \
                        config.get(section, option)
            simulator.add_driver(driver_name, driver_parameters)
        simulators.append(simulator)
        log.debug(f'Simulator created for: {section}')
    return simulators
def proxy(self):
    """Return the proxy implementation matching the configured mode.

    :raises ConfigError: for any mode other than 'docker' or 'local'
    """
    mode = self.mode
    if mode == 'docker':
        return ProxyDocker(self)
    if mode == 'local':
        return ProxyLocal(self)
    raise ConfigError('unknown mode ' + mode)
def _check_memberof(self, member_of):
    """Verify that every entity type listed in *member_of* exists.

    :param member_of: iterable of entity type names
    :raises ConfigError: when a listed type has no entity class
    """
    err = 'Cannot be a member of non-existent entity type %s'
    for entity_type in member_of:
        try:
            self.get_entity_class(entity_type)
        except KeyError:
            raise ConfigError(err % entity_type)
def delete_host(self, hostname):
    """Remove *hostname* from the managed hosts.

    Deletes the host's config file if present and re-activates the hosts
    file when it is currently active.

    :returns: the removed host object
    :raises ConfigError: when the hostname is unknown
    """
    try:
        host = self.hosts.pop(hostname)
    except KeyError:
        raise ConfigError('hostname not found')
    if os.path.isfile(host._path):
        os.remove(host._path)
    # keep the rendered hosts file in sync if it is in use
    if self.hosts_activated():
        self.activate_hosts()
    return host
def __finalize(self):
    """Check the on-disk project config and locate Simics executables.

    :raises ConfigError: when the project is uninitialized or was
        initialized for a different platform
    :raises errors.Simics: when Simics binaries are missing
    """
    cfg = self.path_config
    if not cfg.exists():
        raise ConfigError('project is not initialized')
    data = json.loads(cfg.read_text())
    platform_name = f"{data.get('name')}-{data.get('simics version')}"
    if platform_name != self.splatform.name:
        raise ConfigError(
            f'project already initialized for {platform_name}, do clean first'
        )
    self.path_simics = self.path / 'simics'
    if not self.path_simics.exists():
        raise errors.Simics('cannot find Simics executable')
    self.path_test_runner = self.path / 'bin' / 'test-runner'
    if not self.path_test_runner.exists():
        raise errors.Simics('cannot find Simics test-runner executable')
def __init__(self, path_config):
    """Load conout configuration from *path_config* (a pathlib.Path).

    :raises ConfigError: when the config file does not exist
    """
    self.path = path_config
    if not self.path.exists():
        raise ConfigError(f'conout config does not exist: {self.path}')
    data = json.loads(self.path.read_text())
    # 'defaults' is split out; everything else stays as the data map
    self._defaults = data.pop('defaults')
    self._data = data
    if platform.system() == 'Windows':
        self.host_type = 'win64'
    else:
        self.host_type = 'linux64'
    self._contour = {}
def delete_file(self):
    """Delete this entity's backing config file.

    :raises ManagerError: when the entity has no file path
    :raises ConfigError: when the file cannot be removed
    """
    if not self.path:
        raise ManagerError(
            '%s has no file path, cannot delete.' % repr(self))
    try:
        os.unlink(self.path)
    except OSError as e:
        raise ConfigError(
            'Cannot delete %s at %s: %s' % (repr(self), self.path, e))
    self.lg.debug('%s config file deleted', repr(self))
def load_config(self):
    """
    Load the template config from which the subcluster will be created.

    :returns: list of YAML documents loaded from the config file
    :raises ConfigError: when the file cannot be read or parsed
    """
    self.lg.debug('Opening template config file %s', self.config_path)
    try:
        with open(self.config_path, 'r') as source:
            documents = list(yaml.safe_load_all(source))
    except IOError as e:
        raise ConfigError('Error reading config file %s: %s'
                          % (self.config_path, e))
    except yaml.YAMLError as e:
        raise ConfigError('Error parsing config file %s: %s'
                          % (self.config_path, e))
    self.lg.debug('Succesfully loaded config file %s', self.config_path)
    self._validate(documents)
    return documents
def get_config(config_ini='/etc/glideinwms/glidein-pilot.ini'):
    """
    Do a minimal read of the config to identify the context type and
    create a config object for the appropriate context.

    :raises ConfigError: when the ini file is missing or the context
        type is unsupported
    :raises NotImplementedError: when no config class exists for the
        detected context
    """
    if not os.path.exists(config_ini):
        raise ConfigError("%s does not exist" % config_ini)
    ini = ini_handler.Ini(config_ini)
    context_type = ini.get("DEFAULT", "contextualize_protocol")
    context = CONTEXTS.get(context_type)
    if context is None:
        raise ConfigError("context_type %s not in the supported list %s"
                          % (context_type, valid_contexts()))
    # config classes are resolved by naming convention: <Context>Config
    config_class = '%sConfig' % context
    if config_class not in globals():
        raise NotImplementedError('Implementation for %s not available'
                                  % context)
    return globals()[config_class](config_ini=config_ini)
def run_yamllint_check(data):
    """
    Run a yamllint check on parsed file contents to verify that the
    file syntax is correct.

    :param str data: contents of the configuration file to check
    :raises ConfigError: in case of yamllint errors
    """
    # default yamllint ruleset, with the line-length rule switched off
    rules = yaml.dump(
        {'extends': 'default', 'rules': {'line-length': 'disable'}})
    findings = list(yamllint_check(data, YamlLintConfig(rules)))
    if findings:
        raise ConfigError('yamllint errors: %s' % findings)
def validate_chdir(val):
    """Validate a chdir target.

    The value must be a string; relative paths are resolved against the
    current working directory. Returns the absolute, normalized path.

    :raises ConfigError: when the resolved path does not exist
    """
    val = validate_string(val)
    # resolve relative to the current working directory
    path = os.path.abspath(
        os.path.normpath(os.path.join(util.getcwd(), val)))
    if not os.path.exists(path):
        raise ConfigError("can't chdir to %r" % val)
    return path
def validate_user(val):
    """Resolve *val* to a numeric uid.

    None maps to the effective uid of the current process; ints and
    digit strings are used directly; any other string is looked up as a
    user name via ``pwd``.

    :raises ConfigError: when the user name does not exist
    """
    if val is None:
        return os.geteuid()
    if isinstance(val, int):
        return val
    elif val.isdigit():
        return int(val)
    else:
        try:
            return pwd.getpwnam(val).pw_uid
        except KeyError:
            # fix: the original format string had no %s placeholder
            # ("No such user: '******'" % val), so the % operator raised
            # TypeError and masked the intended ConfigError
            raise ConfigError("No such user: '%s'" % val)
def from_path(source_tree, path):
    """Instantiate a Project from an existing Simics project directory.

    :param source_tree: tree used to look up the source platform
    :param path: project directory (pathlib.Path)
    :raises ConfigError: when *path* contains no project config file
    """
    config_file = path / PATH_FLC_CONFIG
    if not config_file.exists():
        raise ConfigError(f'not a Simics project: {path}')
    data = json.loads(config_file.read_text())
    # platform is identified by name + simics version stored in the config
    splatform = source_tree.get_platform(
        data.get('name'), data.get('simics version'))
    return Project(splatform)
def _validate(self, data):
    """
    Validate parsed template data against the voluptuous schema.

    :param list data: templates to be validated
    :raises ConfigError: when any template fails schema validation
    """
    try:
        schema = voluptuous.Schema(schema_template)
        for template in data:
            schema(template)
    except voluptuous.Error as e:
        raise ConfigError('Error validating config file %s: %s'
                          % (self.config_path, e))
    self.lg.debug('Successfully validated config file')
def validate_group(val):
    """Resolve *val* to a numeric gid.

    None maps to the effective gid of the current process; ints and
    digit strings are used directly; any other string is looked up as a
    group name via ``grp``.

    :raises ConfigError: when the group name does not exist
    """
    if val is None:
        return os.getegid()
    if isinstance(val, int):
        return val
    if val.isdigit():
        return int(val)
    try:
        return grp.getgrnam(val).gr_gid
    except KeyError:
        raise ConfigError("No such group: '%s'" % val)
def _load_hosts(self):
    """Populate ``self.hosts`` from the per-host files in CONF_HOSTS_PATH.

    Missing config directory leaves the table empty; non-file entries
    are skipped.

    :raises ConfigError: when two files declare the same hostname
    """
    self.hosts = {}
    if not os.path.exists(CONF_HOSTS_PATH):
        return
    for entry in os.listdir(CONF_HOSTS_PATH):
        path = os.path.join(CONF_HOSTS_PATH, entry)
        if not os.path.isfile(path):
            continue
        host = Host.from_path(path, g=self)
        if host.name in self.hosts:
            raise ConfigError(
                "duplicate hostname '%s' of file %s and %s"
                % (host.name, host._path, self.hosts[host.name]._path))
        self.hosts[host.name] = host
def validate_file(val):
    """Validate an optional file path.

    None passes through unchanged; otherwise the value must be a string
    naming an existing path, which is returned absolute and normalized.

    :raises ConfigError: when the resolved path does not exist
    """
    if val is None:
        return None
    val = validate_string(val)
    # resolve relative to the current working directory
    path = os.path.abspath(
        os.path.normpath(os.path.join(util.getcwd(), val)))
    if not os.path.exists(path):
        raise ConfigError("%r not found" % val)
    return path