def get_config_values(config_path, defaults):
    """Return the script's configuration values as a dict.

    The first existing file among ``config_path`` and ``~/.trailers.cfg``
    is read and its DEFAULT-section values are merged over ``defaults``;
    when neither exists the ``defaults`` are returned unchanged.
    """
    try:
        # Python 3 module name
        from configparser import ConfigParser
    except ImportError:
        # Python 2 fallback
        from ConfigParser import SafeConfigParser as ConfigParser

    parser = ConfigParser(defaults)
    values = parser.defaults()

    candidate_paths = [
        config_path,
        os.path.join(os.path.expanduser('~'), '.trailers.cfg'),
    ]

    found = False
    for candidate in candidate_paths:
        if not os.path.exists(candidate):
            continue
        found = True
        parser.read(candidate)
        values = parser.defaults()
        break

    if not found:
        print('Config file not found. Using default values.')
    return values
def load_cake(master_config_path, cake_name, into_config=None):
    '''
    Load a layered configuration cake by name from a master config file.

    All the keys and their values from the ``cake_name`` section of the
    ``master_config`` file will be added to the defaults dictionary of the
    ``into_config`` config parser before any of the layers config files
    are loaded.

    Args:
        master_config_path (str): the file path for the master
            configuration file.
        cake_name (str): the particular configuration to load as described
            in the master configuration file. This is the name of a
            section of the master configuration file.
        into_config (ConfigParser, optional): A pre-created config parser
            to load the cake in to. If None, a plain ``ConfigParser`` is
            created. Use this if want a different variant of a
            ``ConfigParser`` to be used; for example, if you don't want
            the default interpolation that a plain ``ConfigParser`` uses.

    Returns:
        A ``ConfigParser`` instance.

    Raises:
        NoSectionError: If ``cake_name`` is not found in the master config
            file.
        NoOptionError: If any required options are missing, either from
            the environment override section, or any individual CAKE
            section.
    '''
    # Allow "~" in the master config path.
    master_config_path = os.path.expanduser(master_config_path)
    config = ConfigParser() if into_config is None else into_config
    _must_read(config, master_config_path)
    section_name = cake_name
    # The cake's section names the layer config files to load, in order.
    filename_list = string_to_list(
        config.get(section_name, CONFIG_FILE_LIST_KEY))
    # NOTE(review): relative layer paths appear to be resolved against the
    # master config file's directory — confirm in _filenames_relative_to.
    filename_list = _filenames_relative_to(master_config_path, filename_list)
    # Move all the keys from the section_name of the config
    # into the default dictionary of the config,
    # making them a foundational starting point for the layers.
    # Since Python 2 doesn't have .read_dict() and since a pre-existing
    # config parser may have been passed in, this avoids any Python version
    # specific code, but does require keys to be lower-cased to make them
    # findable.
    config.defaults().update(
        [(k.lower(), v) for k, v in config.items(section_name)])
    # Remove all irrelevant sections from the master config file.
    # (sections() returns a fresh list each call, so removing while
    # looping over it is safe.)
    for section in config.sections():
        if section not in MASTER_CONFIG_SECTIONS_TO_KEEP:
            config.remove_section(section)
    # Layer each config file on top of the accumulated defaults.
    for filename in filename_list:
        _must_read(config, filename)
    # The result config is used for environment variable override
    # information so that the environment variable override section can be
    # defined in any of the layers or in the master config file.
    _env_override(config, os.environ)
    return config
def get_config_values(config_path, defaults):
    """Return the script's configuration values as a dict.

    Merges the DEFAULT section of the first existing config file
    (``config_path`` or ``~/.trailers.cfg``) over ``defaults``, then
    normalizes ``download_all_urls`` from a comma-separated string into
    a list of URL paths (empty list when unset).
    """
    parser = ConfigParser(defaults)
    values = parser.defaults()

    search_paths = [
        config_path,
        os.path.join(os.path.expanduser('~'), '.trailers.cfg'),
    ]

    found = False
    for candidate in search_paths:
        if os.path.exists(candidate):
            found = True
            parser.read(candidate)
            values = parser.defaults()
            break

    raw_urls = values.get('download_all_urls', '')
    if raw_urls:
        values['download_all_urls'] = [
            get_url_path(chunk) for chunk in raw_urls.split(',')
        ]
    else:
        values['download_all_urls'] = []

    if not found:
        logging.info('Config file not found. Using default values.')
    return values
def parse(self):
    """Parse the INI file at ``self.config`` into a nested dict.

    Values equal to 'True'/'False' become booleans; every other value
    is coerced to float. Exits the process when nothing was parsed
    (no 'default' key present).
    """
    parser = ConfigParser()
    parser.read(self.config)

    def convert(section, key, raw):
        # 'True'/'False' strings become booleans; anything else a float.
        if raw in ('True', 'False'):
            return parser.getboolean(section, key)
        return float(raw)

    result = {}
    if parser.defaults():
        result['default'] = {
            key: convert('DEFAULT', key, raw)
            for key, raw in parser.defaults().items()
        }
    for section in parser.sections():
        result[section] = {
            key: convert(section, key, raw)
            for key, raw in parser[section].items()
        }

    try:
        result['default']
    except Exception:
        print("No configuration information parsed.")
        sys.exit()
    return result
def cluster_swift_config_logic(hostname, filename=None):
    """
    Read a swift config file from the cluster node named ``hostname`` and
    flatten it into an OrderedDict of {section: options}.

    :param hostname: node host name to look up
    :param filename: absolute path of the config file on the remote node
    :return: None — NOTE(review): ``resp``/``status``/``message``/``data``
             are assembled but never returned; looks unfinished, confirm
             against the caller.
    :raises ValueError: if no node with ``hostname`` exists
    """
    status = ''
    message = ''
    resp = {"status": status, "message": message}
    host = SfoClusterNodesMethod.query_host_by_host_name(hostname)
    if not host:
        raise ValueError('not Node hostname is %s' % hostname)
    if filename:
        node_man = manager_class.NodeManager(host.node_inet_ip)
        # Fetch the raw config file content from the remote node.
        content = node_man.swift_config.read_config(
            config_path=os.path.dirname(filename),
            config_file=os.path.basename(filename))
        config = ConfigParser()
        config.read_string(content)
        config_dict = OrderedDict()
        try:
            if config.defaults():
                default_config = config.defaults()
                config_dict[config.default_section] = default_config
            # Parser-level items() yields (section_name, SectionProxy);
            # has_section() filters out the DEFAULT pseudo-section.
            for section, option in config.items():
                if config.has_section(section):
                    # HACK: uses the private _sections mapping to get the
                    # section's own options without DEFAULT fallback merged.
                    section_copy = config._sections[section].copy()
                    config_dict[section] = section_copy
        # Fix: `except NoSectionError, error:` is Python-2-only syntax and
        # is a SyntaxError on Python 3; `as` works on both.
        except NoSectionError as error:
            access_logger.error(
                'get exception %s from swift config' % str(error))
        status = 200
        message = 'OK'
        data = {"config": config_dict}
def get_config_values(config_path, defaults):
    """Load settings from the first existing config file, else defaults.

    Checks ``config_path`` then ``~/.trailers.cfg``; whichever exists
    first is read and its DEFAULT section (merged over ``defaults``)
    is returned. Logs and returns the plain defaults when neither exists.
    """
    try:
        from configparser import ConfigParser  # Python 3
    except ImportError:
        from ConfigParser import SafeConfigParser as ConfigParser  # Python 2.7

    parser = ConfigParser(defaults)
    values = parser.defaults()

    for candidate in (config_path,
                      os.path.join(os.path.expanduser('~'), '.trailers.cfg')):
        if os.path.exists(candidate):
            parser.read(candidate)
            values = parser.defaults()
            break
    else:
        # No break happened: no config file was found anywhere.
        logging.info('Config file not found. Using default values.')
    return values
def __init__(self,
             env: str = "DEVELOPMENT",
             config_path: str = "./config.ini",
             project_root: str = None):
    """
    Initialize configuration object.
    Configuration is read in from the path specified by the config_path
    argument.

    Keys in the ``env`` section may carry a type tag, e.g. ``port [int]``;
    the tag selects the conversion and is stripped from the attribute name.
    """
    if project_root:
        self.project_root = project_root
    else:
        # Default the project root to the directory containing this file.
        self.project_root = os.path.dirname(os.path.abspath(__file__))
    if not os.path.exists(self.project_root):
        raise FileNotFoundError(
            "The path given as project root does not exist")
    if not os.path.isdir(self.project_root):
        raise NotADirectoryError(
            "The path given is not a directory, but a file path")
    if config_path:
        if os.path.isfile(config_path):
            config = ConfigParser()
            config.read(config_path)
            # Bare key names (type tag stripped) for the chosen env section.
            env_keys = [
                key.rsplit('[')[0].strip() for key in config[env].keys()
            ]
            # Keys that appear more than once (e.g. both in DEFAULT and in
            # the env section, or with different type tags).
            duplicate_keys = (set(
                [x for x in env_keys if env_keys.count(x) > 1]))
            # Drop DEFAULT entries shadowed by env-section duplicates so the
            # env-specific value wins below.
            defaults = list(config.defaults())
            _ = [
                config.defaults().pop(key) for key in defaults
                if key.rsplit('[')[0].strip() in duplicate_keys
            ]
            for key in config[env]:
                if '[float]' in key or '[double]' in key:
                    setattr(self, key.rsplit('[')[0].strip(),
                            float(config[env][key]))
                    continue
                if '[int]' in key:
                    setattr(self, key.rsplit('[')[0].strip(),
                            int(config[env][key]))
                    continue
                if '[path]' in key:
                    # Expand ${project_root} before literal evaluation.
                    config[env][key] = config[env][key].replace(
                        '${project_root}', self.project_root)
                # NOTE(review): untagged and "[path]" values go through
                # ast.literal_eval, so they must be valid Python literals
                # (e.g. quoted strings) — confirm the config file format.
                setattr(self, key.rsplit('[')[0].strip(),
                        ast.literal_eval(config[env][key]))
        else:
            raise FileNotFoundError(
                f'No config file at location {config_path} has been found')
    else:
        raise ValueError("Please provide a config path")
def _read_config(self, config=None, bases=None): ''' Read a config file Uses ConfigParser to read in a cfg file. If a base is identified, then recursively reads in all cfg files and builds a master config dictionary object Parameters ---------- config : str Optional name of config file to load bases : list A list of parent config files Returns ------- dict A configParser dictionary object ''' # format name and file config_name = config if config.endswith('.cfg') else '{0}.cfg'.format( config) config_file = os.path.join(self.treedir, 'data', config_name) assert os.path.isfile( config_file ) is True, 'config file {0} must exist in the data directory'.format( config_file) # read initial config file cfg = SafeConfigParser() cfg.optionxform = lambda option: option cfg.read(config_file) # check for any bases bases = bases if bases else [] bases.insert(0, os.path.join(self.treedir, 'data', config_name)) hasbase = 'base' in cfg.defaults() # read base config file if hasbase: return self._read_config(cfg.defaults()['base'], bases=bases) else: # read in the full list of config files cfg = SafeConfigParser() cfg.optionxform = lambda option: option cfg.read(bases) # if both eboss and boss, then remove boss if 'EBOSS' in cfg.sections() and 'BOSS' in cfg.sections(): cfg.remove_section('BOSS') return cfg
def load(self, cfile):
    """Load an INI file into a dict of sections.

    The [DEFAULT] section is merged with any explicit [default] section
    under the 'default' key. When ``self.with_profile`` is falsy only
    the default profile is returned (flat); otherwise every section is
    returned, with all values type-cast. Returns {} when the file does
    not exist.
    """
    path = Path(cfile).expanduser()
    if not path.is_file():
        return {}
    from configparser import ConfigParser
    parser = ConfigParser()
    parser.optionxform = str  # keep option keys case-sensitive
    parser.read(path)

    sections = {name: dict(parser.items(name)) for name in parser.sections()}
    merged = parser.defaults()  # the [DEFAULT] section
    merged.update(sections.get("default", {}))
    sections["default"] = merged

    if not self.with_profile:
        # Only the default profile is wanted: return it flattened.
        return {
            key: Loader.type_cast(val) for key, val in merged.items()
        }
    return {
        name: {k: Loader.type_cast(v) for k, v in body.items()}
        for name, body in sections.items()
    }
def parse_config(name: str):
    """Parse the makehex config file, configure file logging, and load
    libraries.

    Args:
        name: path to the config file; falls back to
            ``_DEFAULTS['config_file']`` when falsy.

    Returns:
        Tuple of (settings dict, ``Globals`` built from loaded libraries).
    """
    # Top-level options live in the 'makehex' default section.
    parser = ConfigParser(default_section='makehex', allow_no_value=True)
    parser.read(name if name else _DEFAULTS['config_file'])
    d = parser.defaults()
    # Rotating log: 1 MiB per file, 5 backups. The config stores level as
    # a small integer (e.g. 3) scaled by 10 to the logging module's range.
    file_handler = RotatingFileHandler(
        parser.get('log', 'file', fallback=_DEFAULTS['log_file']), 'a',
        1024 * 1024, 5)
    file_handler.setLevel(parser.getint('log', 'level', fallback=3) * 10)
    file_handler.setFormatter(
        logging.Formatter(
            "[{levelname:^8s}|{asctime}|{module:^10s}:{lineno:>3d}|{funcName:^15}] {message}",
            style='{'))
    # Side effect: mutates the root logger for the whole process.
    logging.root.addHandler(file_handler)
    logging.root.setLevel(0)
    libs = {}
    # Each key in [libraries] is itself a path to a library config file.
    for library in parser["libraries"]:
        logging.debug("Loading library %s...", library)
        conf = ConfigParser()
        conf.read(library)
        for selection in conf.sections():
            for k, v in conf[selection].items():
                # Map option name -> (dir of its section, raw value).
                libs[k] = (os.path.join(os.path.dirname(library), selection),
                           v)
                logging.debug("Loaded %s", k)
    return {
        "ENCODING": d.get('file_encoding', _DEFAULTS['file_encoding']),
        "ENDIAN": d.get('endian', 'little'),
        "ALIGN": "OFF",
        "INCLUDE_DIR": d.get('include_dir', _DEFAULTS['include_dir'])
    }, Globals(libs)
def createJiraExe(ico: str = None):
    """Build JiraTool.exe with PyInstaller, publish it to the share
    directory, zip it, and bump the version metadata INI.

    Args:
        ico: path to the .ico icon for the executable.
             NOTE(review): interpolated unconditionally — passing None
             produces '-i None' on the command line; confirm callers
             always supply an icon.
    """
    # Build a single-file windowed exe (hard-coded Python 3.6-32 path).
    call(
        'C:\\Users\\Administrator\\AppData\\Local\\Programs\\Python\\Python36-32\\Scripts\\pyinstaller -w -F -i {} JiraTool.py -p AnrTool.py -p Tool --hidden-import Tool'
        .format(ico))
    dist = sep.join(['dist', 'JiraTool.exe'])
    if isfile(dist):
        EXE_PATH = sep.join([SHARE_PATH, 'JiraTool'])
        print('{} isdir {}'.format(EXE_PATH, isdir(EXE_PATH)))
        JIRA_TOOL_PATH = sep.join([EXE_PATH, 'JiraTool'])
        EXE_FILE_PATH = sep.join([JIRA_TOOL_PATH, 'JiraTool.exe'])
        ZIP_FILE_PATH = sep.join([EXE_PATH, 'JiraTool.zip'])
        print('exe={} zip={}'.format(EXE_FILE_PATH, ZIP_FILE_PATH))
        # Publish the freshly built exe and a zipped copy.
        copyfile(dist, EXE_FILE_PATH)
        zip_single(JIRA_TOOL_PATH, ZIP_FILE_PATH)
        # Record the new version in the shared INI's DEFAULT section:
        # timestamp, version, per-version changelog and latest content.
        customerConf = ConfigParser()
        customerConf.read(JiraTool.VERSION_INI_FILE)
        defaultConf = customerConf.defaults()
        defaultConf['update_time'] = datetime.now().strftime(
            '%Y-%m-%d %H:%M:%S')
        defaultConf['version'] = getVersion('JiraTool.py')
        defaultConf['v{}'.format(
            defaultConf['version'])] = getUpdateContent('JiraTool.py')
        defaultConf['content'] = defaultConf['v{}'.format(
            defaultConf['version'])]
        # NOTE(review): the open() handle is never closed; consider a
        # 'with' block.
        customerConf.write(open(JiraTool.VERSION_INI_FILE, mode='w'))
    # Clean up PyInstaller build artifacts.
    if isdir('dist'):
        rmtree('dist')
    if isdir('build'):
        rmtree('build')
def parse_config(name: str):
    """Parse the makehex config file, configure file logging, and load
    libraries.

    Args:
        name: path to the config file; falls back to
            ``_DEFAULTS['config_file']`` when falsy.

    Returns:
        Tuple of (settings dict, ``Globals`` built from loaded libraries).
    """
    # Top-level options live in the 'makehex' default section.
    parser = ConfigParser(default_section='makehex', allow_no_value=True)
    parser.read(name if name else _DEFAULTS['config_file'])
    d = parser.defaults()
    # Rotating log: 1 MiB per file, 5 backups; config level (e.g. 3) is
    # scaled by 10 to the logging module's range.
    file_handler = RotatingFileHandler(parser.get('log', 'file',
                                                  fallback=_DEFAULTS['log_file']),
                                       'a', 1024 * 1024, 5)
    file_handler.setLevel(parser.getint('log', 'level', fallback=3) * 10)
    file_handler.setFormatter(logging.Formatter(
        "[{levelname:^8s}|{asctime}|{module:^10s}:{lineno:>3d}|{funcName:^15}] {message}",
        style='{'))
    # Side effect: mutates the root logger for the whole process.
    logging.root.addHandler(file_handler)
    logging.root.setLevel(0)
    libs = {}
    # Each key in [libraries] is itself a path to a library config file.
    for library in parser["libraries"]:
        logging.debug("Loading library %s...", library)
        conf = ConfigParser()
        conf.read(library)
        for selection in conf.sections():
            for k, v in conf[selection].items():
                # Map option name -> (dir of its section, raw value).
                libs[k] = (os.path.join(os.path.dirname(library), selection), v)
                logging.debug("Loaded %s", k)
    return {
        "ENCODING": d.get('file_encoding', _DEFAULTS['file_encoding']),
        "ENDIAN": d.get('endian', 'little'),
        "ALIGN": "OFF",
        "INCLUDE_DIR": d.get('include_dir', _DEFAULTS['include_dir'])
    }, Globals(libs)
def parse_config():
    """Load credentials from ``credentials.ini`` in the working directory.

    Returns the [DEFAULT] section as a dict-like mapping (empty when the
    file is missing).
    """
    parser = ConfigParser()
    parser.read('credentials.ini')
    return parser.defaults()
class INImanager(ManagerBase):
    """Manager that persists its data dict as an INI file via ConfigParser."""

    def __init__(self, *args, **kwargs):
        super(INImanager, self).__init__(*args, **kwargs)
        # Backing parser; (re)populated on save()/load().
        self._config = ConfigParser()

    @Component.dependent
    def save(self):
        """Serialize ``self._data`` to the configured path or file object.

        Raises:
            FileNotFoundError: if neither a config path nor a config file
                object is set on the manager.
        """
        self._config.read_dict(self._data)
        if self._config_path:
            with open(self._config_path, 'w') as file:
                self._config.write(file)
        elif self._config_file:
            self._config.write(self._config_file)
        else:
            raise FileNotFoundError()

    @Component.dependent
    def load(self):
        """Populate ``self._data`` from the configured path or file object.

        Raises:
            FileNotFoundError: if neither a config path nor a config file
                object is set on the manager.
        """
        if self._config_path:
            with open(self._config_path) as file:
                self._config.read_file(file)
        elif self._config_file:
            self._config.read_file(self._config_file)
        else:
            raise FileNotFoundError()
        # TODO: Find a less dirty way then to use private attributes
        # Copy parser sections plus the DEFAULT section into _data.
        self._data = dict(self._config._sections)
        self._data[self._default_section] = dict(self._config.defaults())
def __init__(self):
    """Load platemap configuration from $PLATEMAP_CONFIG or the default
    ``../platemap_config.txt`` next to this module.

    Raises:
        IOError: if the config file does not exist.
        MissingConfigSection: if a required section is absent.
    """
    conf_fp = environ.get('PLATEMAP_CONFIG') or join(
        dirname(abspath(__file__)), '../platemap_config.txt')
    if not isfile(conf_fp):
        raise IOError("The configuration file '%s' is not an "
                      "existing file" % conf_fp)
    config = ConfigParser()
    # Snapshot of the default option names (empty: no defaults supplied).
    self.defaults = set(config.defaults())
    # Parse the configuration file
    # NOTE(review): readfp() and the 'U' open mode are deprecated/removed
    # in modern Python 3 — confirm supported interpreter versions.
    with open(conf_fp, 'U') as conf_file:
        config.readfp(conf_file)
    _expected_sections = {'main', 'postgres', 'email'}
    missing = _expected_sections - set(config.sections())
    if missing:
        raise MissingConfigSection(', '.join(missing))
    # Unknown sections are tolerated but reported.
    extra = set(config.sections()) - _expected_sections
    _warn_on_extra(extra, 'sections')
    # Delegate per-section parsing to dedicated helpers.
    self._get_main(config)
    self._get_postgres(config)
    self._get_email(config)
def _parse_contents(self, contents, file_type): if contents is None: return None, 'No content to parse' if file_type == 'json': try: return json.loads(contents), 'Success' except ValueError as e: return None, str(e) elif file_type == 'yaml': if yaml is None: raise ImportError('A yaml config file was specified but yaml isnt available!') try: return yaml.load(contents), 'Success' except ValueError as e: return None, str(e) elif file_type == 'ini': try: buf = StringIO(contents) config = ConfigParser() if hasattr(config, 'read_file'): config.read_file(buf) else: config.readfp(buf) data = {'defaults': dict(config.defaults())} for section in config.sections(): data[section] = dict(config.items(section)) return data, 'Success' except Exception as e: return None, str(e) else: raise ValueError('Invalid config extension: ' + file_type)
def dump(self):
    """Write the public attributes of ``self.inject_module`` into the INI
    file at ``self.setting_path`` (as the [DEFAULT] section).

    NOTE(review): ConfigParser.write expects string values; non-str module
    attributes will raise — confirm the injected module only exposes
    strings.
    """
    config = ConfigParser()
    module = importlib.import_module(self.inject_module)
    # Mutating the defaults() dict is how the DEFAULT section is filled.
    default = config.defaults()
    for name in [attr for attr in dir(module) if not attr.startswith('_')]:
        default[name] = getattr(module, name)
    # Fix: the original passed a bare open() to write(), leaking the file
    # handle; a context manager guarantees flush and close.
    with open(self.setting_path, 'w') as fp:
        config.write(fp)
def update_options_from_file(options, config_file_path):
    # type: (Namespace, str) -> Namespace
    """Overlay options parsed from an INI file onto ``options``.

    DEFAULT-section values are applied first; then any section whose name
    regex-matches ``options.file`` (with and without a '_flymake' suffix)
    is applied on top. 'true'/'t' and 'false'/'f' strings become booleans.
    Finally, ``extra_ignore_codes`` (if set) extends ``ignore_codes``.
    """
    config = ConfigParser()
    config.read(config_file_path)

    def _is_false(value):
        # type: (str) -> bool
        return value.lower() in {'false', 'f'}

    def _is_true(value):
        # type: (str) -> bool
        return value.lower() in {'true', 't'}

    # Fix: dict.iteritems() is Python-2-only and raises AttributeError on
    # Python 3; .items() behaves identically on both versions.
    for key, value in config.defaults().items():
        if _is_false(value):
            value = False
        elif _is_true(value):
            value = True
        setattr(options, key, value)

    for section_name in config.sections():
        if (re.search(section_name, options.file)
                or re.search(section_name,
                             options.file.replace('_flymake', ''))):
            for key, value in config.items(section_name):
                if _is_false(value):
                    value = False
                elif _is_true(value):
                    value = True
                setattr(options, key, value)

    if hasattr(options, 'extra_ignore_codes'):
        extra_ignore_codes = (
            options.extra_ignore_codes.replace(',', ' ').split())
        # Allow for extending, rather than replacing, ignore codes
        options.ignore_codes.extend(extra_ignore_codes)

    return options
def __init__(self):
    """Load American Gut configuration from $AG_CONFIG or the default
    ``../ag_config.txt`` next to this module.

    Raises:
        IOError: if the config file does not exist.
        MissingConfigSection: if a required section is absent.
    """
    conf_fp = environ.get('AG_CONFIG') or join(dirname(abspath(__file__)),
                                               '../ag_config.txt')
    if not isfile(conf_fp):
        raise IOError("The configuration file '%s' is not an "
                      "existing file" % conf_fp)
    # Open Humans settings fall back to these defaults when omitted.
    config = ConfigParser(defaults={
        'open_humans_client_id': '',
        'open_humans_client_secret': '',
        'open_humans_base_url': 'https://openhumans.org',
    })
    # Snapshot of the default option names for later reference.
    self.defaults = set(config.defaults())
    # Parse the configuration file
    # NOTE(review): readfp() and the 'U' open mode are deprecated/removed
    # in modern Python 3 — confirm supported interpreter versions.
    with open(conf_fp, 'U') as conf_file:
        config.readfp(conf_file)
    _expected_sections = {'main', 'postgres', 'test', 'redis', 'email',
                          'thirdparty'}
    missing = _expected_sections - set(config.sections())
    if missing:
        raise MissingConfigSection(', '.join(missing))
    # Unknown sections are tolerated but reported.
    extra = set(config.sections()) - _expected_sections
    _warn_on_extra(extra, 'sections')
    # Delegate per-section parsing to dedicated helpers.
    self._get_main(config)
    self._get_postgres(config)
    self._get_test(config)
    self._get_redis(config)
    self._get_email(config)
    self._get_third_party(config)
def parse_config(txt: str, *, interpolate: bool = True) -> Dict[str, Any]:
    """Parse configuration from string."""
    # INI dialect: only '=' delimits, '#' comments, strict duplicate
    # detection, and no multi-line values.
    parser = ConfigParser(
        interpolation=None,
        delimiters=["="],
        comment_prefixes=["#"],
        inline_comment_prefixes=["#"],
        strict=True,
        empty_lines_in_values=False,
    )
    parser.optionxform = str  # type: ignore  # keep keys case-sensitive
    parser.read_string(txt)
    # Anything before the first [section] lands in DEFAULT — forbidden.
    if parser.defaults():
        raise ParseError("Found config values outside of any section")
    result: Dict[str, Any] = {}
    # Values are JSON, optionally interpolated against the flattened
    # result built so far.
    json_parser = InterpolatingJSONDecoder(
        FlatDictProxy(result) if interpolate else None
    )
    for section, values in parser.items():
        if section == "DEFAULT":
            continue
        # Dotted section names ("a.b.c") become nested dicts.
        parts = section.split(".")
        node = result
        for part in parts[:-1]:
            if part not in node:
                node = node.setdefault(part, {})
            else:
                node = node[part]
                if not isinstance(node, dict):
                    raise ParseError(f"Found conflicting values for {parts}")
        # Set the default section
        node = node.setdefault(parts[-1], {})
        for key, value in values.items():
            # parse key
            if key.startswith("@"):
                # special @ key, values are plain unquoted strings
                node[key] = str(value)
                continue
            elif isintegral_str(key):
                # integral key, used for positional arguments
                pos = int(key)
                if pos < 0:
                    raise ParseError(
                        f"Negative positions are not valid: {pos}")
                key = str(pos)
            elif not str.isidentifier(key):
                raise ParseError(f"Key is not valid: {repr(key)}")
            # parse value
            try:
                parsed_value = json_parser.decode(value)
            except json.JSONDecodeError as err:
                raise ParseError(
                    f"Error parsing value of {key}: {repr(value)}: {err}"
                ) from None
            node[key] = parsed_value
    return result
def generate_plan(args: Namespace, parser: ConfigParser) -> Dict[str, str]:
    """Assemble the certificate request plan.

    Precedence, lowest to highest: DEFAULT_SETTINGS, the parser's DEFAULT
    section, the selected profile section, then the CLI-provided names.
    """
    plan = DEFAULT_SETTINGS.copy()
    plan.update(parser.defaults())
    profile = args.config_profile
    if profile is not None:
        plan.update(parser[profile])
    plan['commonName'] = args.commonName
    plan['alternativeNames'] = args.alternativeName
    return plan
class CKANConfigLoader(object):
    """Load a CKAN INI file (plus any 'use = config:...' parent) into a
    flat config dict, keeping Pylons-style ``global_conf`` compatibility.
    """

    def __init__(self, filename):
        self.config_file = filename.strip()
        self.config = dict()
        self.parser = ConfigParser()
        # CKAN settings live in the [app:main] section.
        self.section = u'app:main'
        # Make the config file's own path available for interpolation.
        defaults = {u'__file__': os.path.abspath(self.config_file)}
        self._update_defaults(defaults)
        self._create_config_object()

    def _update_defaults(self, new_defaults):
        # NOTE(review): writes to the parser's private _defaults mapping
        # directly (bypassing read_dict) — fragile across configparser
        # versions; confirm before upgrading Python.
        for key, value in new_defaults.items():
            self.parser._defaults[key] = value

    def _read_config_file(self, filename):
        # %(here)s interpolates to the directory of the file being read.
        defaults = {u'here': os.path.dirname(os.path.abspath(filename))}
        self._update_defaults(defaults)
        self.parser.read(filename)

    def _update_config(self):
        # Copy options into self.config; defaults never override values
        # already set from a more specific file.
        options = self.parser.options(self.section)
        for option in options:
            if option not in self.config or option in self.parser.defaults():
                value = self.parser.get(self.section, option)
                self.config[option] = value
                if option in self.parser.defaults():
                    self.config[u'global_conf'][option] = value

    def _create_config_object(self):
        self._read_config_file(self.config_file)
        # # The global_config key is to keep compatibility with Pylons.
        # # It can be safely removed when the Flask migration is completed.
        self.config[u'global_conf'] = self.parser.defaults().copy()
        self._update_config()
        # Follow a 'use = config:<path>' reference to a parent config file
        # and merge its options in (relative to this file's directory).
        schema, path = self.parser.get(self.section, u'use').split(u':')
        if schema == u'config':
            use_config_path = os.path.join(
                os.path.dirname(os.path.abspath(self.config_file)), path)
            self._read_config_file(use_config_path)
            self._update_config()

    def get_config(self):
        # Return a copy so callers can't mutate the loader's state.
        return self.config.copy()
def main():
    """Entry point: read /etc/ooni/prio.conf and serve the bottle app.

    Side effects: sets the module-level ``conf`` namedtuple and blocks
    inside ``bottle.run()``.
    """
    global conf
    conffile = "/etc/ooni/prio.conf"
    cp = ConfigParser()
    with open(conffile) as f:
        cp.read_file(f)
    d = cp.defaults()  # parsed values from DEFAULT section
    # Freeze the settings into a namedtuple for attribute access
    # (e.g. conf.apiport).
    conf = namedtuple("Conf", d.keys())(*d.values())
    bottle.run(host="localhost", port=conf.apiport)
def defaults(self):
    """Return the defaults, with their values interpolated (with the
    defaults dict itself)

    Mainly to support defaults using values such as %(here)s
    """
    interpolated = ConfigParser.defaults(self).copy()
    for key in interpolated:
        # get() runs interpolation; fall back to the raw stored value
        # when interpolation yields an empty/falsy result.
        interpolated[key] = self.get('DEFAULT', key) or interpolated[key]
    return interpolated
def _load_config(args: Namespace, name: str,
                 exclude_defaults: bool = False) -> Mapping[str, str]:
    """Load options for section ``name`` from the file in ``args.config``.

    Falls back to the DEFAULT section when ``name`` is absent. With
    ``exclude_defaults`` set, DEFAULT-section keys are stripped from the
    section's options and no required-key validation is performed.

    Raises:
        RuntimeError: if a required key is missing from the result.
    """
    config = ConfigParser()
    if args.config:
        config.read(args.config)
        ret = dict(config.defaults())
        if config.has_section(name):
            ret = dict(config.items(name))
            if exclude_defaults:
                # Keep only the section's own options; skip validation,
                # since required keys may legitimately live in DEFAULT.
                for k in config.defaults().keys():
                    ret.pop(k, None)
                return ret
    # NOTE(review): when args.config is falsy, ``ret`` is unbound here and
    # the loop below raises NameError — confirm whether args.config is
    # always provided or whether ret should default to {}.
    require_keys = ('sqlalchemy.url', 'mq.url')
    for k in require_keys:
        if k not in ret:
            raise RuntimeError('config error: {} must be specified'.format(k))
    return ret
def from_ini(self, _path: str) -> __class__:
    """Create an Applications record from an INI file with German keys.

    All values live in the DEFAULT section; surrounding quotes and spaces
    are stripped from each value.
    """
    ini = ConfigParser()
    ini.read(_path)
    # NOTE(review): 'place' is parsed but never used below — confirm.
    place = ini.defaults().get('place').strip('" ').split(' ')
    options = {
        'contact_person': ini.defaults().get('contact_person').strip('" '),
        # German key 'telefone' maps to the English 'telephone' option.
        'telephone': ini.defaults().get('telefone').strip('" '),
        'art': ini.defaults().get('art').strip('" '),
        'date': ini.defaults().get('date').strip('" '),
    }
    if ini.defaults().get('email') is not None:
        options['email'] = ini.defaults().get('email').strip('" ')
    if ini.defaults().get('job_ad') is not None:
        # NOTE(review): guards on 'job_ad' but reads 'stellenanzeige';
        # raises AttributeError if the latter is missing — likely a bug.
        options['job_ad'] = ini.defaults().get('stellenanzeige').strip('" ')
    # NOTE(review): ``options`` is built but not passed to create() —
    # confirm whether it should be.
    Applications.objects.create(
        art=Art.has(ini.defaults().get('art').strip('" ')).name,
        company=Company())
    return self
def _parse(paths, prepos, ignored_map, ignored_location_map):
    """Parse files in paths to load config

    Reads each repos.conf-style file into one parser, then materializes a
    RepoConfig per section into ``prepos``. When the same repo name
    appears with a different location, the older location is recorded in
    ``ignored_map`` / ``ignored_location_map`` instead of clobbering.
    """
    parser = SafeConfigParser()

    # use read_file/readfp in order to control decoding of unicode
    try:
        # Python >=3.2
        read_file = parser.read_file
    except AttributeError:
        read_file = parser.readfp

    for p in paths:
        f = None
        try:
            f = io.open(_unicode_encode(p,
                encoding=_encodings['fs'], errors='strict'),
                mode='r', encoding=_encodings['repo.content'],
                errors='replace')
        except EnvironmentError:
            # Missing or unreadable files are silently skipped by design.
            pass
        else:
            try:
                read_file(f)
            except ParsingError as e:
                writemsg(_unicode_decode(
                    _("!!! Error while reading repo config file: %s\n")) % e,
                    noiselevel=-1)
        finally:
            if f is not None:
                f.close()

    prepos['DEFAULT'] = RepoConfig("DEFAULT", parser.defaults())
    for sname in parser.sections():
        optdict = {}
        for oname in parser.options(sname):
            optdict[oname] = parser.get(sname, oname)

        repo = RepoConfig(sname, optdict)
        # Entries pointing at nonexistent directories are reported and
        # dropped.
        if repo.location and not os.path.exists(repo.location):
            writemsg(_("!!! Invalid repos.conf entry '%s'"
                " (not a dir): '%s'\n") % (sname, repo.location),
                noiselevel=-1)
            continue

        if repo.name in prepos:
            old_location = prepos[repo.name].location
            if old_location is not None and repo.location is not None and old_location != repo.location:
                # Same name, new location: remember the old one as ignored.
                ignored_map.setdefault(repo.name, []).append(old_location)
                ignored_location_map[old_location] = repo.name
            prepos[repo.name].update(repo)
        else:
            prepos[repo.name] = repo
def get_config(filename):
    """Parse an INI file into a (possibly nested) dict.

    The [DEFAULT] section forms the top level; dotted section names
    ("a.b.c") are nested via ``_dict_chain`` and updated with that
    section's options.
    """
    config = ConfigParser()
    # Fix: the original passed a bare open() to read_file(), leaking the
    # file handle; a context manager guarantees it is closed.
    with open(filename) as fp:
        config.read_file(fp)
    root = dict(config.defaults())
    for section in config.sections():
        # Dotted section names become nested dicts.
        path = section.split('.')
        last = _dict_chain(root, path)
        last.update(config[section])
    return root
def load(self):
    """Read the INI at ``self.setting_path`` and inject its DEFAULT
    values onto matching attributes of ``self.inject_module``.

    Keys without a corresponding module attribute are skipped; values
    made up only of digits are converted to float.
    """
    parser = ConfigParser()
    parser.read(self.setting_path)
    module = importlib.import_module(self.inject_module)
    for name, raw in parser.defaults().items():
        print(f'{name}:{raw}')
        if not hasattr(module, name):
            continue
        # Purely-numeric strings become floats; everything else stays str.
        value = float(raw) if raw.isdigit() else raw
        setattr(module, name, value)
def read_cfg(cfg_file, include_default=True):
    """
    Parse a config file.

    Parameters
    ----------
    cfg_file: string
        filename of the config file
    include_default: boolean, optional
        If true then the DEFAULT section settings will be included in
        all other sections.

    Returns
    -------
    dict
        Nested dict of {section: {item: value}}. All ``kws_*`` options of
        a section are merged into a single 'kws' sub-dict; options whose
        name contains 'path' are normalized into filesystem paths.
    """
    config = ConfigParser()
    config.optionxform = str  # keep option names case-sensitive
    config.read(cfg_file)
    ds = {}
    for section in config.sections():
        ds[section] = {}
        for item, value in config.items(section):
            if 'path' in item:
                value = value.replace(' ', '')
                path = value.split(',')
                if path[0][0] == '.':
                    # relative path: resolve against the config file's dir
                    value = os.path.join(os.path.split(cfg_file)[0],
                                         *path[0:])
                elif path[0][0] == '/' or path[0][1] == ':':
                    # absolute path in linux or windows
                    value = os.path.sep.join(path)
                else:
                    print(section, item, ' got a blank value, set to None')
                    value = None
            if item.startswith('kws'):
                if item[4:] == 'custom_dtype':
                    # SECURITY: eval() on config text — only safe with
                    # trusted config files; consider ast.literal_eval.
                    value = {item[4:]: eval(value)}
                else:
                    value = {item[4:]: value}
                item = 'kws'
            if include_default or item not in config.defaults().keys():
                if item == 'kws':
                    # Fix: the original did ds[section][item].update(value),
                    # which raised KeyError on the first kws_* option of a
                    # section; setdefault creates the dict on first use.
                    ds[section].setdefault('kws', {}).update(value)
                else:
                    ds[section][item] = value
    return ds
def _parse(paths, prepos, ignored_map, ignored_location_map):
    """Parse files in paths to load config

    Reads each repos.conf-style file into one parser, then materializes a
    RepoConfig per section into ``prepos``. When the same repo name
    appears with a different location, the older location is recorded in
    ``ignored_map`` / ``ignored_location_map`` instead of clobbering.
    """
    parser = SafeConfigParser()

    # use read_file/readfp in order to control decoding of unicode
    try:
        # Python >=3.2
        read_file = parser.read_file
    except AttributeError:
        read_file = parser.readfp

    for p in paths:
        f = None
        try:
            f = io.open(_unicode_encode(p,
                encoding=_encodings['fs'], errors='strict'),
                mode='r', encoding=_encodings['repo.content'],
                errors='replace')
        except EnvironmentError:
            # Missing or unreadable files are silently skipped by design.
            pass
        else:
            try:
                read_file(f)
            except ParsingError as e:
                writemsg(_unicode_decode(
                    _("!!! Error while reading repo config file: %s\n")
                    ) % e, noiselevel=-1)
        finally:
            if f is not None:
                f.close()

    prepos['DEFAULT'] = RepoConfig("DEFAULT", parser.defaults())
    for sname in parser.sections():
        optdict = {}
        for oname in parser.options(sname):
            optdict[oname] = parser.get(sname, oname)

        repo = RepoConfig(sname, optdict)
        # Entries pointing at nonexistent directories are reported and
        # dropped.
        if repo.location and not os.path.exists(repo.location):
            writemsg(_("!!! Invalid repos.conf entry '%s'"
                " (not a dir): '%s'\n") % (sname, repo.location),
                noiselevel=-1)
            continue

        if repo.name in prepos:
            old_location = prepos[repo.name].location
            if old_location is not None and repo.location is not None and old_location != repo.location:
                # Same name, new location: remember the old one as ignored.
                ignored_map.setdefault(repo.name, []).append(old_location)
                ignored_location_map[old_location] = repo.name
            prepos[repo.name].update(repo)
        else:
            prepos[repo.name] = repo
def decode(self, s, **kwargs):
    """Decode an INI-formatted string into a nested dict.

    DEFAULT-section options become top-level entries; each section
    becomes a nested dict. Values are resolved through
    ``self._get_section_option_value``.
    """
    parser = ConfigParser(**kwargs)
    if PY2:
        parser.readfp(StringIO(s))
    else:
        parser.read_string(s)

    result = {}
    for option in parser.defaults():
        result[option] = self._get_section_option_value(
            parser, default_section, option)
    for section in parser.sections():
        result[section] = {
            option: self._get_section_option_value(parser, section, option)
            for option, _ in parser.items(section)
        }
    return result
def get_options_from_file(file_path):
    # type: (str) -> Dict[str, Any]
    """Parse options from the config file at `file_path` and return them as a dict"""
    config = ConfigParser()
    config.read(file_path)

    parsed = {}  # type: Dict[str, Any]
    # [DEFAULT] section only; 'true'/'false'-like strings become booleans.
    # NOTE: removed support for per-file config file sections, as I don't
    # think they were being used.
    for key, raw in config.defaults().items():
        if is_false(raw):
            parsed[key] = False
        elif is_true(raw):
            parsed[key] = True
        else:
            parsed[key] = raw
    return parsed
def eclass_doc_check():
    """Check eclass documentation syntax."""
    # Ask portage for the repos.conf view of the system.
    p = Popen(['portageq', 'repos_config', '/'], stdout=PIPE)
    p.wait()
    conf = ConfigParser()
    # NOTE(review): readfp() is removed in Python 3.12; read_file() is the
    # modern spelling — confirm supported interpreter versions.
    conf.readfp(p.stdout)
    # Locate the main repo's checkout to find the awk linter script.
    portdir = conf.get(conf.defaults()['main-repo'], 'location')
    awk_file = portdir + '/' + \
        'app-portage/eclass-manpages/files/eclass-to-manpage.awk'
    eclasses = glob('eclass/*.eclass')
    for eclass in eclasses:
        # Pipe each eclass through the doc-syntax awk script; any stderr
        # output is reported as a lint failure.
        proc = Popen(['gawk', '-f', awk_file],
                     stdin=PIPE, stdout=PIPE, stderr=PIPE)
        _, err = proc.communicate(open(eclass, 'rb').read())
        if err:
            print(warn('>>> %s' % eclass))
            print(err)
def __init__(self, logger=None, system_type=None, host_name=None,
             default=None):
    # type: (Optional[logging.Logger], Optional[str], Optional[str], Optional[Iterable]) -> None
    """Initialize a ConfigBase object

    If a system_type is supplied, such as "gateway", "client", or
    "engine", configuration options for that specific system type
    override options in the [DEFAULT] section. Similarly, if a host name
    is supplied, the options under the section of that specific host name
    override other options. A derivative class usually only needs to
    implement get_config().

    :param logger: a logger for logging config parsing-related
        information, None can be passed to disable logging.
    :param system_type: the type of the system to read configuration for
    :param host_name: the host name of the system
    :param default: a SectionProxy or any dict-like object that provides
        default values
    """
    self._logger = logger
    self._system_type = system_type
    self._host_name = host_name
    # Load default values from default.conf next to this module.
    cp = ConfigParser()
    default_config_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'default.conf')
    loaded_files = cp.read(default_config_file)
    if len(loaded_files) == 0:
        # Could not read the bundled defaults; fall back to the caller's
        # ``default`` mapping (or None).
        self.log(
            logging.WARNING,
            'Failed to load default config file: ' + default_config_file)
        self._config = None
        if default is not None:
            self._config = default
    else:
        self.log(logging.DEBUG,
                 'Loaded default config file ' + str(loaded_files))
        # Start from the file's DEFAULT section; caller-supplied defaults
        # override entries from the file.
        self._config = cp.defaults()
        if default is not None:
            self._config.update(default)
def updateExe():
    """Check the shared version INI for a newer WorkTool release and, if
    the user confirms, download and launch the update zip via a GUI
    progress bar.
    """
    update = False
    version = ''
    content = ''
    if isfile(VERSION_INI_FILE):
        customerConf = ConfigParser()
        customerConf.read(VERSION_INI_FILE)
        defaultConf = customerConf.defaults()

        def versionToInt(version: str):
            # "1.2.3" -> 1002003-style integer for ordered comparison.
            vint = 0
            for istr in version.split('.'):
                vint = vint * 1000 + int(istr)
            return vint

        if 'version' in defaultConf:
            versionToInt(defaultConf['version'])
            remote_version = versionToInt(defaultConf['version'])
            current_version = versionToInt(CURRENT_VERSION)
            if remote_version > current_version:
                update = True
                version = 'v{}'.format(defaultConf['version'])
        if 'content' in defaultConf:
            content = defaultConf['content']
    if update:
        # Ask the user (Chinese prompt: "update to version X?").
        ret = tk.messagebox.askquestion(title='新版本更新',
                                        message='是否更新版本{}?\n\n{}'.format(
                                            version, content))
        if ret == 'yes' or str(ret) == 'True':
            file_path = askdirectory()
            bar = widget.GressBar()

            def copyTool():
                # Copy the update zip to the chosen directory, then open
                # it; a fixed sleep gives the copy time to settle.
                zip_file = sep.join([file_path, 'WorkTool.zip'])
                copyfile(EXE_PATH + 'WorkTool.zip', zip_file)
                time.sleep(3)
                if isfile(zip_file):
                    startfile(zip_file)
                else:
                    tk.messagebox.showinfo(title='提示', message='下载失败!')
                bar.quit()

            WorkThread(action=copyTool).start()
            bar.start('更新软件', '正在下载......')
class Config:
    """Wrapper around ConfigParser supporting regex-addressed overrides
    ("section-pattern/option=value" strings) and include-aware defaults.
    """

    def __init__(self):
        self._parser = ConfigParser(empty_lines_in_values=False)
        # NOTE(review): replaces the parser's private _defaults dict with
        # a project IncludeDict — fragile across configparser versions.
        self._parser._defaults = IncludeDict(self._parser)

        # Copy attributes from the parser to avoid one additional
        # function call on each access.
        for attr in ["has_section", "remove_section"]:
            setattr(self, attr, getattr(self._parser, attr))

    def read_configs(self, configs):
        """Apply override strings of the form 'section/option=value'.

        The section part is a regex matched against existing sections;
        when nothing matches, a literal section of that name is created.
        """
        for config in configs:
            match = re.match("(.*)/([^/]+)=(.*)", config)
            if not match:
                raise Exception("Invalid config string: %s" % config)
            (name, option, value) = match.groups()

            # Match section names
            name_regex = re.compile(name)
            sections = [section for section in self._parser.sections()
                        if name_regex.match(section)]
            if not sections:
                self._parser.add_section(name)
                sections.append(name)

            for section in sections:
                logging.debug('Setting configuration parameter: '
                              '%(section)s/%(option)s = %(value)s'
                              % {'section': section, 'option': option,
                                 'value': value})
                self._parser.set(section, option, value)

    def read_file(self, file, filename="<stream>"):
        """Read configuration from an open file object."""
        logging.info("Reading configurations from: %s", filename)
        self._parser.read_file(file, filename)

    def read_filename(self, filename):
        """Read configuration from a path; raises when it doesn't exist."""
        if not os.path.exists(filename):
            raise Exception("No such configuration file: %s" % filename)
        file = open(filename, "r")
        return self.read_file(file, filename)

    def get_defaults(self):
        """Return the DEFAULT section wrapped in a ConfigDefaults."""
        attributes = self._parser.defaults()
        return ConfigDefaults(self, 'DEFAULT', attributes)

    def get_section(self, name):
        """Return the named section as a ConfigSection, or None."""
        if self._parser.has_section(name):
            attributes = dict(self._parser.items(name))
            return ConfigSection(self, name, attributes)
        return None

    def get_section_names(self):
        return self._parser.sections()

    def add_section(self, name):
        self._parser.add_section(name)
        return self.get_section(name)
from configparser import ConfigParser
from datetime import timedelta, datetime
import email
import re
import os
import getpass
import zipfile
import shutil
from multiprocessing.dummy import Pool as ThreadPool, Queue
from multiprocessing import Pool as ProcessPool
import sys

# Parameters
config = ConfigParser()
config.read("gsm_config.ini")
# Read the DEFAULT section once instead of calling config.defaults()
# for every key (it is the same dict each time).
_defaults = config.defaults()
mail_folder = _defaults["mail_folder"]
mail_passwd = _defaults["mail_passwd"]
mail_server = _defaults["mail_server"]
mail_user = _defaults["mail_user"]

username = getpass.getuser()
# Extracts the numeric platform id from names like "007g" -> "7".
ptt_pattern = re.compile("[0]*(?P<platform>[0-9]+)g")
dest_path = os.path.join(
    os.path.expanduser("~%s" % username),
    "Desktop",
    "import_gsm_" + datetime.today().strftime("%Y-%m-%d")
)

# Maximum number of concurrent connections
max_connections = 8

# Globals
connection_queue = Queue()
def _parse(paths, prepos, ignored_map, ignored_location_map, local_config, portdir):
	"""Parse files in paths to load config

	Populates ``prepos`` with one RepoConfig per parsed section, plus a
	'DEFAULT' entry built from the parser's defaults. ``paths`` may mix
	filesystem paths (str) and ``io.StringIO`` objects.
	"""
	parser = SafeConfigParser()

	# use read_file/readfp in order to control decoding of unicode
	try:
		# Python >=3.2
		read_file = parser.read_file
		source_kwarg = 'source'
	except AttributeError:
		# Python 2 / <3.2 fallback: readfp takes 'filename' instead.
		read_file = parser.readfp
		source_kwarg = 'filename'

	# Expand string paths (which may be directories) into the list of
	# contained files; non-string entries pass through unchanged.
	recursive_paths = []
	for p in paths:
		if isinstance(p, basestring):
			recursive_paths.extend(_recursive_file_list(p))
		else:
			recursive_paths.append(p)

	for p in recursive_paths:
		if isinstance(p, basestring):
			f = None
			try:
				f = io.open(_unicode_encode(p,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
			except EnvironmentError:
				# Missing/unreadable files are skipped (best effort).
				pass
			else:
				# The 'source' keyword argument is needed since otherwise
				# ConfigParser in Python <3.3.3 may throw a TypeError
				# because it assumes that f.name is a native string rather
				# than binary when constructing error messages.
				kwargs = {source_kwarg: p}
				read_file(f, **portage._native_kwargs(kwargs))
			finally:
				if f is not None:
					f.close()
		elif isinstance(p, io.StringIO):
			kwargs = {source_kwarg: "<io.StringIO>"}
			read_file(p, **portage._native_kwargs(kwargs))
		else:
			raise TypeError("Unsupported type %r of element %r of 'paths' argument"
				% (type(p), p))

	prepos['DEFAULT'] = RepoConfig("DEFAULT",
		parser.defaults(), local_config=local_config)

	for sname in parser.sections():
		optdict = {}
		for oname in parser.options(sname):
			optdict[oname] = parser.get(sname, oname)

		repo = RepoConfig(sname, optdict, local_config=local_config)

		# Perform repos.conf sync variable validation
		portage.sync.validate_config(repo, logging)

		# For backward compatibility with locations set via PORTDIR and
		# PORTDIR_OVERLAY, delay validation of the location and repo.name
		# until after PORTDIR and PORTDIR_OVERLAY have been processed.
		prepos[sname] = repo
class WorkEnv(singleton.BaseSingleton):
    """
    A class which include each kind of directory, config files and etc.

    Parameters
    ----------
    work_dir: str, optional
        Specify work directory. If not provided, it would find .ngta's dir as work dir.
    """
    lock = threading.Lock()

    def __init__(self, work_dir: str = None) -> None:
        self.work_dir = work_dir or self.find_work_dir(self.cwd)
        self.ini = ConfigParser()
        if self.work_dir:
            # The anchor file is named ".<package>" inside the work dir.
            self.ini.read(os.path.join(self.work_dir, ".%s" % PACKAGE_NAME))
            self.work_dir = os.path.abspath(self.work_dir)
            self.case_dir = os.path.join(self.work_dir, CASE_DIR_BASENAME)
            self.libs_dir = os.path.join(self.work_dir, LIBS_DIR_BASENAME)
            self.logs_dir = os.path.join(self.work_dir, LOGS_DIR_BASENAME)
            self.prepare_sys_path()
        else:
            self.case_dir = None
            self.libs_dir = None
            self.logs_dir = None

    def load_config_from_anchor(self):
        """
        Load configurations from .ngta file.

        Honors two DEFAULT-section keys: 'cases_dir' (overrides the case
        directory; relative values are resolved against work_dir) and
        'pre_imports' (comma-separated module names to import eagerly).
        """
        if self.work_dir:
            defaults = self.ini.defaults()
            cases_dir = defaults.get("cases_dir", None)
            if cases_dir:
                # BUGFIX: the original tested and joined self.case_dir here,
                # silently ignoring the configured cases_dir value.
                if not os.path.isabs(cases_dir):
                    cases_dir = os.path.abspath(os.path.join(self.work_dir, cases_dir))
                self.case_dir = cases_dir
            else:
                self.case_dir = os.path.join(self.work_dir, CASE_DIR_BASENAME)

            pre_imports = defaults.get('pre_imports', None)
            if pre_imports:
                module_names = pre_imports.split(',')
                for module_name in module_names:
                    stripped = module_name.strip()
                    if stripped:
                        importlib.import_module(stripped)

    @property
    def cwd(self) -> str:
        """Current working directory at access time."""
        return os.getcwd()

    def prepare_sys_path(self) -> None:
        """
        Add lib and cases dir into sys.path
        """
        if self.work_dir:
            sys.path.insert(0, self.libs_dir)
            sys.path.insert(0, self.case_dir)

    def get_output_dir(self) -> typing.Optional[str]:
        """Return a timestamped directory path under the logs dir, or None
        when no work dir was located."""
        if self.work_dir:
            return os.path.join(self.work_dir, LOGS_DIR_BASENAME, time.strftime("%Y-%m-%d_%H-%M-%S"))
        return None

    @classmethod
    def find_work_dir(cls, current_dir: str = None) -> typing.Optional[str]:
        """
        Find work dir from cwd and its ancestors by locate .ngta file.

        Returns None when the filesystem root is reached without finding
        the anchor file.
        """
        # BUGFIX: the original default was `os.getcwd()`, which is evaluated
        # once at class-definition time; resolve the cwd at call time instead.
        if current_dir is None:
            current_dir = os.getcwd()
        for name in os.listdir(current_dir):
            if name == ".{}".format(PACKAGE_NAME):
                return current_dir

        parent_dir = os.path.dirname(current_dir)
        # samefile(parent, current) is True only at the filesystem root.
        if os.path.samefile(parent_dir, current_dir):
            return None
        else:
            return cls.find_work_dir(parent_dir)

    @classmethod
    def init(cls, dest_dir: str, include_sample: bool = True) -> None:
        """
        Init destination dir as work dir.

        Parameters
        ----------
        dest_dir: str
            Specify destination dir.

        include_sample: bool, optional
            Whether include sample cases when initialize work dir.
        """
        logging.basicConfig(level=logging.DEBUG)
        fs.mkdirs(dest_dir)
        ngta_dir = os.path.dirname(__file__)
        init_dir = os.path.join(ngta_dir, INIT_DIR_BASENAME)
        conf_dir = os.path.join(init_dir, CONF_DIR_BASENAME)
        exclude = None if include_sample else 'sample'
        fs.copy(init_dir, dest_dir, exclude)
        fs.mkdirs(os.path.join(dest_dir, LIBS_DIR_BASENAME))
        fs.mkdirs(os.path.join(dest_dir, LOGS_DIR_BASENAME))
        # Copy each sample config, stripping the "_sample" suffix; if a
        # destination config already exists, show a diff instead of clobbering.
        for src_conf in fs.find(conf_dir, top_dir=False):
            src_base = os.path.basename(src_conf)
            dst_conf = os.path.join(dest_dir, CONF_DIR_BASENAME, src_base.replace("_sample", ""))
            if os.path.exists(dst_conf):
                _diff(src_conf, dst_conf)
            else:
                fs.copy(src_conf, dst_conf)
import shutil
import nose

# Remove per-test scratch directories and stale coverage data before
# running the suite.
for path in pathlib.Path("tests").glob("*"):
    if path.is_dir():
        shutil.rmtree(str(path))
cov = pathlib.Path(".coverage")
if cov.exists():
    cov.unlink()
nose.run(argv=['nosetests'])

# Read the package metadata from the parsed config's DEFAULT section once
# instead of calling config.defaults() for every field.
_meta = config.defaults()

setup(
    name=_meta.get('module'),
    version=version,
    description=('ftp client/server for asyncio'),
    long_description='\n\n'.join(read('readme.rst')),
    classifiers=_meta.get('classifiers').splitlines(),
    author=_meta.get('author'),
    author_email=_meta.get('author-email'),
    url=_meta.get('home-page'),
    license='Apache 2',
    packages=find_packages(),
    install_requires=[],
    tests_require=['nose', 'coverage'],
    cmdclass={'test': NoseTestCommand},
    include_package_data=True
)
def _parse(paths, prepos, ignored_map, ignored_location_map, local_config, portdir):
	"""Parse files in paths to load config

	Populates ``prepos`` with one RepoConfig per valid parsed section,
	plus a 'DEFAULT' entry built from the parser's defaults. Sections
	with inconsistent or unsupported sync-* attributes are logged and
	skipped. ``paths`` may mix filesystem paths (str) and
	``io.StringIO`` objects.
	"""
	parser = SafeConfigParser()

	# use read_file/readfp in order to control decoding of unicode
	try:
		# Python >=3.2
		read_file = parser.read_file
		source_kwarg = 'source'
	except AttributeError:
		# Python 2 / <3.2 fallback: readfp takes 'filename' instead.
		read_file = parser.readfp
		source_kwarg = 'filename'

	# Expand string paths (which may be directories) into the list of
	# contained files; non-string entries pass through unchanged.
	recursive_paths = []
	for p in paths:
		if isinstance(p, basestring):
			recursive_paths.extend(_recursive_file_list(p))
		else:
			recursive_paths.append(p)

	for p in recursive_paths:
		if isinstance(p, basestring):
			f = None
			try:
				f = io.open(_unicode_encode(p,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
			except EnvironmentError:
				# Missing/unreadable files are skipped (best effort).
				pass
			else:
				# The 'source' keyword argument is needed since otherwise
				# ConfigParser in Python <3.3.3 may throw a TypeError
				# because it assumes that f.name is a native string rather
				# than binary when constructing error messages.
				kwargs = {source_kwarg: p}
				read_file(f, **portage._native_kwargs(kwargs))
			finally:
				if f is not None:
					f.close()
		elif isinstance(p, io.StringIO):
			kwargs = {source_kwarg: "<io.StringIO>"}
			read_file(p, **portage._native_kwargs(kwargs))
		else:
			raise TypeError("Unsupported type %r of element %r of 'paths' argument"
				% (type(p), p))

	prepos['DEFAULT'] = RepoConfig("DEFAULT",
		parser.defaults(), local_config=local_config)

	for sname in parser.sections():
		optdict = {}
		for oname in parser.options(sname):
			optdict[oname] = parser.get(sname, oname)

		repo = RepoConfig(sname, optdict, local_config=local_config)

		# sync-type and sync-uri must be specified together.
		if repo.sync_type is not None and repo.sync_uri is None:
			writemsg_level("!!! %s\n" % _("Repository '%s' has sync-type attribute, but is missing sync-uri attribute")
				% sname, level=logging.ERROR, noiselevel=-1)
			continue

		if repo.sync_uri is not None and repo.sync_type is None:
			writemsg_level("!!! %s\n" % _("Repository '%s' has sync-uri attribute, but is missing sync-type attribute")
				% sname, level=logging.ERROR, noiselevel=-1)
			continue

		# Only these sync backends are supported by this version.
		if repo.sync_type not in (None, "cvs", "git", "rsync"):
			writemsg_level("!!! %s\n" % _("Repository '%s' has sync-type attribute set to unsupported value: '%s'")
				% (sname, repo.sync_type), level=logging.ERROR, noiselevel=-1)
			continue

		# cvs additionally requires the repository module name.
		if repo.sync_type == "cvs" and repo.sync_cvs_repo is None:
			writemsg_level("!!! %s\n" % _("Repository '%s' has sync-type=cvs, but is missing sync-cvs-repo attribute")
				% sname, level=logging.ERROR, noiselevel=-1)
			continue

		# For backward compatibility with locations set via PORTDIR and
		# PORTDIR_OVERLAY, delay validation of the location and repo.name
		# until after PORTDIR and PORTDIR_OVERLAY have been processed.
		prepos[sname] = repo
from time import sleep

try:
    # PY3
    from configparser import ConfigParser
except ImportError:
    # Python 2 fallback: SafeConfigParser matches PY3 ConfigParser behavior.
    from ConfigParser import SafeConfigParser as ConfigParser

from foscam.foscam import FoscamCamera, FOSCAM_SUCCESS

# Camera connection settings come from camtest.cfg (DEFAULT section) next
# to this module when it exists; otherwise the literal fallbacks below apply.
config = ConfigParser()
config_filepath = os.path.join(os.path.dirname(__file__), 'camtest.cfg')
if os.path.exists(config_filepath):
    config.read([config_filepath])
config_defaults = config.defaults()

CAM_HOST = config_defaults.get('host') or ''
# NOTE(review): a 'port' read from the cfg file is a str, while the
# fallback is the int 88 — confirm downstream code tolerates both.
CAM_PORT = config_defaults.get('port') or 88
CAM_USER = config_defaults.get('user') or 'admin'
CAM_PASS = config_defaults.get('pass') or 'foscam'
CAM_WIFI_SSID = config_defaults.get('wifi_ssid') or ''
CAM_WIFI_PASS = config_defaults.get('wifi_pass') or ''


class CallbackForTest(object):
    """Callable that records the positional and keyword arguments of its
    last invocation for later inspection by tests."""

    def __call__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs