def read_raw_parser(self, filename):
    """Parse a config file (or an already-built parser) into a list of dicts.

    :param filename: path to a UTF-8 config file, or a RawConfigParser-like
        object that is used as-is.
    :return: one dict per section mapping option name to value; values that
        look like list/tuple literals are evaluated into Python objects.
    """
    # The original switched on the PINGUINO_PYTHON env var using `is`
    # comparisons against string literals -- identity, not equality -- and
    # silently skipped both branches when the variable was unset, returning
    # data from an empty parser.  isinstance() covers both Python versions.
    if isinstance(filename, str):
        file_parser = RawConfigParser()
        # with-statement closes the handle (the original leaked it);
        # read_file replaces the deprecated readfp.
        with codecs.open(filename, "r", encoding="utf-8") as config_fh:
            file_parser.read_file(config_fh)
    else:
        file_parser = filename

    blocks_set = []
    for section in file_parser.sections():
        block = {}
        for option in file_parser.options(section):
            value = file_parser.get(section, option)
            # value[:1] avoids an IndexError on empty values.
            if isinstance(value, str) and value[:1] in ("[", "("):
                # NOTE(review): eval of config-file content executes
                # arbitrary code; acceptable only for trusted local files.
                block[option] = eval(value)
            else:
                block[option] = value
        blocks_set.append(block)
    return blocks_set
def run(self):
    """Migrate the configuration file from the old [paths] layout to the
    new [security]/[database] layout.  Returns a status message, or the
    exception object when the migration fails."""
    try:
        config_file = DEFAULT_CONF_FILE
        if not os.path.isfile(config_file):
            raise ValueError('Configuration file not found: {0}'.format(config_file))

        parser = RawConfigParser()
        parser.read(config_file)

        # Presence of [security] marks an already-migrated file.
        if 'security' in parser.sections():
            return 'The configuration file is already migrated to the new version'

        for new_section in ('security', 'database'):
            parser.add_section(new_section)

        # Split the legacy [paths] options between the two new sections.
        for option, value in parser.items('paths'):
            target = 'database' if option == 'database_file' else 'security'
            parser.set(target, option, value)
        parser.remove_section('paths')

        parser.set('security', 'crl_file_url', 'None')
        parser.set('logging', 'log_level', 'INFO')

        with open(config_file, 'w') as file:
            parser.write(file)
    except Exception as exc:
        return exc
    return 'Configuration file migrated'
def main():
    """Render the mail template for every database row and send it through
    the configured backend."""
    logging.basicConfig(level=logging.INFO)
    args = docopt(__doc__, version='PeP et Al. emails v0.0.1')

    database = read_database(args['<database>'])

    config = RawConfigParser()
    if not config.read(args['--config']):
        raise IOError('Could not read config-file')

    # Default to the first configured section when no backend is given.
    backend = args['--backend'] or config.sections()[0]
    if backend == 'smtplib':
        from .backends import SMTPLibMailer
        mailer = SMTPLibMailer(**config['smtplib'])
    elif backend == 'mailgun':
        from .backends import MailgunMailer
        mailer = MailgunMailer(**config['mailgun'])
    else:
        raise ValueError('Unsupported backend: {}'.format(args['--backend']))

    template, metadata = parse_template(args['<template>'])

    for recipient in database.itertuples():
        body_md = template.render(recipient=recipient, metadata=metadata)
        body_html = gfm.markdown(body_md)
        mailer.send_mail(
            recipient, metadata, body_md,
            html=body_html,
            attachments=metadata.get('attachments'),
        )
def read_configfile():
    """Load CONFIGFILE into the global `cfg` dict and refresh the
    DOVECOT/GPGMAILENCRYPT/MAILDIRLOCK globals from it."""
    global cfg, DEBUG, DOVECOT, GPGMAILENCRYPT, MAILDIRLOCK
    cfg = dict()
    _cfg = RawConfigParser()
    try:
        _cfg.read(CONFIGFILE)
    # The original bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; Exception is the broadest sensible net here.
    except Exception:
        log("Could not read config file '%s'."%CONFIGFILE,"e",ln=lineno())
        return
    for sect in _cfg.sections():
        cfg[sect] = dict(_cfg.items(sect))
    # Apply the options that feed module-level switches.
    if 'gpgmailencrypt' in cfg.get('default', {}):
        GPGMAILENCRYPT = cfg['default']['gpgmailencrypt']
    mail = cfg.get('mail', {})
    if mail.get('dovecot') == "yes":
        DOVECOT = True
    if 'maildirlock' in mail:
        MAILDIRLOCK = mail['maildirlock']
def populate_config_dict(config_path):
    """Load the configuration file into a flat key/value dictionary.

    A ConfigParser-style configuration file can have multiple sections, but
    the section distinction is ignored: key/value pairs from all sections
    are merged into a single dict.

    :param config_path: path of the config file to read
    :return: dict of option name -> value (None for empty values, with
        `~` expanded), or None when the file could not be parsed.
    """
    try:
        config_dict = {}
        parser = RawConfigParser()
        # Keep option names case-sensitive (default optionxform lowercases).
        parser.optionxform = lambda x: x
        parser.read(config_path)
        for section in parser.sections():
            for option in parser.options(section):
                config_dict[option] = str(parser.get(section, option))
    except Exception as e:
        # Include the reason; the original caught `e` but never logged it.
        logger.warning("Could not load configuration file due to exception: "
                       "%s. Only environment variable equivalents will be "
                       "used." % e)
        return None
    for key, value in config_dict.items():
        if value == '':
            config_dict[key] = None
        else:
            # All values are str at this point (see str() above).
            config_dict[key] = os.path.expanduser(value)
    return config_dict
def read_merged(self, filenames, encoding=None):
    # Merge options from several config files into self (a ConfigParser
    # subclass).  Plain options are overwritten by later files; options
    # whose value contains ";" are treated as ;-separated lists and merged
    # with the current value instead of replaced.
    # NOTE(review): `encoding` is accepted but never used -- presumably
    # kept for interface compatibility; confirm with callers.
    cfg = []
    for filename in filenames:
        _cfg = RawConfigParser()
        _cfg.read(filename)
        cfg.append(_cfg)
    for _cfg in cfg:
        for section in _cfg.sections():
            if not self.has_section(section):
                self.add_section(section)
            for option in _cfg.options(section):
                value = _cfg.get(section, option)
                if ";" in value:
                    # List-style value.  self.getdefault/self.getlist are
                    # provided by the enclosing class (not visible here).
                    current = self.getdefault(section, option, "")
                    if ";" in current:
                        # Union the new items with the existing list,
                        # preserving order, dropping empties/duplicates.
                        val = []
                        for v in value.split(";"):
                            if v and v not in val:
                                val.append(v)
                        for v in self.getlist(section, option):
                            if v and v not in val:
                                val.append(v)
                        # Trailing ";" keeps the value recognizable as a
                        # list on the next merge.
                        self.set(section, option, ";".join(val) + ";")
                        continue
                # Plain value (or list replacing a non-list): overwrite.
                self.set(section, option, value)
def read_config(self, config):
    # Read a config file (path or file-like object), following "extends"
    # chains declared in the [global] (or [global:global]) section, and
    # return flattened (src, path, section, key, value) tuples in reverse
    # order so the most-derived config wins when applied.
    result = []
    stack = [config]
    while 1:
        config = stack.pop()
        src = None
        # NOTE(review): `unicode` only exists on Python 2 (or if aliased
        # elsewhere in this module) -- confirm the target Python version.
        if isinstance(config, (str, unicode)):
            src = os.path.relpath(config)
        _config = RawConfigParser()
        # Keep option names case-sensitive.
        _config.optionxform = lambda s: s
        if getattr(config, 'read', None) is not None:
            # File-like object: parse it directly.
            _config.readfp(config)
            path = self.path
        else:
            if not os.path.exists(config):
                log.error("Config file '%s' doesn't exist.", config)
                sys.exit(1)
            _config.read(config)
            path = os.path.dirname(config)
        # Reversed here so that, after the final reversed(result), entries
        # come back in their original file order.
        for section in reversed(_config.sections()):
            for key, value in reversed(_config.items(section)):
                result.append((src, path, section, key, value))
            # Sentinel marking the start of a section.
            result.append((src, path, section, None, None))
        if _config.has_option('global', 'extends'):
            extends = _config.get('global', 'extends').split()
        elif _config.has_option('global:global', 'extends'):
            extends = _config.get('global:global', 'extends').split()
        else:
            break
        # Base configs are pushed to the front so they are processed next
        # (depth-first), resolved relative to the current file.
        stack[0:0] = [
            os.path.abspath(os.path.join(path, x))
            for x in reversed(extends)]
    return reversed(result)
def parse_cmake_module(s_in):
    """Extract the example autocmake.cfg entry embedded in a CMake module.

    Comment lines following a '#.rst:' marker are collected (with the
    leading '# ' stripped); the text after 'Example autocmake.cfg entry::'
    is parsed as an ini fragment and the 'docopt', 'define' and 'export'
    options of its section(s) are returned.

    :param s_in: full text of the CMake module
    :return: (config_docopt, config_define, config_export); each element is
        None when the corresponding option is absent.
    :raises IndexError: when the module has no example entry marker.
    """
    s_out = []
    is_rst_line = False
    for line in s_in.split('\n'):
        if is_rst_line:
            # Stay in rst mode only while consecutive '#' comment lines
            # continue; a blank or non-comment line ends the rst block.
            if len(line) > 0:
                if line[0] != '#':
                    is_rst_line = False
            else:
                is_rst_line = False
        if is_rst_line:
            s_out.append(line[2:])
        if '#.rst:' in line:
            is_rst_line = True
    autocmake_entry = '\n'.join(s_out).split('Example autocmake.cfg entry::')[1]
    autocmake_entry = autocmake_entry.replace('\n ', '\n')
    config = RawConfigParser(dict_type=OrderedDict)
    # read_string replaces the deprecated readfp(StringIO(...)) combination.
    config.read_string(autocmake_entry)
    config_docopt = None
    config_define = None
    config_export = None
    for section in config.sections():
        if config.has_option(section, 'docopt'):
            config_docopt = config.get(section, 'docopt')
        if config.has_option(section, 'define'):
            config_define = config.get(section, 'define')
        if config.has_option(section, 'export'):
            config_export = config.get(section, 'export')
    return config_docopt, config_define, config_export
def _read_from_sections(user, collection_url, permission):
    # Decide whether `user` holds `permission` on `collection_url` by
    # matching the regex-based rights sections (INITIAL_RIGHTS, overridden
    # by settings.DJRADICALE_RIGHTS).
    # The parser defaults supply un-escaped 'login'/'path' fallbacks; the
    # explicitly set values substitute re.escape'd forms.
    regex = ConfigParser({'login': user, 'path': collection_url})
    for rights in (INITIAL_RIGHTS, settings.DJRADICALE_RIGHTS):
        for section, values in rights.items():
            if not regex.has_section(section):
                regex.add_section(section)
            for key, value in values.items():
                regex.set(
                    section, key, value % {
                        'login': re.escape(user),
                        'path': re.escape(collection_url),
                    })
    log.LOGGER.debug("Rights type '%s'" % __name__)
    for section in regex.sections():
        re_user = regex.get(section, 'user')
        re_collection = regex.get(section, 'collection')
        log.LOGGER.debug(
            "Test if '%s:%s' matches against '%s:%s' from section '%s'" % (
                user, collection_url, re_user, re_collection, section))
        user_match = re.match(re_user, user)
        if user_match:
            # The user regex's capture groups may be referenced by the
            # collection pattern via str.format placeholders ({0}, {1}, ...).
            re_collection = re_collection.format(*user_match.groups())
            if re.match(re_collection, collection_url):
                log.LOGGER.debug("Section '%s' matches" % section)
                # `permission` is tested by substring membership against the
                # section's permission string (e.g. 'r' in 'rw').
                if permission in regex.get(section, 'permission'):
                    return True
        else:
            log.LOGGER.debug("Section '%s' does not match" % section)
    return False
def read_config_file(filename):
    """
    Reads a configuration file to modify the global settings.

    :param filename: cfg file pathname, read through os.path.normpath
    """
    global LOG_FORMAT, LOG_FILENAME, STATE_REGEX, ZIP_REGEX

    parser = RawConfigParser()
    parser.read(path.normpath(filename))

    # Recognized layout:
    #   [log]        format, output
    #   [validators] state, zip_code
    for section in parser.sections():
        for option in parser.options(section):
            value = parser.get(section, option)
            if section == "log":
                if option == "format":
                    LOG_FORMAT = value
                elif option == "output":
                    LOG_FILENAME = value
            elif section == "validators":
                if option == "state":
                    STATE_REGEX = compile(value)
                elif option == "zip_code":
                    ZIP_REGEX = compile(value)
def load(self, csv):
    """Parse a locale file into self.conf, routing placeholder values
    (containing '__') into self.crap instead."""
    parser = RawConfigParser()
    # The file starts section-less and comes in an unknown encoding, so
    # sniff the encoding with chardet and prepend a synthetic section.
    with open(csv, 'rb') as handle:
        raw = handle.read()
    text = '[__global__]\n' + raw.decode(chardet.detect(raw)['encoding'])
    parser.read_string(text)
    for sec in parser.sections():
        if not self.conf.has_section(sec):
            self.conf.add_section(sec)
            self.crap.add_section(sec)
        for k, v in parser.items(sec):
            if '__' in v:
                # Placeholder/garbage translation.
                if self.crap.has_option(sec, k):
                    print('Overwriting crap locale %s (%r -> %r)'
                          % (k, self.crap.get(sec, k), v))
                self.crap.set(sec, k, v)
            else:
                if self.conf.has_option(sec, k) and \
                        self.conf.get(sec, k).lower() != v.lower():
                    print('Overwriting locale %s (%r -> %r)'
                          % (k, self.conf.get(sec, k), v))
                self.conf.set(sec, k, v)
def load(self, statedir: str = None) -> None:
    """Populate self.projects from the state directory, preferring the JSON
    format with a fallback to the legacy .ini format."""
    if statedir is None:
        statedir = self.get_state_dir()

    json_file = os.path.join(statedir, "state.json")
    if os.path.exists(json_file):
        # Current format: one JSON document with a "projects" mapping.
        with open(json_file, "rt") as fd:
            self.projects = json.load(fd)["projects"]
        return

    # TODO: remove support for legacy format
    legacy_file = os.path.join(statedir, "state")
    if os.path.exists(legacy_file):
        from configparser import RawConfigParser
        parser = RawConfigParser()
        parser.read([legacy_file])
        # Legacy layout: one "[proj <name>]" section per project.
        for secname in parser.sections():
            if secname.startswith("proj "):
                name = secname.split(None, 1)[1]
                self.projects[name] = {"fname": parser.get(secname, "fname")}
        return
def load(self, csv):
    """Parse a locale file (utf-8-sig) into self.conf, routing placeholder
    values (containing '__') into self.crap instead."""
    parser = RawConfigParser()
    # utf-8-sig per https://bugs.python.org/issue7185#msg94346
    with open(csv, encoding='utf-8-sig') as handle:
        parser.read_file(handle)
    for sec in parser.sections():
        if not self.conf.has_section(sec):
            self.conf.add_section(sec)
            self.crap.add_section(sec)
        for k, v in parser.items(sec):
            if '__' in v:
                # Placeholder/garbage translation.
                if self.crap.has_option(sec, k):
                    print('Overwriting crap locale %s (%r -> %r)'
                          % (k, self.crap.get(sec, k), v))
                self.crap.set(sec, k, v)
            else:
                if self.conf.has_option(sec, k) and \
                        self.conf.get(sec, k).lower() != v.lower():
                    print('Overwriting locale %s (%r -> %r)'
                          % (k, self.conf.get(sec, k), v))
                self.conf.set(sec, k, v)
def get_sections(self):
    """ Returns a list of sections in the ini file """
    parser = RawConfigParser()
    parser.read(self.file_name)
    return parser.sections()
def _check_submodule_no_git(self):
    """
    Like ``_check_submodule_using_git``, but simply parses the
    .gitmodules file to determine if the supplied path is a git
    submodule, and does not exec any subprocesses.

    This can only determine if a path is a submodule--it does not
    perform updates, etc.  This function may need to be updated if the
    format of the .gitmodules file is changed between git versions.
    """
    gitmodules_path = os.path.abspath('.gitmodules')

    if not os.path.isfile(gitmodules_path):
        return False

    # This is a minimal reader for gitconfig-style files.  It handles a few of
    # the quirks that make gitconfig files incompatible with ConfigParser-style
    # files, but does not support the full gitconfig syntax (just enough
    # needed to read a .gitmodules file).
    gitmodules_fileobj = io.StringIO()

    # Must use io.open for cross-Python-compatible behavior wrt unicode
    with io.open(gitmodules_path) as f:
        for line in f:
            # gitconfig files are more flexible with leading whitespace; just
            # go ahead and remove it
            line = line.lstrip()

            # comments can start with either # or ;
            # (BUG FIX: the original tested ':' instead of '#')
            if line and line[0] in ('#', ';'):
                continue

            gitmodules_fileobj.write(line)

    gitmodules_fileobj.seek(0)

    cfg = RawConfigParser()

    try:
        # read_file replaces the deprecated readfp
        cfg.read_file(gitmodules_fileobj)
    except Exception as exc:
        log.warn('Malformatted .gitmodules file: {0}\n'
                 '{1} cannot be assumed to be a git submodule.'.format(
                     exc, self.path))
        return False

    for section in cfg.sections():
        if not cfg.has_option(section, 'path'):
            continue

        # Compare with trailing separators stripped on both sides.
        submodule_path = cfg.get(section, 'path').rstrip(os.sep)

        if submodule_path == self.path.rstrip(os.sep):
            return True

    return False
def settings_from_config(options):
    """Try to read config file and parse settings.

    Args:
        options: parsed NameSpace, with `config` and maybe `acl` values

    Returns:
        tuple of S3Config and PyPIConfig objects, or Nones when missing values
    """
    parser = RawConfigParser()
    config_file = options.config[0] if isinstance(options.config, list) else options.config
    try:
        parser.read(config_file)
    except Exception as error:
        print(error, file=sys.stderr)

    key = "pypicloud"  # config section key
    if key not in parser.sections():
        return None, None

    s3_conf = None
    pypi_conf = None

    if all(parser.has_option(key, opt) for opt in ("bucket", "access", "secret")):
        # CLI flag wins, then config option, then no ACL at all.
        if getattr(options, "acl", None):
            acl = options.acl[0]
        elif parser.has_option(key, "acl"):
            acl = parser.get(key, "acl")
        else:
            acl = None
        s3_conf = S3Config(
            parser.get(key, "bucket"),
            parser.get(key, "access"),
            parser.get(key, "secret"),
            acl,
        )

    if all(parser.has_option(key, opt) for opt in ("repository", "username", "password")):
        pypi_conf = PyPIConfig(
            parser.get(key, "repository"),
            parser.get(key, "username"),
            parser.get(key, "password"),
        )

    return s3_conf, pypi_conf
def __init__(self, paths):
    """Read the given ini file(s) and expose each section as an upper-cased
    attribute holding a dict of parsed option values."""
    parser = RawConfigParser()
    parser.read(paths)
    for section in parser.sections():
        parsed = {
            k: self.__try_parse(v)
            for k, v in parser.items(section)
        }
        attr = section.upper()
        # Merge into an existing attribute instead of clobbering it.
        if hasattr(self, attr):
            getattr(self, attr).update(parsed)
        else:
            setattr(self, attr, parsed)
def configuration():
    """Read the user's config file and return it as a nested dict
    ({section: {option: value}}).

    ConfigParser offers no public dict export (its private _sections does
    not resolve defaults), so the mapping is built by hand.
    """
    config = RawConfigParser()
    config.read(os.path.expanduser(configfile))
    return {
        section: {
            option: config.get(section, option)
            for option in config.options(section)
        }
        for section in config.sections()
    }
def from_ini_file(cls, cfg_file, no_default=False):
    """Build a definition list from an ini file.

    Each section becomes a Definition(name, MODULE, TEST); the TEST option
    is evaluated as a predicate and defaults to one accepting everything.

    :param cfg_file: path of the ini file; silently skipped when unreadable
    :param no_default: forwarded to the list constructor
    """
    lst = cls(no_default)
    config = RawConfigParser()
    # config.read returns the list of successfully parsed files.
    if config.read([cfg_file]) != []:
        for section in config.sections():
            try:
                test = config.get(section, 'TEST')
            except NoOptionError:
                # BUG FIX: the original default was 'lambda(x): True' --
                # Python-2-only tuple-parameter syntax that raises a
                # SyntaxError under eval() on Python 3.
                test = 'lambda x: True'
            # NOTE(review): eval() of a config value executes arbitrary
            # code; acceptable only for trusted local config files.
            lst.append(Definition(section, config.get(section, 'MODULE'),
                                  eval(test)))
    return lst
def set_property(self, section, property, value):
    """
    Change a property in settings ini file.

    The value is str()-ed and stripped; the change is written back only
    when `section` already exists, otherwise an error is reported.
    """
    config = RawConfigParser()
    config.read(self.file_name)
    if section in config.sections():
        config.set(section, property, str(value).strip())
        # with-statement guarantees the handle is closed even if write()
        # fails (the original left it open on error).
        with open(self.file_name, "w") as configfile:
            config.write(configfile)
    else:
        # fixed typos in the original message ("seem ... manualy")
        print("Error, the settings file seems to be modified manually")
def getConfigContent(self):
    """Return the whole ini file as a nested dict
    ({section: {option: value}})."""
    parser = RawConfigParser()
    parser.read(self.file_path)
    content = {}
    for section in parser.sections():
        content[section] = {
            option: parser.get(section, option)
            for option in parser.options(section)
        }
    return content
def get_addressbook_dirs():
    ''' Get path to addressbook file from default profile. '''
    for thome, tprofile in THUNDERBIRD_PROFILES:
        if not os.path.isfile(tprofile):
            continue
        parser = RawConfigParser()
        parser.read(tprofile)
        for section in parser.sections():
            if not parser.has_option(section, "Path"):
                continue
            abook = parser.get(section, "Path")
            # Relative paths are resolved against the Thunderbird home.
            if not os.path.isabs(abook):
                abook = os.path.join(thome, abook)
            if os.path.isdir(abook):
                yield abook
class Config(object):
    """Manage configuration read from a secrets file."""

    # Per-option fallbacks handed to RawConfigParser.
    DEFAULTS = {
        'username': None,
        'password': None,
        'authurl': None,
        'read-only': '0',
    }

    def __init__(self, secrets_file, default_authurl=None):
        """
        Read configuration from the secrets file.

        A default_authurl can be provided; it is used as the 'authurl'
        fallback for this instance.
        """
        # Build a per-instance copy of the defaults instead of mutating the
        # class-level DEFAULTS dict (the original wrote default_authurl into
        # Config.DEFAULTS, leaking it into every later instance).
        defaults = dict(Config.DEFAULTS)
        if default_authurl:
            defaults['authurl'] = default_authurl

        stat = os.stat(secrets_file)
        # Warn when the file is readable by "others" (mode o+r).
        if stat.st_mode & 0o004 != 0:
            log = logging.getLogger(__package__)
            log.warning("%s is world readable, please consider changing its permissions to 0600" % secrets_file)

        self.secrets_file = secrets_file
        self.conf = RawConfigParser(defaults)
        self.conf.read(secrets_file)

    def items(self):
        """
        Generator that returns pairs of container name and a dictionary
        with the values associated to that container.

        See Config.DEFAULTS for the valid values.
        """
        for name in self.list_containers():
            yield name, self.get_container(name)

    def get_container(self, name):
        """
        Get a dictionary with the values associated to a container.

        See Config.DEFAULTS for the valid values.

        :raises ValueError: when the container is not in the secrets file.
        """
        if not self.conf.has_section(name):
            raise ValueError("%s not found in %s" % (name, self.secrets_file))
        return dict(self.conf.items(name))

    def list_containers(self):
        """List all container names."""
        return self.conf.sections()
def columns(p_alt_layout_path=None):
    """ Returns list with complete column configuration dicts. """
    def _as_column(p_cp, p_section):
        """Build one column dict from a parser section."""
        filterexpr = p_cp.get(p_section, 'filterexpr')
        try:
            title = p_cp.get(p_section, 'title')
        except NoOptionError:
            # Fall back to the filter expression as the title.
            title = filterexpr
        return {
            'title': title or 'Yet another column',
            'filterexpr': filterexpr,
            'sortexpr': p_cp.get(p_section, 'sortexpr'),
            'groupexpr': p_cp.get(p_section, 'groupexpr'),
            'show_all': p_cp.getboolean(p_section, 'show_all'),
        }

    defaults = {
        'filterexpr': '',
        'sortexpr': config().sort_string(),
        'groupexpr': config().group_string(),
        'show_all': '0',
    }

    cp = RawConfigParser(defaults, strict=False)
    candidates = [
        "topydo_columns.ini",
        "topydo_columns.conf",
        ".topydo_columns",
        home_config_path('.topydo_columns'),
        home_config_path('.config/topydo/columns'),
        "/etc/topydo_columns.conf",
    ]
    if p_alt_layout_path is not None:
        candidates.insert(0, expanduser(p_alt_layout_path))

    # Use the first candidate file that actually parses.
    for candidate in candidates:
        if cp.read(candidate):
            break

    return [_as_column(cp, section) for section in cp.sections()]
def load_language(language):
    """
    Load up a language as a dictionary.

    :param language: language identifier, used as a file name under "lang"
    :return: dict mapping "<section>_<option>" (lowercased) to the value
    :raises ValueError: when the identifier contains path or shell
        metacharacters (guards against path traversal).
    """
    if any(ch in language for ch in ("..", "?", "!", "\\", "/", "`")):
        raise ValueError("language identifier has invalid characters")

    langdict = dict()
    parser = RawConfigParser()
    lang_path = os.path.abspath(os.path.join("lang", language + ".cfg"))
    # with-statement closes the handle (the original leaked it) and
    # read_file replaces the deprecated readfp.
    with codecs.open(lang_path, "r", "utf8") as lang_file:
        parser.read_file(lang_file)
    for section in parser.sections():
        for option in parser.options(section):
            langdict[(section + "_" + option).lower()] = \
                parser.get(section, option)
    return langdict
class FactorioLocale:
    """Accumulate Factorio locale entries, keeping placeholder values
    (containing '__') in a separate 'crap' store."""

    def __init__(self):
        self.conf = RawConfigParser()   # good translations
        self.crap = RawConfigParser()   # placeholder/garbage translations

    def get_name(self, section, name):
        # Fall back to a recognizable '#section#name#' marker.
        return self.conf.get(section, name) or '#%s#%s#' % (section, name)

    def load(self, csv):
        parser = RawConfigParser()
        # Files start section-less and come in assorted encodings, so sniff
        # the encoding with chardet and prepend a synthetic section header.
        with open(csv, 'rb') as handle:
            raw = handle.read()
        text = '[__global__]\n' + raw.decode(chardet.detect(raw)['encoding'])
        parser.read_string(text)
        for sec in parser.sections():
            if not self.conf.has_section(sec):
                self.conf.add_section(sec)
                self.crap.add_section(sec)
            for k, v in parser.items(sec):
                if '__' in v:
                    if self.crap.has_option(sec, k):
                        print('Overwriting crap locale %s (%r -> %r)'
                              % (k, self.crap.get(sec, k), v))
                    self.crap.set(sec, k, v)
                else:
                    if self.conf.has_option(sec, k) and \
                            self.conf.get(sec, k).lower() != v.lower():
                        print('Overwriting locale %s (%r -> %r)'
                              % (k, self.conf.get(sec, k), v))
                    self.conf.set(sec, k, v)

    def merge(self):
        # Fall back to crap entries for keys that never got a good value.
        for sec in self.crap.sections():
            for k, v in self.crap.items(sec):
                if not self.conf.has_option(sec, k):
                    print('Using crap locale %s (%r)' % (k, v))
                    self.conf.set(sec, k, v)

    def save(self, out):
        with open(out, 'w') as handle:
            self.conf.write(handle)
def read_pypirc(repository=DEFAULT_REPOSITORY, realm=DEFAULT_REALM):
    """Read the .pypirc file."""
    rc = get_pypirc_path()
    if not os.path.exists(rc):
        return {}

    config = RawConfigParser()
    config.read(rc)
    sections = config.sections()

    if "distutils" in sections:
        # let's get the list of servers
        index_servers = config.get("distutils", "index-servers")
        _servers = [server.strip()
                    for server in index_servers.split("\n")
                    if server.strip() != ""]
        if _servers == []:
            # nothing set, let's try to get the default pypi
            if "pypi" in sections:
                _servers = ["pypi"]
            else:
                # the file is not properly defined, returning an empty dict
                return {}
        for server in _servers:
            current = {"server": server,
                       "username": config.get(server, "username")}
            # optional params fall back to their defaults
            for key, default in (("repository", DEFAULT_REPOSITORY),
                                 ("realm", DEFAULT_REALM),
                                 ("password", None)):
                current[key] = (config.get(server, key)
                                if config.has_option(server, key)
                                else default)
            if current["server"] == repository or \
                    current["repository"] == repository:
                return current
    elif "server-login" in sections:
        # old format
        server = "server-login"
        if config.has_option(server, "repository"):
            repository = config.get(server, "repository")
        else:
            repository = DEFAULT_REPOSITORY
        return {
            "username": config.get(server, "username"),
            "password": config.get(server, "password"),
            "repository": repository,
            "server": server,
            "realm": DEFAULT_REALM,
        }

    return {}
def resolve(pipeline):
    """Resolve a pipeline configuration into a RawConfigParser holding the
    transmogrifier section plus every section referenced by a 'pipeline'
    option, with keys emitted in sorted order."""
    config = load_config(pipeline)

    resolved = RawConfigParser(dict_type=OrderedDict)
    resolved.add_section('transmogrifier')
    for key in sorted(config.get('transmogrifier')):
        resolved.set('transmogrifier', key, config['transmogrifier'][key])

    # Every section named in any section's 'pipeline' option.
    referenced = reduce(
        add,
        [get_lines(config.get(section).get('pipeline') or '')
         for section in config.keys()])

    for section in sorted(config.keys()):
        if section not in referenced or section in resolved.sections():
            continue
        resolved.add_section(section)
        for key in sorted(config.get(section)):
            resolved.set(section, key, config[section][key])

    return resolved
def _setup(self): """Initializes all required attributes, getting the values from the configuration file. """ # On Windows the current path is where PythonService.exe is located. # We change the current path to the location of this script. os.chdir(os.path.dirname(os.path.realpath( __file__ ))) if not os.path.isfile(self.config_file): raise ValueError('Configuration file not found: {}'.format( self.config_file)) config = RawConfigParser(DEFAULTS) config.read((self.config_file,)) self.port = int(config.get('general', 'port')) self.ca_file = config.get('security', 'known_ca') self.cert_key_file = config.get('security', 'cert_key_file') self.crl_file_url = config.get('security', 'crl_file_url') self.db_file = config.get('database', 'database_file') self.logfile = config.get('logging', 'log_file') self.loglevel = config.get('logging', 'log_level') self.logformat = config.get('logging', 'log_format') if not os.path.isfile(self.ca_file): raise ValueError('Known CAs file not found: {}'.format( self.ca_file)) if not os.path.isfile(self.cert_key_file): raise ValueError('Certificate and key file not found: {}'.format( self.cert_key_file)) self.db = Database(self.db_file) if self.db.new_db: for obj in objects.objects: obj.create_table(self.db) conf = dict([(sc, dict(config.items(sc))) for sc in config.sections()]) self.context = dict(conf=conf, db=self.db) for obj in objects.objects: self.context[obj.__name__] = obj logging.basicConfig( filename=self.logfile, format=self.logformat, level=getattr(logging, self.loglevel))
class FactorioLocale:
    """Accumulate Factorio locale entries, keeping placeholder values
    (containing '__') in a separate 'crap' store."""

    def __init__(self):
        self.conf = RawConfigParser()   # good translations
        self.crap = RawConfigParser()   # placeholder/garbage translations

    def get_name(self, section, name):
        # Fall back to a recognizable '#section#name#' marker.
        return self.conf.get(section, name) or '#%s#%s#' % (section, name)

    def load(self, csv):
        parser = RawConfigParser()
        # utf-8-sig per https://bugs.python.org/issue7185#msg94346
        with open(csv, encoding='utf-8-sig') as handle:
            parser.read_file(handle)
        for sec in parser.sections():
            if not self.conf.has_section(sec):
                self.conf.add_section(sec)
                self.crap.add_section(sec)
            for k, v in parser.items(sec):
                if '__' in v:
                    if self.crap.has_option(sec, k):
                        print('Overwriting crap locale %s (%r -> %r)'
                              % (k, self.crap.get(sec, k), v))
                    self.crap.set(sec, k, v)
                else:
                    if self.conf.has_option(sec, k) and \
                            self.conf.get(sec, k).lower() != v.lower():
                        print('Overwriting locale %s (%r -> %r)'
                              % (k, self.conf.get(sec, k), v))
                    self.conf.set(sec, k, v)

    def merge(self):
        # Fall back to crap entries for keys that never got a good value.
        for sec in self.crap.sections():
            for k, v in self.crap.items(sec):
                if not self.conf.has_option(sec, k):
                    print('Using crap locale %s (%r)' % (k, v))
                    self.conf.set(sec, k, v)

    def save(self, out):
        with open(out, 'w') as handle:
            self.conf.write(handle)
request.get_method = lambda: 'POST' result2 = urllib.request.urlopen(request) def update_ci(server_title, server_type, username, properties): print("Processing credential [%s] for server type [%s] with title [%s]" % (username, server_type, server_title)) config_object = {} try: config_object = get_configuration_object(section, server_type) except: print("Could not find existing config object for title %s" % server_title) config_object["title"] = server_title for item in properties: config_object[item[0]] = item[1] save_configuration_object(config_object) cp = RawConfigParser() #To avoid parser to convert all keys to lowercase by default cp.optionxform = str cp.read(sys.argv[1]) for section in cp.sections(): update_ci(section, cp.get(section, "type"), cp.get(section, "username"), cp.items(section)) print("Updated credentials")
def _read_config(self, config_file, incl_sect=None):
    """ private method to read config, check some requirements and return dict with config

    :param config_file: path/file name to read
    :type config_file: string
    :param incl_sect : section name to include from other config file, for recursive calls
    :type incl_sect : string
    """
    raw_config = RawConfigParser()
    try:
        raw_config.read(abspath(config_file))
    except ParseError as err:
        self.error_status = ERR_CONFIG_FILE_READ
        self._logger.error(err)
        return {}
    section_names_list = raw_config.sections()
    if not len(section_names_list):
        self.error_status = ERR_CONFIG_FILE_CONTENT
        self._logger.error(
            'No sections defined in config file %s - min: [db_connection] and [collections].' % config_file)
        return {}
    # In a recursive call for an "include", only that one section is read.
    include_section = section_names_list if incl_sect is None else incl_sect
    include_config = []
    sections_list = OrderedDict()
    try:
        for section_name in section_names_list:
            # don't import if not inside specific chapter
            if section_name not in include_section:
                continue
            # sections_list[section_name] = {}
            # Record "include = <file>" references for the second pass below.
            try:
                include = raw_config.get(section_name, "include").strip('"\' ')
                if len(include):
                    include_config.append([include, section_name])
            except ParseError:
                pass
            # NOTE(review): eval() of config values executes arbitrary
            # code; only safe for trusted config files.
            if section_name == "db_connection":
                sections_list["connection"] = eval(
                    raw_config.get(section_name, "connection"))
            elif section_name == 'collections':
                sections_list["update_list"] = eval(
                    raw_config.get(section_name, 'update_list'))
            elif section_name == 'mks_settings':
                if raw_config.has_option('mks_settings', 'task_id'):
                    sections_list['task_id'] = raw_config.get(
                        section_name, 'task_id')
        # iterate through additional configs from includes now
        for inc in include_config:
            # Relative include paths are resolved against this config file.
            if not isabs(inc[0]):
                inc[0] = join(dirname(config_file), inc[0])
            incl_lst = self._read_config(inc[0], inc[1])
            # Merge included sections, without overwriting local ones.
            for incl_sct in incl_lst:
                if incl_sct not in sections_list:
                    sections_list[incl_sct] = incl_lst[incl_sct]
                else:
                    sections_list[incl_sct].update(incl_lst[incl_sct])
    except ParseError as err:
        self.error_status = ERR_CONFIG_FILE_CONTENT
        self._logger.error('Parse error during config file reading:\n %s' % err)
    return sections_list
print('Setup Serial Connection... ', end='') port = settings.get('solarmon', 'port', fallback='/dev/ttyUSB0') client = ModbusClient(method='rtu', port=port, baudrate=9600, stopbits=1, parity='N', bytesize=8, timeout=1) client.connect() print('Dome!') print('Loading inverters... ') inverters = [] for section in settings.sections(): if not section.startswith('inverters.'): continue name = section[10:] unit = int(settings.get(section, 'unit')) measurement = settings.get(section, 'measurement') growatt = None try: growatt = Growatt(client, name, unit) except Exception as err: print(err) inverters.append({ 'error_sleep': 0, 'name': name,
def get_keyword():
    """Return the search API value (second entry of the first section) from
    the taobao settings file."""
    parser = RawConfigParser()
    parser.read("taobao/config/settings.cfg")
    first_section = parser.sections()[0]
    # NOTE(review): position-based access -- assumes the option order in
    # the settings file is fixed.
    return parser.items(first_section)[1][1]
def get_comment_api():
    """Return the comment API value (fourth entry of the first section) from
    the taobao settings file."""
    parser = RawConfigParser()
    parser.read("taobao/config/settings.cfg")
    first_section = parser.sections()[0]
    # NOTE(review): position-based access -- assumes the option order in
    # the settings file is fixed.
    return parser.items(first_section)[3][1]
def get_crawl_page():
    """Return the crawl page count (fifth entry of the first section, as an
    int) from the taobao settings file."""
    parser = RawConfigParser()
    parser.read("taobao/config/settings.cfg")
    first_section = parser.sections()[0]
    # NOTE(review): position-based access -- assumes the option order in
    # the settings file is fixed.
    return int(parser.items(first_section)[4][1])
class Repository:
    """Filesystem based transactional key value store

    On disk layout:
    dir/README
    dir/config
    dir/data/<X / SEGMENTS_PER_DIR>/<X>
    dir/index.X
    dir/hints.X
    """
    DEFAULT_MAX_SEGMENT_SIZE = 5 * 1024 * 1024   # 5 MiB per data segment file
    DEFAULT_SEGMENTS_PER_DIR = 10000             # fan-out inside dir/data/

    class DoesNotExist(Error):
        """Repository {} does not exist."""

    class AlreadyExists(Error):
        """Repository {} already exists."""

    class InvalidRepository(Error):
        """{} is not a valid repository."""

    class CheckNeeded(Error):
        """Inconsistency detected. Please run "borg check {}"."""

    class ObjectNotFound(Error):
        """Object with key {} not found in repository {}."""

    def __init__(self, path, create=False, exclusive=False):
        """Open (and optionally first create) the repository at `path`."""
        self.path = os.path.abspath(path)
        self.io = None
        self.lock = None
        self.index = None
        self._active_txn = False
        if create:
            self.create(self.path)
        self.open(self.path, exclusive)

    def __del__(self):
        # best-effort release of lock/io on garbage collection
        self.close()

    def __repr__(self):
        return '<%s %s>' % (self.__class__.__name__, self.path)

    def create(self, path):
        """Create a new empty repository at `path`
        """
        # refuse to create over an existing non-empty directory or a file
        if os.path.exists(path) and (not os.path.isdir(path) or os.listdir(path)):
            raise self.AlreadyExists(path)
        if not os.path.exists(path):
            os.mkdir(path)
        with open(os.path.join(path, 'README'), 'w') as fd:
            fd.write('This is a Borg repository\n')
        os.mkdir(os.path.join(path, 'data'))
        config = RawConfigParser()
        config.add_section('repository')
        config.set('repository', 'version', '1')
        config.set('repository', 'segments_per_dir', self.DEFAULT_SEGMENTS_PER_DIR)
        config.set('repository', 'max_segment_size', self.DEFAULT_MAX_SEGMENT_SIZE)
        # random 256-bit repository id, stored hex-encoded
        config.set('repository', 'id', hexlify(os.urandom(32)).decode('ascii'))
        self.save_config(path, config)

    def save_config(self, path, config):
        """Write `config` to the repository's `config` file."""
        config_path = os.path.join(path, 'config')
        with open(config_path, 'w') as fd:
            config.write(fd)

    def save_key(self, keydata):
        """Persist key material into the repository config."""
        assert self.config
        keydata = keydata.decode(
            'utf-8')  # remote repo: msgpack issue #99, getting bytes
        self.config.set('repository', 'key', keydata)
        self.save_config(self.path, self.config)

    def load_key(self):
        """Return the key material stored by save_key(), as bytes."""
        keydata = self.config.get('repository', 'key')
        return keydata.encode(
            'utf-8')  # remote repo: msgpack issue #99, returning bytes

    def destroy(self):
        """Destroy the repository at `self.path`
        """
        self.close()
        os.remove(os.path.join(self.path, 'config'))  # kill config first
        shutil.rmtree(self.path)

    def get_index_transaction_id(self):
        """Return the highest N for which an index.N file exists, or None."""
        indices = sorted((int(name[6:]) for name in os.listdir(self.path)
                          if name.startswith('index.') and name[6:].isdigit()))
        if indices:
            return indices[-1]
        else:
            return None

    def get_transaction_id(self):
        """Return the current committed transaction id, replaying segments
        to rebuild the index if index and segment state disagree."""
        index_transaction_id = self.get_index_transaction_id()
        segments_transaction_id = self.io.get_segments_transaction_id()
        if index_transaction_id is not None and segments_transaction_id is None:
            # an index exists but no committed segment: repo needs a check
            raise self.CheckNeeded(self.path)
        # Attempt to automatically rebuild index if we crashed between commit
        # tag write and index save
        if index_transaction_id != segments_transaction_id:
            if index_transaction_id is not None and index_transaction_id > segments_transaction_id:
                replay_from = None
            else:
                replay_from = index_transaction_id
            self.replay_segments(replay_from, segments_transaction_id)
        return self.get_index_transaction_id()

    def open(self, path, exclusive):
        """Acquire the repo lock, validate the config and set up segment IO."""
        self.path = path
        if not os.path.isdir(path):
            raise self.DoesNotExist(path)
        self.lock = UpgradableLock(os.path.join(path, 'lock'),
                                   exclusive).acquire()
        self.config = RawConfigParser()
        self.config.read(os.path.join(self.path, 'config'))
        if 'repository' not in self.config.sections() or self.config.getint(
                'repository', 'version') != 1:
            raise self.InvalidRepository(path)
        self.max_segment_size = self.config.getint('repository',
                                                   'max_segment_size')
        self.segments_per_dir = self.config.getint('repository',
                                                   'segments_per_dir')
        self.id = unhexlify(self.config.get('repository', 'id').strip())
        self.io = LoggedIO(self.path, self.max_segment_size,
                           self.segments_per_dir)

    def close(self):
        """Release segment IO and the repository lock (idempotent)."""
        if self.lock:
            if self.io:
                self.io.close()
                self.io = None
            self.lock.release()
            self.lock = None

    def commit(self):
        """Commit transaction
        """
        self.io.write_commit()
        self.compact_segments()
        self.write_index()
        self.rollback()

    def open_index(self, transaction_id):
        """Load the on-disk index for `transaction_id`, or an empty index."""
        if transaction_id is None:
            return NSIndex()
        return NSIndex.read((os.path.join(self.path, 'index.%d') %
                             transaction_id).encode('utf-8'))

    def prepare_txn(self, transaction_id, do_cleanup=True):
        """Start a write transaction: upgrade the lock to exclusive and load
        index / segment-usage / compaction state for `transaction_id`."""
        self._active_txn = True
        try:
            self.lock.upgrade()
        except UpgradableLock.ExclusiveLockFailed:
            # if upgrading the lock to exclusive fails, we do not have an
            # active transaction. this is important for "serve" mode, where
            # the repository instance lives on - even if exceptions happened.
            self._active_txn = False
            raise
        if not self.index:
            self.index = self.open_index(transaction_id)
        if transaction_id is None:
            self.segments = {}       # segment -> count of live objects
            self.compact = set()     # segments containing reclaimable space
        else:
            if do_cleanup:
                self.io.cleanup(transaction_id)
            hints = read_msgpack(
                os.path.join(self.path, 'hints.%d' % transaction_id))
            if hints[b'version'] != 1:
                raise ValueError('Unknown hints file version: %d' %
                                 hints['version'])
            self.segments = hints[b'segments']
            self.compact = set(hints[b'compact'])

    def write_index(self):
        """Atomically persist the index and hints for the current commit and
        delete index/hints files from older transactions."""
        hints = {
            b'version': 1,
            b'segments': self.segments,
            b'compact': list(self.compact)
        }
        transaction_id = self.io.get_segments_transaction_id()
        write_msgpack(os.path.join(self.path, 'hints.%d' % transaction_id),
                      hints)
        # write to a temp file first, then rename for atomic replacement
        self.index.write(os.path.join(self.path, 'index.tmp'))
        os.rename(os.path.join(self.path, 'index.tmp'),
                  os.path.join(self.path, 'index.%d' % transaction_id))
        # Remove old indices
        current = '.%d' % transaction_id
        for name in os.listdir(self.path):
            if not name.startswith('index.') and not name.startswith('hints.'):
                continue
            if name.endswith(current):
                continue
            os.unlink(os.path.join(self.path, name))
        self.index = None

    def compact_segments(self):
        """Compact sparse segments by copying data into new segments
        """
        if not self.compact:
            return
        index_transaction_id = self.get_index_transaction_id()
        segments = self.segments
        for segment in sorted(self.compact):
            if self.io.segment_exists(segment):
                for tag, key, offset, data in self.io.iter_objects(
                        segment, include_data=True):
                    if tag == TAG_PUT and self.index.get(
                            key, (-1, -1)) == (segment, offset):
                        # object is still current: move it to a fresh segment
                        new_segment, offset = self.io.write_put(key, data)
                        self.index[key] = new_segment, offset
                        segments.setdefault(new_segment, 0)
                        segments[new_segment] += 1
                        segments[segment] -= 1
                    elif tag == TAG_DELETE:
                        # keep tombstones that post-date the last saved index
                        if index_transaction_id is None or segment > index_transaction_id:
                            self.io.write_delete(key)
                assert segments[segment] == 0
        self.io.write_commit()
        for segment in sorted(self.compact):
            assert self.segments.pop(segment) == 0
            self.io.delete_segment(segment)
        self.compact = set()

    def replay_segments(self, index_transaction_id, segments_transaction_id):
        """Rebuild index/usage state by replaying segment files in
        (index_transaction_id, segments_transaction_id]."""
        self.prepare_txn(index_transaction_id, do_cleanup=False)
        for segment, filename in self.io.segment_iterator():
            if index_transaction_id is not None and segment <= index_transaction_id:
                continue
            if segment > segments_transaction_id:
                break
            self.segments[segment] = 0
            for tag, key, offset in self.io.iter_objects(segment):
                if tag == TAG_PUT:
                    try:
                        # a previous entry for this key becomes garbage
                        s, _ = self.index[key]
                        self.compact.add(s)
                        self.segments[s] -= 1
                    except KeyError:
                        pass
                    self.index[key] = segment, offset
                    self.segments[segment] += 1
                elif tag == TAG_DELETE:
                    try:
                        s, _ = self.index.pop(key)
                        self.segments[s] -= 1
                        self.compact.add(s)
                    except KeyError:
                        pass
                    self.compact.add(segment)
                elif tag == TAG_COMMIT:
                    continue
                else:
                    raise self.CheckNeeded(self.path)
            if self.segments[segment] == 0:
                self.compact.add(segment)
        self.write_index()
        self.rollback()

    def check(self, repair=False):
        """Check repository consistency

        This method verifies all segment checksums and makes sure
        the index is consistent with the data stored in the segments.
        """
        error_found = False

        def report_error(msg):
            nonlocal error_found
            error_found = True
            logger.error(msg)

        assert not self._active_txn
        try:
            transaction_id = self.get_transaction_id()
            current_index = self.open_index(transaction_id)
        except Exception:
            # fall back to segment state if the index can't be loaded
            transaction_id = self.io.get_segments_transaction_id()
            current_index = None
        if transaction_id is None:
            transaction_id = self.get_index_transaction_id()
        if transaction_id is None:
            transaction_id = self.io.get_latest_segment()
        if repair:
            self.io.cleanup(transaction_id)
        segments_transaction_id = self.io.get_segments_transaction_id()
        self.prepare_txn(None)
        for segment, filename in self.io.segment_iterator():
            if segment > transaction_id:
                continue
            try:
                objects = list(self.io.iter_objects(segment))
            except IntegrityError as err:
                report_error(str(err))
                objects = []
                if repair:
                    self.io.recover_segment(segment, filename)
                    objects = list(self.io.iter_objects(segment))
            self.segments[segment] = 0
            for tag, key, offset in objects:
                if tag == TAG_PUT:
                    try:
                        s, _ = self.index[key]
                        self.compact.add(s)
                        self.segments[s] -= 1
                    except KeyError:
                        pass
                    self.index[key] = segment, offset
                    self.segments[segment] += 1
                elif tag == TAG_DELETE:
                    try:
                        s, _ = self.index.pop(key)
                        self.segments[s] -= 1
                        self.compact.add(s)
                    except KeyError:
                        pass
                    self.compact.add(segment)
                elif tag == TAG_COMMIT:
                    continue
                else:
                    report_error('Unexpected tag {} in segment {}'.format(
                        tag, segment))
        # We might need to add a commit tag if no committed segment is found
        if repair and segments_transaction_id is None:
            report_error(
                'Adding commit tag to segment {}'.format(transaction_id))
            self.io.segment = transaction_id + 1
            self.io.write_commit()
        if current_index and not repair:
            if len(current_index) != len(self.index):
                report_error('Index object count mismatch. {} != {}'.format(
                    len(current_index), len(self.index)))
            elif current_index:
                for key, value in self.index.iteritems():
                    if current_index.get(key, (-1, -1)) != value:
                        report_error(
                            'Index mismatch for key {}. {} != {}'.format(
                                key, value, current_index.get(key, (-1, -1))))
        if repair:
            self.compact_segments()
            self.write_index()
        self.rollback()
        return not error_found or repair

    def rollback(self):
        """Discard in-memory transaction state (drops the loaded index)."""
        self.index = None
        self._active_txn = False

    def __len__(self):
        if not self.index:
            self.index = self.open_index(self.get_transaction_id())
        return len(self.index)

    def __contains__(self, id):
        if not self.index:
            self.index = self.open_index(self.get_transaction_id())
        return id in self.index

    def list(self, limit=None, marker=None):
        """Return up to `limit` object ids, starting after `marker`."""
        if not self.index:
            self.index = self.open_index(self.get_transaction_id())
        return [
            id_ for id_, _ in islice(self.index.iteritems(marker=marker),
                                     limit)
        ]

    def get(self, id_):
        """Return the stored data for object `id_`; raises ObjectNotFound."""
        if not self.index:
            self.index = self.open_index(self.get_transaction_id())
        try:
            segment, offset = self.index[id_]
            return self.io.read(segment, offset, id_)
        except KeyError:
            raise self.ObjectNotFound(id_, self.path)

    def get_many(self, ids, is_preloaded=False):
        # `is_preloaded` is accepted for interface parity with remote repos
        for id_ in ids:
            yield self.get(id_)

    def put(self, id, data, wait=True):
        """Store `data` under key `id`, replacing any previous value.
        (NOTE: parameter `id` shadows the builtin — kept for API parity.)"""
        if not self._active_txn:
            self.prepare_txn(self.get_transaction_id())
        try:
            # overwriting: mark the old entry's segment as compactable and
            # record a tombstone for the old value
            segment, _ = self.index[id]
            self.segments[segment] -= 1
            self.compact.add(segment)
            segment = self.io.write_delete(id)
            self.segments.setdefault(segment, 0)
            self.compact.add(segment)
        except KeyError:
            pass
        segment, offset = self.io.write_put(id, data)
        self.segments.setdefault(segment, 0)
        self.segments[segment] += 1
        self.index[id] = segment, offset

    def delete(self, id, wait=True):
        """Remove key `id`; raises ObjectNotFound if absent."""
        if not self._active_txn:
            self.prepare_txn(self.get_transaction_id())
        try:
            segment, offset = self.index.pop(id)
        except KeyError:
            raise self.ObjectNotFound(id, self.path)
        self.segments[segment] -= 1
        self.compact.add(segment)
        segment = self.io.write_delete(id)
        self.compact.add(segment)
        self.segments.setdefault(segment, 0)

    def preload(self, ids):
        """Preload objects (only applies to remote repositories)"""
def __init__(self, filename):
    """Load switch-control configuration from the INI file `filename`.

    Reads the [settings] section (default_url, interface, ip_filter),
    parses switch definitions from [switches], and installs a urllib
    opener with HTTP basic-auth handlers for any per-URL sections.

    :param filename: path of the config file to read
    :raises ConfigError: on a malformed ip_filter, unknown switch
        parameter, or missing basic-auth credentials
    """
    config = RawConfigParser()
    config.read(filename)
    self.switches = []
    self.default_url = Config._get_val(config, 'settings', 'default_url', None)
    self.interface = Config._get_val(config, 'settings', 'interface', '0.0.0.0')
    # ip_filter is "a.b.c.d" or "a.b.c.d/bits"; stored as [int_addr, bits]
    self.ip_filter = Config._get_val(config, 'settings', 'ip_filter',
                                     '0.0.0.0/0').split('/')
    self.ip_filter[0] = struct.unpack('>L',
                                      socket.inet_aton(self.ip_filter[0]))[0]
    if len(self.ip_filter) == 1:
        self.ip_filter.append(32)  # bare address means a /32
    elif len(self.ip_filter) == 2:
        self.ip_filter[1] = int(self.ip_filter[1])
    else:
        raise ConfigError('Bad IP address format specified for IP filter')
    if config.has_section('switches'):
        for cfg, url in config.items('switches'):
            # h/s/b/k are numeric parameters keyed by their suffix letter,
            # p is the on/off power flag
            parsed_cfg = dict(h=None, s=None, b=None, k=None, p=None)
            for param in cfg.lower().split(','):
                if param in ('on', 'off'):
                    parsed_cfg['p'] = param == 'on'
                elif param[-1] in parsed_cfg:
                    parsed_cfg[param[-1]] = int(param[:-1])
                else:
                    raise ConfigError(
                        'Unknown parameter %s while parsing %s = %s' %
                        (param[-1], cfg, url))
            self.switches.append((parsed_cfg, url))
    #special config for specific URLs
    url_openers = []
    for top_level_url in config.sections():
        # BUGFIX: the original condition
        #   `not startswith('http://') and startswith('https://')`
        # skipped https:// sections and processed non-URL ones; the intent is
        # to process only URL-like sections.
        if not top_level_url.startswith(('http://', 'https://')):
            continue
        auth = Config._get_val(config, top_level_url, 'auth', None)
        if auth == 'basic':
            username = Config._get_val(config, top_level_url, 'username', None)
            password = Config._get_val(config, top_level_url, 'password', None)
            if username is None:
                raise ConfigError(
                    "'username' parameter is required when using basic HTTP authentication"
                )
            if password is None:
                raise ConfigError(
                    "'password' parameter is required when using basic HTTP authentication"
                )
            password_mgr = HTTPPasswordMgrWithDefaultRealm()
            password_mgr.add_password(None, top_level_url, username, password)
            url_openers.append(HTTPBasicAuthHandler(password_mgr))
    # module-level side effect: sets the default opener for all urlopen calls
    install_opener(build_opener(*url_openers))
class FieldConfig(object):
    '''
    Each field is represented by a section in the config parser
    For each field there are a set of configurations:

    type = the type of this field, int, float, str, date,
    format = the way the content will be formatted for now really only used to date
    name = an optional name field. If not present the section name will be used.

    If the name field is "_id" then this will be used as the _id field in the
    collection. Only one name =_id can be present in any fieldConfig file.

    The values in this column must be unique in the source data file otherwise
    loading will fail with a duplicate key error.
    '''

    def __init__(self, cfgFilename, delimiter=",", hasheader=True,
                 gen_id="mongodb", onerror="warn"):
        '''
        Constructor

        :param cfgFilename: field-file (.ff) to read; falsy to skip reading
        :param delimiter: CSV delimiter used by get_dict_reader()
        :param hasheader: True if the data file's first line is a header
        :param gen_id: "gen" to generate pid-recordcount _id values
        :param onerror: "fail", "warn" or "ignore" on type-conversion errors
        '''
        self._logger = logging.getLogger(Logger.LOGGER_NAME)
        self._idField = None  # section on which name == _id
        self._tags = ["name", "type", "format"]   # the only valid options per section
        self._cfg = RawConfigParser()
        self._fieldDict = OrderedDict()
        self._names = OrderedDict()
        self._doc_template = OrderedDict()   # fields copied into every generated doc
        self._id = gen_id
        self._delimiter = delimiter
        self._record_count = 0
        self._line_count = 0
        self._timestamp = None
        self._pid = os.getpid()
        self._onerror = onerror
        self._hasheader = hasheader

        if cfgFilename:
            self._fieldDict = self.read(cfgFilename)

    def hasheader(self):
        return self._hasheader

    def add_timestamp(self, timestamp):
        '''
        timestamp = "now" generate time once for all docs
        timestamp = "gen" generate a new timestamp for each doc
        timestamp = "none" don't add timestamp field
        '''
        self._timestamp = timestamp
        if timestamp == "now":
            self._doc_template["timestamp"] = datetime.utcnow()
        return self._doc_template

    def add_filename(self, filename):
        # record only the basename in every generated doc
        self._doc_template["filename"] = os.path.basename(filename)
        return self._doc_template

    def get_dict_reader(self, f):
        # DictReader keyed by the field-file section names
        return csv.DictReader(f, fieldnames=self.fields(),
                              delimiter=self._delimiter)

    def duplicateIDMsg(self, firstSection, secondSection):
        # error text for two sections both claiming name = _id
        msg = textwrap.dedent("""\
        The type defintion '_id" occurs in more that one section (there
        can only be one _id definition). The first section is [%s] and
        the second section is [%s]
        """)
        return msg % (firstSection, secondSection)

    def delimiter(self):
        return self._delimiter

    def read(self, filename):
        '''
        Read fieldfile values into a dictionary without type conversion

        :raises FieldConfigException: unreadable file, unknown option tag,
            or a duplicate name = _id definition
        '''
        fieldDict = OrderedDict()
        result = self._cfg.read(filename)
        if len(result) == 0:
            raise FieldConfigException("Couldn't open '%s'" % filename)
        self._fields = self._cfg.sections()
        for s in self._fields:
            #print( "section: '%s'" % s )
            fieldDict[s] = {}
            for o in self._cfg.options(s):
                #print("option : '%s'" % o )
                if not o in self._tags:
                    raise FieldConfigException(
                        "No such field type: %s in section: %s" % (o, s))
                if (o == "name"):
                    if (self._cfg.get(s, o) == "_id"):
                        # only one section may map to the _id field
                        if self._idField == None:
                            self._idField = s
                        else:
                            raise FieldConfigException(
                                self.duplicateIDMsg(self._idField, s))
                fieldDict[s][o] = self._cfg.get(s, o)
            if not "name" in fieldDict[s]:
                # default the output name to the section name
                fieldDict[s]["name"] = s
        self._fieldDict = fieldDict
        return fieldDict

    def fieldDict(self):
        if self._fieldDict is None:
            raise ValueError(
                "trying retrieve a fieldDict which has a 'None' value")
        else:
            return self._fieldDict

    def fields(self):
        return self._fields

    def hasNewName(self, section):
        # True when the section defines a name different from the section name
        return section != self._fieldDict[section]['name']

    def names(self):
        return self._names

    def typeData(self, fieldName):
        return self._cfg.get(fieldName, "type")

    def formatData(self, fieldName):
        return self._cfg.get(fieldName, "format")

    def nameData(self, fieldName):
        return self._cfg.get(fieldName, "name")

    # @staticmethod
    # def generateFieldFile( csvfile, delimiter=',' ):
    #
    #     with open( csvfile ) as inputfile :
    #         header = inputfile.readline()
    #
    #         for field in header.split( delimiter ):
    #             print( field )

    @staticmethod
    def guess_type(s):
        '''
        Try and convert a string s to an object. Tries int, then float, then
        date parsing, and falls back to str. Returns a (value, type-name) pair.
        '''
        if type(s) != type(""):
            raise ValueError("typeconvert expects a string parameter")
        v = None
        try:
            v = int(s)
            return (v, "int")
        except ValueError:
            pass
        try:
            v = float(s)
            return (v, "float")
        except ValueError:
            pass
        try:
            v = parse(s)  #dateutil.parse.parser
            return (v, "datetime")
        except ValueError:
            pass
        v = str(s)
        return (v, "str")

    def doc_template(self):
        return self._doc_template

    def type_convert(self, v, t):
        '''
        Use type entry for the field in the fieldConfig file (.ff) to
        determine what type conversion to use.
        '''
        v = v.strip()
        if t == "timestamp":
            # NOTE(review): uses datetime.datetime.fromtimestamp while other
            # methods call datetime.utcnow() directly — confirm which form the
            # module's import of datetime actually supports.
            v = datetime.datetime.fromtimestamp(int(v))
        elif t == "int":  #Ints can be floats
            try:
                #print( "converting : '%s' to int" % v )
                v = int(v)
            except ValueError:
                v = float(v)
        elif t == "float":
            v = float(v)
        elif t == "str":
            v = str(v)
        elif t == "datetime" or t == "date":
            if v == "NULL":
                v = None
            else:
                v = parse(v)
        else:
            raise ValueError
        return v

    def createDoc(self, dictEntry):
        '''
        WIP
        Do we make gen id generate a compound key or another field instead of ID

        Build one output document from a DictReader row, applying the
        per-field type conversions and renames.
        '''
        self._record_count = self._record_count + 1
        doc = {}
        doc.update(self._doc_template)
        if self._id == "gen":
            # synthesize an _id unique within this process run
            doc["_id"] = "%i-%i" % (self._pid, self._record_count)
        if self._timestamp == "gen":
            doc['timestamp'] = datetime.utcnow()
        #print( "dictEntry: %s" % dictEntry )
        fieldCount = 0
        for k in self.fields():
            #print( "field: %s" % k )
            #print( "value: %s" % dictEntry[ k ])
            fieldCount = fieldCount + 1
            if dictEntry[k] is None:
                # missing value usually means the row had fewer columns than
                # the field file expects (wrong delimiter?)
                if self._hasheader:
                    self._line_count = self._record_count + 1
                else:
                    self._line_count = self._record_count
                #self._logger.warn( "value for field '%s' at line %i is None which is not valid", k, self._line_count )
                raise ValueError(
                    "value for field '%s' at line %i is None which is not valid (wrong delimiter?)"
                    % (k, self._line_count))
            if k.startswith(
                    "blank-") and self._onerror == "warn":  #ignore blank- columns
                self._logger.info("Field %i is blank [blank-] : ignoring",
                                  fieldCount)
                continue
            #try:
            try:
                type_field = self.typeData(k)
                v = self.type_convert(dictEntry[k], type_field)
            except ValueError:
                # conversion failed: behavior depends on the onerror policy
                if self._onerror == "fail":
                    self._logger.error("Error at line %i at field '%s'",
                                       self._record_count, k)
                    self._logger.error(
                        "type conversion error: Cannot convert '%s' to type %s",
                        dictEntry[k], type_field)
                    raise
                elif self._onerror == "warn":
                    self._logger.info("Parse failure at line %i at field '%s'",
                                      self._record_count, k)
                    self._logger.info(
                        "type conversion error: Cannot convert '%s' to type %s",
                        dictEntry[k], type_field)
                    self._logger.info("Using string type instead")
                    v = str(dictEntry[k])
                elif self._onerror == "ignore":
                    v = str(dictEntry[k])
                else:
                    raise ValueError("Invalid value for onerror: %s" %
                                     self._onerror)
            if self.hasNewName(k):
                doc[self.nameData(k)] = v
            else:
                doc[k] = v
            # except ValueError :
            #     self._logger.error( "Value error parsing field : [%s]" , k )
            #     self._logger.error( "read value is: '%s'", dictEntry[ k ] )
            #     self._logger.error( "line: %i, '%s'", self._record_count, dictEntry )
            #     #print( "ValueError parsing filed : %s with value : %s (type of field: $s) " % ( str(k), str(line[ k ]), str(fieldDict[ k]["type"])))
            #     raise
        return doc

    @staticmethod
    def generate_field_filename(path, ext=".ff"):
        # map data-file path to its sibling field-file path (same stem, .ff)
        if not os.path.isfile(path):
            raise OSError("no such field file '%s'" % path)
        if not ext.startswith('.'):
            ext = "." + ext
        return os.path.splitext(path)[0] + ext

    @staticmethod
    def generate_field_file(path, delimiter=",", ext=".ff"):
        '''
        Take a file name and create a field file name and the corresponding
        field file data from that file by reading the headers and 'sniffing'
        the first line of data.
        '''
        genfilename = FieldConfig.generate_field_filename(path, ext)
        with open(genfilename, "w") as genfile:
            #print( "The field file will be '%s'" % genfilename )
            with open(path, "r") as inputfile:
                header_line = inputfile.readline().rstrip().split(
                    delimiter)  #strip newline
                value_line = inputfile.readline().rstrip().split(delimiter)
                if len(header_line) != len(value_line):
                    raise ValueError(
                        "Header line and next line have different numbers of columns: %i, %i"
                        % (len(header_line), len(value_line)))
            fieldCount = 0
            for i in header_line:
                if i == "":
                    # unnamed column: synthesize a "blank-N" section name
                    i = "blank-%i" % fieldCount
                #print( i )
                i = i.strip()  # strip out white space
                if i.startswith('"'):
                    i = i.strip('"')
                if i.startswith("'"):
                    i = i.strip("'")
                i = i.replace('$', '_')  # not valid keys for mongodb
                i = i.replace('.', '_')  # not valid keys for mongodb
                (_, t) = FieldConfig.guess_type(value_line[fieldCount])
                fieldCount = fieldCount + 1
                genfile.write("[%s]\n" % i)
                genfile.write("type=%s\n" % t)
        return genfilename
def _read_pypirc(self):
    """Reads the .pypirc file.

    Returns a dict with 'server', 'username', 'password', 'repository'
    and 'realm' for the repository selected by ``self.repository`` (or
    the default), or an empty dict when no usable configuration exists.
    """
    rc = self._get_rc_file()
    if os.path.exists(rc):
        self.announce("Using PyPI login from %s" % rc)
        repository = self.repository or self.DEFAULT_REPOSITORY
        config = RawConfigParser()
        config.read(rc)
        sections = config.sections()
        if "distutils" in sections:
            # let's get the list of servers
            index_servers = config.get("distutils", "index-servers")
            _servers = [
                server.strip() for server in index_servers.split("\n")
                if server.strip() != ""
            ]
            if _servers == []:
                # nothing set, let's try to get the default pypi
                if "pypi" in sections:
                    _servers = ["pypi"]
                else:
                    # the file is not properly defined, returning
                    # an empty dict
                    return {}
            for server in _servers:
                current = {"server": server}
                current["username"] = config.get(server, "username")
                # optional params
                for key, default in (
                    ("repository", self.DEFAULT_REPOSITORY),
                    ("realm", self.DEFAULT_REALM),
                    ("password", None),
                ):
                    if config.has_option(server, key):
                        current[key] = config.get(server, key)
                    else:
                        current[key] = default
                # work around people having "repository" for the "pypi"
                # section of their config set to the HTTP (rather than
                # HTTPS) URL
                if server == "pypi" and repository in (
                        self.DEFAULT_REPOSITORY,
                        "pypi",
                ):
                    current["repository"] = self.DEFAULT_REPOSITORY
                    return current
                if (current["server"] == repository
                        or current["repository"] == repository):
                    return current
        elif "server-login" in sections:
            # old format: a single [server-login] section
            server = "server-login"
            if config.has_option(server, "repository"):
                repository = config.get(server, "repository")
            else:
                repository = self.DEFAULT_REPOSITORY
            return {
                "username": config.get(server, "username"),
                "password": config.get(server, "password"),
                "repository": repository,
                "server": server,
                "realm": self.DEFAULT_REALM,
            }
    # no .pypirc, or no server entry matched the requested repository
    return {}
def _read_pypirc(self):
    """Read the user's .pypirc and return credentials for the active repository.

    The result maps 'server', 'username', 'password', 'repository' and
    'realm'; an empty dict is returned when nothing usable is configured.
    """
    rc = self._get_rc_file()
    if not os.path.exists(rc):
        return {}
    self.announce('Using PyPI login from %s' % rc)
    repository = self.repository or self.DEFAULT_REPOSITORY
    config = RawConfigParser()
    config.read(rc)
    sections = config.sections()
    if 'distutils' in sections:
        # new-style file: [distutils] lists the server sections to consult
        raw_servers = config.get('distutils', 'index-servers')
        server_names = [
            line.strip() for line in raw_servers.split('\n') if line.strip()
        ]
        if not server_names:
            if 'pypi' not in sections:
                # the file is not properly defined -> nothing usable
                return {}
            server_names = ['pypi']
        optional_keys = (
            ('repository', self.DEFAULT_REPOSITORY),
            ('realm', self.DEFAULT_REALM),
            ('password', None),
        )
        for name in server_names:
            entry = {'server': name}
            entry['username'] = config.get(name, 'username')
            for key, fallback in optional_keys:
                entry[key] = (config.get(name, key)
                              if config.has_option(name, key) else fallback)
            # work around people having "repository" for the "pypi"
            # section of their config set to the HTTP (rather than
            # HTTPS) URL
            if name == 'pypi' and repository in (self.DEFAULT_REPOSITORY,
                                                 'pypi'):
                entry['repository'] = self.DEFAULT_REPOSITORY
                return entry
            if repository in (entry['server'], entry['repository']):
                return entry
    elif 'server-login' in sections:
        # old-style file: a single [server-login] section
        section = 'server-login'
        if config.has_option(section, 'repository'):
            repository = config.get(section, 'repository')
        else:
            repository = self.DEFAULT_REPOSITORY
        return {
            'username': config.get(section, 'username'),
            'password': config.get(section, 'password'),
            'repository': repository,
            'server': section,
            'realm': self.DEFAULT_REALM,
        }
    # no matching server entry found
    return {}
class Config:
    """A wrapper around RawConfigParser.

    Provides a ``defaults`` attribute of the same type which can be used
    to set default values.
    """

    def __init__(self, version=None, _defaults=True):
        """Use read() to read in an existing config file.

        version should be an int starting with 0 that gets incremented if
        you want to register a new upgrade function. If None, upgrade is
        disabled.
        """

        self._config = ConfigParser(dict_type=_sorted_dict)
        self.defaults = None
        if _defaults:
            self.defaults = Config(_defaults=False)
        self._version = version
        self._loaded_version = None
        self._upgrade_funcs = []

    def _do_upgrade(self, func):
        # Run one registered upgrade function if the loaded version differs
        # from the current one. Both versions must be known at this point.
        assert self._loaded_version is not None
        assert self._version is not None

        old_version = self._loaded_version
        new_version = self._version
        if old_version != new_version:
            print_d("Config upgrade: %d->%d (%r)" % (
                old_version, new_version, func))
            func(self, old_version, new_version)

    def get_version(self):
        """Get the version of the loaded config file (for testing only)

        Raises Error if no file was loaded or versioning is disabled.
        """

        if self._version is None:
            raise Error("Versioning disabled")

        if self._loaded_version is None:
            raise Error("No file loaded")

        return self._loaded_version

    def register_upgrade_function(self, function):
        """Register an upgrade function that gets called at each read()
        if the current config version and the loaded version don't match.

        Can also be registered after read was called.

        function(config, old_version: int, new_version: int) -> None
        """

        if self._version is None:
            raise Error("Versioning disabled")

        self._upgrade_funcs.append(function)
        # after read(), so upgrade now
        if self._loaded_version is not None:
            self._do_upgrade(function)
        return function

    def reset(self, section, option):
        """Reset the value to the default state"""

        assert self.defaults is not None
        # Removing the option makes lookups fall through to the defaults.
        try:
            self._config.remove_option(section, option)
        except NoSectionError:
            pass

    def options(self, section):
        """Returns a list of options available in the specified section."""

        try:
            options = self._config.options(section)
        except NoSectionError:
            if self.defaults:
                return self.defaults.options(section)
            raise
        else:
            if self.defaults:
                try:
                    options.extend(self.defaults.options(section))
                    options = list_unique(options)
                except NoSectionError:
                    pass
            return options

    def get(self, section, option, default=_DEFAULT):
        """get(section, option[, default]) -> str

        If default is not given or set, raises Error in case of an error
        """

        try:
            return self._config.get(section, option)
        except Error:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.get(section, option)
                    except Error:
                        pass
                raise
            return default

    def gettext(self, *args, **kwargs):
        """Like get(), but validates that the value is valid UTF-8 text."""

        value = self.get(*args, **kwargs)
        # make sure there are no surrogates
        value.encode("utf-8")
        return value

    def getbytes(self, section, option, default=_DEFAULT):
        """getbytes(section, option[, default]) -> bytes

        Returns the stored value re-encoded to bytes, round-tripping any
        surrogate escapes. Raises Error unless a default is given.
        """

        try:
            value = self._config.get(section, option)
            value = value.encode("utf-8", "surrogateescape")
            return value
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getbytes(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getboolean(self, section, option, default=_DEFAULT):
        """getboolean(section, option[, default]) -> bool

        If default is not given or set, raises Error in case of an error
        """

        try:
            return self._config.getboolean(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getboolean(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getint(self, section, option, default=_DEFAULT):
        """getint(section, option[, default]) -> int

        If default is not given or set, raises Error in case of an error
        """

        try:
            # parse via float so values like "1.0" still yield an int
            return int(self._config.getfloat(section, option))
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getint(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getfloat(self, section, option, default=_DEFAULT):
        """getfloat(section, option[, default]) -> float

        If default is not given or set, raises Error in case of an error
        """

        try:
            return self._config.getfloat(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getfloat(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getstringlist(self, section, option, default=_DEFAULT):
        """getstringlist(section, option[, default]) -> list

        If default is not given or set, raises Error in case of an error.

        Gets a list of strings, using CSV to parse and delimit.
        """

        try:
            value = self._config.get(section, option)
            parser = csv.reader(
                [value], lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
            try:
                vals = next(parser)
            except (csv.Error, ValueError) as e:
                raise Error(e)
            return vals
        except Error as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getstringlist(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def setstringlist(self, section, option, values):
        """Saves a list of unicode strings using the csv module"""

        sw = StringIO()
        values = [str(v) for v in values]
        writer = csv.writer(sw, lineterminator='\n',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerow(values)
        self.set(section, option, sw.getvalue())

    def setlist(self, section, option, values, sep=","):
        """Saves a list of str using ',' as a separator and \\ for
        escaping"""

        values = [str(v) for v in values]
        joined = join_escape(values, sep)
        self.set(section, option, joined)

    def getlist(self, section, option, default=_DEFAULT, sep=","):
        """Returns a str list saved with setlist()"""

        try:
            value = self._config.get(section, option)
            return split_escape(value, sep)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getlist(section, option, sep=sep)
                    except Error:
                        pass
                raise Error(e)
            return default

    def set(self, section, option, value):
        """Saves the string representation for the passed value

        Don't pass unicode, encode first.
        """

        if isinstance(value, bytes):
            raise TypeError("use setbytes")

        # RawConfigParser only allows string values but doesn't
        # scream if they are not (and it only fails before the
        # first config save..)
        if not isinstance(value, str):
            value = str(value)

        try:
            self._config.set(section, option, value)
        except NoSectionError:
            # create the section on demand if the defaults know about it
            if self.defaults and self.defaults.has_section(section):
                self._config.add_section(section)
                self._config.set(section, option, value)
            else:
                raise

    def settext(self, section, option, value):
        """Like set(), but validates that the value is valid UTF-8 text."""

        value = str(value)
        # make sure there are no surrogates
        value.encode("utf-8")
        self.set(section, option, value)

    def setbytes(self, section, option, value):
        """Store raw bytes, preserving undecodable data via surrogates."""

        assert isinstance(value, bytes)
        value = value.decode("utf-8", "surrogateescape")
        self.set(section, option, value)

    def write(self, filename):
        """Write config to filename.

        Can raise EnvironmentError
        """

        assert isinstance(filename, fsnative)

        mkdir(os.path.dirname(filename))

        # temporary set the new version for saving
        if self._version is not None:
            self.add_section("__config__")
            self.set("__config__", "version", self._version)
        try:
            with atomic_save(filename, "wb") as fileobj:
                temp = StringIO()
                self._config.write(temp)
                data = temp.getvalue().encode("utf-8", "surrogateescape")
                fileobj.write(data)
        finally:
            if self._loaded_version is not None:
                self.set("__config__", "version", self._loaded_version)

    def clear(self):
        """Remove all sections."""

        for section in self._config.sections():
            self._config.remove_section(section)

    def is_empty(self):
        """Whether the config has any sections"""

        return not self._config.sections()

    def read(self, filename):
        """Reads the config from `filename` if the file exists,
        otherwise does nothing

        Can raise EnvironmentError, Error.
        """

        try:
            with open(filename, "rb") as fileobj:
                fileobj = StringIO(fileobj.read().decode(
                    "utf-8", "surrogateescape"))
                # FIX: readfp() was deprecated since Python 3.2 and removed
                # in Python 3.12; read_file() is the drop-in replacement.
                self._config.read_file(fileobj, filename)
        except (IOError, OSError):
            return

        # don't upgrade if we just created a new config
        if self._version is not None:
            self._loaded_version = self.getint("__config__", "version", -1)
            for func in self._upgrade_funcs:
                self._do_upgrade(func)

    def has_option(self, section, option):
        """If the given section exists, and contains the given option"""

        return self._config.has_option(section, option) or (
            self.defaults and self.defaults.has_option(section, option))

    def has_section(self, section):
        """If the given section exists"""

        return self._config.has_section(section) or (
            self.defaults and self.defaults.has_section(section))

    def remove_option(self, section, option):
        """Remove the specified option from the specified section

        Can raise Error.
        """

        return self._config.remove_option(section, option)

    def add_section(self, section):
        """Add a section named section to the instance if it not already
        exists."""

        if not self._config.has_section(section):
            self._config.add_section(section)