def get_firefox_home_file(needed_file):
    """Locate *needed_file* inside the default Firefox profile.

    Returns None when no Firefox config directory exists, '' when a
    directory exists but no profile path could be determined, otherwise
    the full path to *needed_file* inside the profile.
    """
    candidates = ("~/.mozilla/firefox-3.5/", "~/.mozilla/firefox/")
    firefox_dir = None
    for candidate in (os.path.expanduser(p) for p in candidates):
        if os.path.exists(candidate):
            firefox_dir = candidate
            break
    if firefox_dir is None:
        # no Firefox configuration directory at all
        return None
    config = RawConfigParser({"Default": 0})
    config.read(os.path.join(firefox_dir, "profiles.ini"))
    path = None
    for section in config.sections():
        if config.has_option(section, "Default") and config.get(section, "Default") == "1":
            # the default profile wins outright
            path = config.get(section, "Path")
            break
        if path is None and config.has_option(section, "Path"):
            # otherwise remember the first profile that has a Path
            path = config.get(section, "Path")
    if path is None:
        return ""
    if path.startswith("/"):
        # absolute profile path
        return os.path.join(path, needed_file)
    return os.path.join(firefox_dir, path, needed_file)
def get_opener():
    """Build a urllib2 opener, honouring any proxy settings in yum.conf."""
    plain_opener = urllib2.build_opener()
    if not exists(YUM_CONF):
        return plain_opener
    config = RawConfigParser()
    config.read(YUM_CONF)
    if not config.has_section('main') or not config.has_option('main', 'proxy'):
        return plain_opener
    url = config.get('main', 'proxy').strip()
    if not url:
        return plain_opener
    proxy_handler = urllib2.ProxyHandler({'http': url, 'https': url})
    # urllib2 can open HTTPS resources through a proxy since python 2.6.3,
    # which is fine on CentOS (Python 2.6.6).
    has_credentials = (config.has_option('main', 'proxy_username')
                       and config.has_option('main', 'proxy_password'))
    if has_credentials:
        username = config.get('main', 'proxy_username').strip()
        password = config.get('main', 'proxy_password').strip()
        pw_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        pw_manager.add_password(None, url, username, password)
        auth_handler = urllib2.ProxyBasicAuthHandler(pw_manager)
        return urllib2.build_opener(proxy_handler, auth_handler)
    return urllib2.build_opener(proxy_handler)
def __read_config(self):
    """Read this person's configuration from its .person file.

    Returns True when the [general] configuration is valid.
    (Original docstring was Spanish; translated.)
    """
    parser = RawConfigParser()
    cfg_path = "config/" + self.person + '.person'
    with codecs.open(cfg_path, 'r', encoding='utf-8') as handle:
        parser.readfp(handle)
    if parser.has_section("general"):
        # a non-empty name is mandatory
        if not parser.has_option('general', 'name'):
            return False
        self.name = parser.get('general', 'name')
        if self.name == '':
            return False
        if parser.has_option('general', 'notify'):
            self.notify = parser.getboolean('general', 'notify')
        if parser.has_option('general', 'alarm_threshold'):
            self.alarm_threshold = parser.getint('general', 'alarm_threshold')
        # an email option is mandatory; a blank one is only fatal when
        # notifications are enabled
        if not parser.has_option('general', 'email'):
            return False
        self.email = parser.get('general', 'email')
        if self.email == '' and self.notify:
            return False
    return True
class Configuration:
    """Thin wrapper around RawConfigParser with defaulting getters.

    (Original comments were Chinese; translated. Exception messages are
    kept verbatim.)
    """

    def __init__(self, configfile):
        # CONF_HOME, when set, points at the directory holding config files.
        conf_home = os.getenv("CONF_HOME")
        if conf_home:
            self._configFile = "%s/%s" % (conf_home, configfile)
        else:
            self._configFile = configfile
        self._genConf()

    def _setConfigFile(self, configFile=None):
        """Point this object at a different configuration file and reload."""
        self._configFile = configFile
        if not self._configFile:
            raise Exception("配置文件不存在")
        self._genConf()

    def _genConf(self):
        """(Re)load the parser from the current configuration file."""
        if not self._configFile:
            raise Exception("没有配置文件")
        self._config = RawConfigParser()
        self._config.read(self._configFile)

    def get(self, sect, opt, default=None):
        """Return option *opt* from section *sect*, or *default* when absent."""
        if self._config.has_option(sect, opt):
            return self._config.get(sect, opt)
        return default

    def getint(self, sect, opt, default=None):
        """Integer variant of get()."""
        if self._config.has_option(sect, opt):
            return self._config.getint(sect, opt)
        return default

    def items(self, sect):
        """Return all (name, value) pairs of section *sect*."""
        return self._config.items(sect)
class ParamStore(object):
    """INI-file backed key/value store with lazy write-back to disk."""

    def __init__(self, root_dir, file_name):
        if not os.path.isdir(root_dir):
            os.makedirs(root_dir)
        self._path = os.path.join(root_dir, file_name)
        self._dirty = False
        # load existing values, if any
        self._config = RawConfigParser()
        self._config.read(self._path)

    def __del__(self):
        # best effort: persist pending changes on garbage collection
        self.flush()

    def flush(self):
        """Write pending changes back to the backing file."""
        if not self._dirty:
            return
        self._dirty = False
        handle = open(self._path, "w")
        self._config.write(handle)
        handle.close()

    def get(self, section, option, default=None):
        """Get a parameter value and return a string.

        If default is specified and section or option are not defined in
        the file, they are created and set to default, which is then the
        return value.
        """
        if self._config.has_option(section, option):
            return self._config.get(section, option)
        if default is not None:
            self.set(section, option, default)
        return default

    def get_datetime(self, section, option, default=None):
        """Like get(), but parse a found value with safestrptime()."""
        raw = self.get(section, option, default)
        if raw:
            return safestrptime(raw)
        return raw

    def set(self, section, option, value):
        """Set option in section to string value."""
        if not self._config.has_section(section):
            self._config.add_section(section)
        elif (self._config.has_option(section, option)
                and self._config.get(section, option) == value):
            # unchanged: avoid marking the store dirty
            return
        self._config.set(section, option, value)
        self._dirty = True

    def unset(self, section, option):
        """Remove option from section (and the section itself once empty)."""
        if not self._config.has_section(section):
            return
        if self._config.has_option(section, option):
            self._config.remove_option(section, option)
            self._dirty = True
        if not self._config.options(section):
            self._config.remove_section(section)
            self._dirty = True
def __read_config(self):
    """Read this person's crawling configuration from its .person file.

    Returns True when the [metadata_crawling] section is valid.
    (Original docstring was Spanish; translated.)
    """
    cfg = RawConfigParser()
    with codecs.open("config/" + self.person + '.person', 'r', encoding='utf-8') as handle:
        cfg.readfp(handle)
    if not cfg.has_section("metadata_crawling"):
        return False
    # 'files' is mandatory and must contain at least one entry
    if not cfg.has_option('metadata_crawling', 'files'):
        return False
    self.__files = [entry.strip()
                    for entry in cfg.get('metadata_crawling', 'files').split(',')]
    if self.__files == ['']:
        return False
    if cfg.has_option('metadata_crawling', 'weight'):
        self.__weight = cfg.getint('metadata_crawling', 'weight')
    # 'access_log' and 'access_log_format' are mandatory and non-empty
    if not cfg.has_option('metadata_crawling', 'access_log'):
        return False
    self.__access_log = cfg.get('metadata_crawling', 'access_log')
    if not self.__access_log:
        return False
    if not cfg.has_option('metadata_crawling', 'access_log_format'):
        return False
    self.__access_log_format = cfg.get('metadata_crawling', 'access_log_format')
    if not self.__access_log_format:
        return False
    return True
def __read_config(self):
    """Read this person's web-bug configuration from its .person file.

    Returns True when the [web_bug] section is valid.
    (Original docstring was Spanish; translated.)
    """
    cfg = RawConfigParser()
    with codecs.open("config/" + self.person + '.person', 'r', encoding='utf-8') as handle:
        cfg.readfp(handle)
    if not cfg.has_section("web_bug"):
        return False
    # 'search_terms' is mandatory and must be non-empty
    if not cfg.has_option("web_bug", 'search_terms'):
        return False
    self.__search_terms = cfg.get("web_bug", 'search_terms')
    if self.__search_terms == '':
        return False
    # optional weights
    if cfg.has_option("web_bug", 'weight'):
        self.__weight = cfg.getint("web_bug", 'weight')
    if cfg.has_option("web_bug", 'weight_no_search_terms'):
        self.__weight_no_search_terms = cfg.getint("web_bug", 'weight_no_search_terms')
    if cfg.has_option("web_bug", 'weight_visit'):
        self.__weight_visit = cfg.getint("web_bug", 'weight_visit')
    # 'webbug_log' is mandatory and must contain at least one entry
    if not cfg.has_option("web_bug", 'webbug_log'):
        return False
    self.__webbug_log = [entry.strip()
                         for entry in cfg.get("web_bug", 'webbug_log').split(',')]
    if self.__webbug_log == ['']:
        return False
    return True
def get_firefox_profiles(self, directory):
    """Return the list of profile paths declared in *directory*/profiles.ini."""
    parser = RawConfigParser()
    profiles = []
    try:
        parser.read(os.path.join(directory, 'profiles.ini'))
        for section in parser.sections():
            if not section.startswith('Profile') or not parser.has_option(section, 'Path'):
                continue
            raw_path = parser.get(section, 'Path').strip()
            profile_path = None
            if not parser.has_option(section, 'IsRelative'):
                # no "IsRelative" key in profiles.ini: treat as relative
                profile_path = os.path.join(directory, raw_path)
            elif parser.get(section, 'IsRelative') == '1':
                profile_path = os.path.join(directory, raw_path)
            elif parser.get(section, 'IsRelative') == '0':
                profile_path = raw_path
            if profile_path:
                profiles.append(profile_path)
    except Exception as e:
        self.error(u'An error occurred while reading profiles.ini: {}'.format(e))
    return profiles
def init_config(config_file, _reactor, group):
    '''Initialize configuration.

    The configuration file should be in the form:

    [transports]
    ipN=host:port
    serN=COMX

    An optional [global] section may set debug, smtphost, smtpfrom
    and smtpto.
    '''
    cp = RawConfigParser()
    cp.read(config_file)
    # NOTE: the mere presence of 'debug' enables debug mode, regardless
    # of its value.
    if cp.has_option('global', 'debug'):
        global_config.loop_interval = 5.0
        global_config.debug = True
    if cp.has_option('global', 'smtphost'):
        global_config.smtphost = cp.get('global', 'smtphost')
    if cp.has_option('global', 'smtpfrom'):
        global_config.smtpfrom = cp.get('global', 'smtpfrom')
    if cp.has_option('global', 'smtpto'):
        tos = [x.strip() for x in cp.get('global', 'smtpto').split(',')]
        global_config.smtpto = tos
    section = 'transports'
    # FIX: guard against a missing [transports] section instead of
    # letting cp.options() raise NoSectionError.
    if not cp.has_section(section):
        return
    for op in cp.options(section):
        value = cp.get(section, op)
        if op.startswith('ip'):
            # ipN = host:port -> TCP transport
            ip, port = value.split(':')
            _reactor.connectTCP(ip, int(port), group.factory())
        elif op.startswith('ser'):
            # serN = COMX -> serial transport
            serialport.SerialPort(group.protocol(), value, _reactor)
def _parse_legacy_config_file(self):
    """
    Parse a legacy configuration file: templates, logging level,
    key bindings and colors.
    """
    conf = RawConfigParser()
    conf.read(LEGACY_CONFIG_FILE)
    styles = self.styles.copy()
    for template in ('dm_template', 'header_template'):
        if conf.has_option('params', template):
            styles[template] = conf.get('params', template)
    self.styles.update(styles)
    if conf.has_option('params', 'logging_level'):
        self.logging_level = conf.getint('params', 'logging_level')
    # custom key bindings
    for binding in self.key_bindings:
        if conf.has_option('keys', binding):
            self._set_key_binding(binding, conf.get('keys', binding))
    # custom foreground colors for every palette label
    for label in (color[0] for color in PALETTE):
        if conf.has_option('colors', label):
            self._set_color(label, conf.get('colors', label))
def _parse_config(self, checkout_path="."):
    """Parse a gitolite-style config checkout into a dict.

    Returns {"repositories": ..., "usergroups": ..., "keys": ...} where
    "keys" maps each user (one *.pub file in keydir/) to its key lines,
    and "repositories" maps each repo to its rw/r members and their keys.
    """
    gconf_path = os.path.join(checkout_path, self.CONFIG_NAME)
    gconf = RawConfigParser()
    gconf.read([gconf_path])
    rs = {"repositories": {}, "usergroups": {}, "keys": {}}
    # Collect every *.pub file in keydir/ as a user's key set.
    keydir = os.path.join(checkout_path, "keydir")
    for entry in os.listdir(keydir):
        if not entry.endswith(".pub"):
            continue
        user = entry.partition(".pub")[0]
        rs["keys"][user] = {}
        # 'with' closes the handle even on error (original leaked it on
        # exception)
        with open(os.path.join(keydir, entry)) as fd:
            for key in fd.readlines():
                key_name = user + "_key_" + str(len(rs["keys"][user]))
                rs["keys"][user][key_name] = key
    for section in gconf.sections():
        for option, translated_option in [("writable", "rw"), ("readonly", "r")]:
            if not gconf.has_option(section, option):
                continue
            for repo_name in gconf.get(section, option).strip().split():
                # FIX: dict.has_key() was removed in Python 3; use `in`
                # (equivalent on Python 2).
                if repo_name not in rs["repositories"]:
                    rs["repositories"][repo_name] = {}
                if gconf.has_option(section, "members"):
                    rs["repositories"][repo_name][translated_option] = {}
                    for member in gconf.get(section, "members").split():
                        rs["repositories"][repo_name][translated_option][member] = \
                            rs["keys"][member].keys()
    return rs
def get_addressbook_dir():
    '''Get path to addressbook file from default profile.'''
    thunderbird_home = None
    tprofile = None
    for home, profile in THUNDERBIRD_PROFILES:
        if os.path.isfile(profile):
            thunderbird_home = home
            tprofile = profile
            break
    if not thunderbird_home:
        return None
    config = RawConfigParser()
    config.read(tprofile)
    path = None
    for section in config.sections():
        is_default = (config.has_option(section, "Default")
                      and config.get(section, "Default") == "1"
                      and config.has_option(section, "Path"))
        if is_default:
            # the default profile wins outright
            path = config.get(section, "Path")
            break
        if config.has_option(section, "Path"):
            # otherwise the last section with a Path wins
            path = config.get(section, "Path")
    if path:
        path = os.path.join(thunderbird_home, path)
    return path
class Config(object):
    """Persisted mr.developer state, stored in .mr.developer.cfg."""

    def __init__(self, buildout_dir):
        self.cfg_path = os.path.join(buildout_dir, '.mr.developer.cfg')
        self._config = RawConfigParser()
        # keep option (package) names case-sensitive
        self._config.optionxform = lambda s: s
        self._config.read(self.cfg_path)
        self.develop = {}
        self.buildout_args = []
        self.rewrites = []
        if self._config.has_section('develop'):
            for package, value in self._config.items('develop'):
                value = value.lower()
                if value == 'true':
                    self.develop[package] = True
                elif value == 'false':
                    self.develop[package] = False
                elif value == 'auto':
                    self.develop[package] = 'auto'
                else:
                    raise ValueError("Invalid value in 'develop' section of '%s'" % self.cfg_path)
        if self._config.has_option('buildout', 'args'):
            args = self._config.get('buildout', 'args').split("\n")
            for arg in args:
                arg = arg.strip()
                # strip the repr()-style quoting applied by save()
                if arg.startswith("'") and arg.endswith("'"):
                    arg = arg[1:-1].replace("\\'", "'")
                elif arg.startswith('"') and arg.endswith('"'):
                    arg = arg[1:-1].replace('\\"', '"')
                self.buildout_args.append(arg)
        (self.buildout_options, self.buildout_settings, _) = \
            parse_buildout_args(self.buildout_args[1:])
        if self._config.has_option('mr.developer', 'rewrites'):
            for rewrite in self._config.get('mr.developer', 'rewrites').split('\n'):
                self.rewrites.append(rewrite.split())

    def save(self):
        """Write the current state back to .mr.developer.cfg."""
        self._config.remove_section('develop')
        self._config.add_section('develop')
        for package in sorted(self.develop):
            state = self.develop[package]
            # FIX: the original used `state is 'auto'`, an identity
            # comparison against a string literal that only works by
            # CPython interning; use equality instead.
            if state == 'auto':
                self._config.set('develop', package, 'auto')
            elif state is True:
                self._config.set('develop', package, 'true')
            elif state is False:
                self._config.set('develop', package, 'false')
        if not self._config.has_section('buildout'):
            self._config.add_section('buildout')
        options, settings, args = parse_buildout_args(self.buildout_args[1:])
        # don't store the options when a command was in there
        if not len(args):
            self._config.set('buildout', 'args',
                             "\n".join(repr(x) for x in self.buildout_args))
        if not self._config.has_section('mr.developer'):
            self._config.add_section('mr.developer')
        self._config.set('mr.developer', 'rewrites',
                         "\n".join(" ".join(x) for x in self.rewrites))
        # FIX: close the file handle explicitly (the original leaked it)
        f = open(self.cfg_path, "w")
        try:
            self._config.write(f)
        finally:
            f.close()
class ConfigStore(object):
    """Per-user .ini settings store with debounced (3s) auto-save."""

    def __init__(self, name):
        self.config = RawConfigParser()
        self.file_opts = {}
        if sys.version_info[0] >= 3:
            self.file_opts['encoding'] = 'utf-8'
        if hasattr(appdirs, 'user_config_dir'):
            data_dir = appdirs.user_config_dir('photini')
        else:
            data_dir = appdirs.user_data_dir('photini')
        if not os.path.isdir(data_dir):
            # FIX: 0700 is a SyntaxError on Python 3; 0o700 works on
            # Python 2.6+ and 3.x alike (private to the user).
            os.makedirs(data_dir, mode=0o700)
        self.file_name = os.path.join(data_dir, '%s.ini' % name)
        if name == 'editor':
            # migrate settings from legacy file locations
            for old_file_name in (os.path.expanduser('~/photini.ini'),
                                  os.path.join(data_dir, 'photini.ini')):
                if os.path.exists(old_file_name):
                    self.config.read(old_file_name, **self.file_opts)
                    self.save()
                    os.unlink(old_file_name)
        self.config.read(self.file_name, **self.file_opts)
        # debounce saves: write at most once per 3s of inactivity
        self.timer = QtCore.QTimer()
        self.timer.setSingleShot(True)
        self.timer.setInterval(3000)
        self.timer.timeout.connect(self.save)
        self.has_section = self.config.has_section

    def get(self, section, option, default=None):
        """Return a value; when absent and *default* given, store and
        return the default."""
        if self.config.has_option(section, option):
            result = self.config.get(section, option)
            if sys.version_info[0] < 3:
                return result.decode('utf-8')
            return result
        if default is not None:
            self.set(section, option, default)
        return default

    def set(self, section, option, value):
        """Set a value and schedule a save (no-op when unchanged)."""
        if not self.config.has_section(section):
            self.config.add_section(section)
        if (self.config.has_option(section, option)
                and self.config.get(section, option) == value):
            return
        if sys.version_info[0] < 3:
            value = value.encode('utf-8')
        self.config.set(section, option, value)
        self.timer.start()

    def remove_section(self, section):
        """Drop a whole section and schedule a save."""
        if not self.config.has_section(section):
            return
        for option in self.config.options(section):
            self.config.remove_option(section, option)
        self.config.remove_section(section)
        self.timer.start()

    def save(self):
        # FIX: use a context manager so the handle is closed promptly
        # (the original never closed it), and 0o600 instead of the
        # Python-3-invalid 0600 literal.
        with open(self.file_name, 'w', **self.file_opts) as of:
            self.config.write(of)
        os.chmod(self.file_name, 0o600)
def load_config():
    """(Re)load the global CONFIG parser and ensure credential defaults exist."""
    global CONFIG
    CONFIG = RawConfigParser()
    CONFIG.read(CONFIG_FILE)
    # make sure both credential options exist, even if blank
    for option in ("user", "password"):
        if not CONFIG.has_option("DEFAULT", option):
            CONFIG.set("DEFAULT", option, "")
def parse_token_file(self, token_file):
    """Load the OAuth token and secret from *token_file*, when present."""
    conf = RawConfigParser()
    conf.read(token_file)
    # option names match the attribute names they populate
    for option in ('oauth_token', 'oauth_token_secret'):
        if conf.has_option(SECTION_TOKEN, option):
            setattr(self, option, conf.get(SECTION_TOKEN, option))
def _parse_legacy_token_file(self):
    """Load OAuth credentials from the legacy token file.

    Mirrors parse_token_file().
    """
    conf = RawConfigParser()
    conf.read(LEGACY_TOKEN_FILE)
    if conf.has_option(SECTION_TOKEN, 'oauth_token'):
        self.oauth_token = conf.get(SECTION_TOKEN, 'oauth_token')
    # FIX: the original checked has_option for 'oauth_token' again
    # (copy-paste bug), so the secret was only loaded when a token
    # happened to be present too.
    if conf.has_option(SECTION_TOKEN, 'oauth_token_secret'):
        self.oauth_token_secret = conf.get(SECTION_TOKEN, 'oauth_token_secret')
class AUSConfig(object):
    """Application configuration with validation and typed accessors."""

    required_options = {
        'logging': ['logfile'],
        'database': ['dburi']
    }
    # Originally, this was done with getattr(logging, level), but it seems bad
    # to look up, and possibly return, arbitrary keys from a config file so it
    # was replaced with this simple mapping.
    loglevels = {
        'DEBUG': logging.DEBUG,
        'INFO': logging.INFO,
        'WARNING': logging.WARNING,
        'ERROR': logging.ERROR,
        'CRITICAL': logging.CRITICAL,
    }

    def __init__(self, filename):
        self.cfg = RawConfigParser()
        self.cfg.read(filename)

    def validate(self):
        """Return a list of human-readable errors for missing required
        sections/options (empty list when valid)."""
        errors = []
        for section, options in self.required_options.items():
            if not self.cfg.has_section(section):
                errors.append("Missing section '%s'" % section)
            for opt in options:
                if not self.cfg.has_option(section, opt):
                    errors.append("Missing option '%s' from section '%s'" % (opt, section))
        return errors

    def getLogfile(self):
        return self.cfg.get("logging", "logfile")

    def getLogLevel(self):
        """Return the configured log level, defaulting to WARNING."""
        try:
            return self.loglevels[self.cfg.get("logging", "level")]
        # NoOptionError is raised when we can't find the level in the config.
        # KeyError is raised when we can't find it in the mapping.
        # FIX: NoSectionError is raised when the whole [logging] section
        # is absent; the original let it propagate instead of defaulting.
        except (NoSectionError, NoOptionError, KeyError):
            return logging.WARNING

    def getDburi(self):
        return self.cfg.get('database', 'dburi')

    def getCefLogfile(self):
        """Return the CEF logfile path, defaulting to "cef.log"."""
        if self.cfg.has_option('logging', 'cef_logfile'):
            return self.cfg.get('logging', 'cef_logfile')
        else:
            return "cef.log"

    def getDomainWhitelist(self):
        """Return the configured domain whitelist as a tuple (may be empty)."""
        try:
            return tuple(a.strip() for a in
                         self.cfg.get('site-specific', 'domain_whitelist').split(','))
        except (NoSectionError, NoOptionError):
            return tuple()
def read_pypirc(repository=DEFAULT_REPOSITORY, realm=DEFAULT_REALM):
    """Reads the .pypirc file."""
    rc = get_pypirc_path()
    if not os.path.exists(rc):
        return {}
    config = RawConfigParser()
    config.read(rc)
    sections = config.sections()
    if 'distutils' in sections:
        # new-style: [distutils] lists the index servers
        index_servers = config.get('distutils', 'index-servers')
        _servers = [server.strip() for server in index_servers.split('\n')
                    if server.strip() != '']
        if not _servers:
            # nothing set, let's try to get the default pypi
            if 'pypi' not in sections:
                # the file is not properly defined, returning an empty dict
                return {}
            _servers = ['pypi']
        for server in _servers:
            current = {'server': server}
            current['username'] = config.get(server, 'username')
            # optional params with their defaults
            for key, default in (('repository', DEFAULT_REPOSITORY),
                                 ('realm', DEFAULT_REALM),
                                 ('password', None)):
                if config.has_option(server, key):
                    current[key] = config.get(server, key)
                else:
                    current[key] = default
            if repository in (current['server'], current['repository']):
                return current
    elif 'server-login' in sections:
        # old format
        server = 'server-login'
        if config.has_option(server, 'repository'):
            repository = config.get(server, 'repository')
        else:
            repository = DEFAULT_REPOSITORY
        return {'username': config.get(server, 'username'),
                'password': config.get(server, 'password'),
                'repository': repository,
                'server': server,
                'realm': DEFAULT_REALM}
    return {}
class Config(object):
    """An in-memory container of the API configuration."""

    __section_name = "config"

    def __init__(self, config_path):
        """Create an in-memory representation of the config.

        Read the config from `config_path`. If it is invalid, create an
        empty config representation.
        """
        self.__parser = RawConfigParser()
        self.__parser.read(config_path)
        if not self.__parser.has_section(self.__section_name):
            self.__parser.add_section(self.__section_name)

    def get(self, name, default=None, must_exist=False):
        """Return a parameter value.

        Return the value of a parameter `name`. If it is empty or doesn't
        exist, return `default`, or raise an exception if `must_exist`
        is ``True``.
        """
        if self.__parser.has_option(self.__section_name, name):
            value = self.__parser.get(self.__section_name, name)
            if value:
                return value
        if must_exist:
            raise MyGengoException("Configuration field: %s is missing or empty." % name)
        return default

    def set(self, name, value):
        """Set a parameter value.

        Set the value of a parameter `name` to `value`. The value is
        changed only in memory.
        """
        self.__parser.set(self.__section_name, name, value)

    def __getattr__(self, name):
        # FIX: guard against infinite recursion when the parser attribute
        # itself is missing (e.g. during copy/unpickling before __init__
        # runs); the original left this check commented out.
        if name == '_Config__parser':
            raise AttributeError(name)
        if self.__parser.has_option(self.__section_name, name):
            return self.__parser.get(self.__section_name, name)
        raise AttributeError(name)

    def __setattr__(self, name, value):
        # the parser itself goes into __dict__; everything else is a
        # config parameter
        if name == "_Config__parser":
            self.__dict__[name] = value
        else:
            self.__parser.set(self.__section_name, name, value)
def getprefs(configfile, tkobj, PERSIST):
    """Return the preferences dict, seeded with sane defaults and
    overridden from *configfile* when it exists and PERSIST is true.

    To keep things simple for possible future preference
    additions/deletions: try to stick to -
    TK Widget name = prefs dictionary key = ini.get|set name.
    EX: mobipath = prefs['mobipath'] = config.get('Defaults', mobipath).
    """
    prefs = {}
    # Sane defaults
    prefs['mobipath'] = os.getcwdu()
    prefs['outpath'] = os.getcwdu()
    prefs['apnxpath'] = os.getcwdu()
    prefs['splitvar'] = 0
    prefs['rawvar'] = 0
    prefs['dbgvar'] = 0
    prefs['hdvar'] = 0
    prefs['epubver'] = 0
    # center the default window on the screen
    tkobj.update_idletasks()
    w = tkobj.winfo_screenwidth()
    h = tkobj.winfo_screenheight()
    rootsize = (605, 575)
    x = w/2 - rootsize[0]/2
    y = h/2 - rootsize[1]/2
    prefs['windowgeometry'] = (u'%dx%d+%d+%d' % (rootsize + (x, y)))
    if os.path.exists(configfile) and PERSIST:
        config = RawConfigParser()
        try:
            with codecs.open(configfile, 'r', 'utf-8') as f:
                config.readfp(f)
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; an unreadable config falls back to defaults.
        except Exception:
            return prefs
        # Python 2.x's ConfigParser module will not save unicode strings to an
        # ini file (at least on Windows) no matter how hard you try to smack
        # it around and scare it into doing so.
        # The workaround (to support unicode path preferences) is to encode
        # the file path using the unicode_escape 'codec' when writing, and to
        # decode using the unicode_escape codec when reading.
        if config.has_option('Defaults', 'mobipath'):
            prefs['mobipath'] = config.get('Defaults', 'mobipath').decode('unicode_escape')
        if config.has_option('Defaults', 'outpath'):
            prefs['outpath'] = config.get('Defaults', 'outpath').decode('unicode_escape')
        if config.has_option('Defaults', 'apnxpath'):
            prefs['apnxpath'] = config.get('Defaults', 'apnxpath').decode('unicode_escape')
        # integer flags share the same read pattern
        for intkey in ('splitvar', 'rawvar', 'dbgvar', 'hdvar', 'epubver'):
            if config.has_option('Defaults', intkey):
                prefs[intkey] = config.getint('Defaults', intkey)
        if config.has_option('Geometry', 'windowgeometry'):
            prefs['windowgeometry'] = config.get('Geometry', 'windowgeometry')
    return prefs
def read_cfg(self):
    """Return the parsed config; create a default one when it is absent
    or incomplete."""
    config = RawConfigParser()
    if exists(GMAN_CONFIG):
        with open(GMAN_CONFIG, 'r') as cfgfile:
            config.readfp(cfgfile)
        complete = (config.has_section('Preferences')
                    and config.has_option('Preferences', 'resultlimit')
                    and config.has_option('Preferences', 'searchchar'))
        if complete:
            return config
    # missing or incomplete config: write (and return) the defaults
    return self.write_cfg(int(GMAN_DEFAULT_RESULTLIMIT), GMAN_DEFAULT_SEARCHCHAR)
def _get_autoreload_programs(self, cfg_file):
    """Get the set of programs to auto-reload when code changes.

    Such programs will have autoreload=true in their config section.
    This can be affected by config file sections or command-line
    arguments, so we need to read it out of the merged config.
    """
    cfg = RawConfigParser()
    cfg.readfp(cfg_file)
    reload_progs = {}
    graceful_reload_progs = {}
    for section in cfg.sections():
        if not section.startswith("program:"):
            continue
        prog_name = section.split(":", 1)[1]
        try:
            if cfg.has_option(section, "autoreload_patterns"):
                patterns = cfg.get(section, "autoreload_patterns").split(",")
            else:
                patterns = AUTORELOAD_PATTERNS
            if cfg.getboolean(section, "autoreload"):
                # graceful reloads go in their own bucket
                if cfg.getboolean(section, "autoreload_graceful"):
                    graceful_reload_progs[prog_name] = patterns
                else:
                    reload_progs[prog_name] = patterns
        except NoOptionError:
            # programs without the autoreload options are simply skipped
            pass
    return (reload_progs, graceful_reload_progs)
def getFilenameInConfigFile(self, root_path, name):
    """Get filename field association in config file.

    @root_path: Path where config file is stored
    @param name: Field name
    """
    path = os.path.join(root_path, self.cfg_filename)
    if not os.path.exists(path):
        # config file doesn't exist
        return None
    # parse the file and pull the requested value out of it
    value = None
    fd = open(path, 'r')
    try:
        parser = RawConfigParser()
        parser.readfp(fd)
        if (parser.has_section(self.cfg_filename_section)
                and parser.has_option(self.cfg_filename_section, name)):
            value = parser.get(self.cfg_filename_section, name)
    finally:
        fd.close()
    return value
class Configuration(object):
    """
    Singleton class to access application's configuration.
    """

    def __init__(self):
        self.parser = RawConfigParser()
        config_file = os.path.join(get_install_dir(), 'config.ini')
        self.parser.read(config_file)

    def get(self, section, option):
        return self.parser.get(section, option)

    def getboolean(self, section, option):
        return self.parser.getboolean(section, option)

    def getfloat(self, section, option):
        # FIX: the original called the non-existent parser method
        # 'getfload', which raised AttributeError at runtime.
        return self.parser.getfloat(section, option)

    def getint(self, section, option):
        return self.parser.getint(section, option)

    def has_option(self, section, option):
        return self.parser.has_option(section, option)

    def has_section(self, section):
        return self.parser.has_section(section)
def check_file(self, pkg, filename):
    """Validate a .desktop file and warn when its Exec binary is missing."""
    root = pkg.dirName()
    f = root + filename
    st = getstatusoutput(('desktop-file-validate', f), True)
    if st[0]:
        # report each individual validation error when we can parse them
        reported = False
        for line in st[1].splitlines():
            if 'error: ' in line:
                printError(pkg, 'invalid-desktopfile', filename,
                           line.split('error: ')[1])
                reported = True
        if not reported:
            printError(pkg, 'invalid-desktopfile', filename)
    if not is_utf8(f):
        printError(pkg, 'non-utf8-desktopfile', filename)
    cfp = RawConfigParser()
    cfp.read(f)
    binary = None
    if cfp.has_option('Desktop Entry', 'Exec'):
        binary = cfp.get('Desktop Entry', 'Exec').split(' ', 1)[0]
    if binary:
        if binary.startswith('/'):
            found = os.path.exists(root + binary)
        else:
            found = False
            for bindir in STANDARD_BIN_DIRS:
                if os.path.exists(root + bindir + binary):
                    # no need to check if the binary is +x, rpmlint does it
                    # in another place
                    found = True
                    break
        if not found:
            printWarning(pkg, 'desktopfile-without-binary', filename, binary)
def parse_config(filename, dirs=None):
    """Read *filename* (searched across *dirs* when given) and return
    the tuple (meta, vars, sections, requires)."""
    if dirs:
        filenames = [os.path.join(d, filename) for d in dirs]
    else:
        filenames = [filename]
    config = RawConfigParser()
    parsed = config.read(filenames)
    if len(parsed) < 1:
        raise PkgNotFound("Could not find file(s) %s" % str(filenames))
    # Parse meta and variables sections
    meta = parse_meta(config)
    vars = {}
    if config.has_section('variables'):
        for name, value in config.items("variables"):
            vars[name] = _escape_backslash(value)
    # Parse "normal" sections
    sections = {}
    requires = {}
    for s in config.sections():
        if s in ('meta', 'variables'):
            continue
        if config.has_option(s, "requires"):
            requires[s] = config.get(s, 'requires')
        sections[s] = dict(config.items(s))
    return meta, vars, sections, requires
def render(self):
    """Parse the config file at self.path into a Store, enforcing the
    declared required sections and options."""
    try:
        store = Store()
        with open(self.path, 'r') as f:
            config = RawConfigParser()
            config.readfp(f, self.path)
            # copy whole declared sections into the store
            for section, (path, required) in self._sections.iteritems():
                if config.has_section(section):
                    for name, value in config.items(section):
                        store.append(path + '.' + name, value)
                elif required:
                    raise ConfigureError("missing required section %s" % section)
            # copy individually declared options into the store
            for (section, option), (path, required) in self._options.iteritems():
                if config.has_option(section, option):
                    store.append(path, config.get(section, option))
                elif required:
                    raise ConfigureError("missing required option %s => %s" % (section, option))
            return store
    except EnvironmentError as e:
        if self.required:
            raise ConfigureError("failed to read configuration: %s" % e.strerror)
        # a missing optional config yields an empty store
        return Store()
class Config(object):
    """Syncer configuration, read from syncer.cfg in the working directory."""

    def __init__(self):
        self._load()
        self._read()

    def _load(self):
        """Parse syncer.cfg from the current working directory."""
        cfg_path = os.path.join(os.getcwd(), 'syncer.cfg')
        self.config = RawConfigParser()
        self.config.read(cfg_path)

    def _read(self):
        """Cache the settings we care about as attributes."""
        self.sync_crons = self._get_multiline('cron', 'sync', [])
        self.unison_executable = self._get(
            'unison', 'executable', '/usr/local/bin/unison')

    def _get(self, section, option, default):
        """Return a non-empty option value, else *default*."""
        if not self.config.has_option(section, option):
            return default
        value = self.config.get(section, option)
        return value if value else default

    def _get_multiline(self, section, option, default):
        """Like _get(), but split a found value into its lines."""
        value = self._get(section, option, default)
        if value is default:
            return value
        return value.strip().split('\n')
def edit_profilevars(self):
    # Edit the current profile's variables in $EDITOR, then sync the
    # database to match the edited file.
    #
    # Flow: dump the profile's variables to a temp ini file, let the
    # user edit it, re-parse it, then diff old vs. new config into
    # delete / update / insert operations on the variables table.
    config = ProfileVariablesConfig(self.conn, self.current.profile)
    tmp, path = tempfile.mkstemp('variable', 'paella')
    tmp = file(path, 'w')
    config.write(tmp)
    tmp.close()
    # NOTE(review): assumes $EDITOR is set in the environment; the
    # os.system return value is not checked — confirm this is intended.
    os.system('$EDITOR %s' %path)
    tmp = file(path, 'r')
    newconfig = RawConfigParser()
    newconfig.readfp(tmp)
    tmp.close()
    os.remove(path)
    cursor = self.variables.env.cursor
    pclause = Eq('profile', self.current.profile)
    for trait in config.sections():
        tclause = pclause & Eq('trait', trait)
        if not newconfig.has_section(trait):
            # whole trait section removed in the editor: drop its rows
            cursor.delete(clause=tclause)
        else:
            for name, value in newconfig.items(trait):
                nclause = tclause & Eq('name', name)
                if config.has_option(trait, name):
                    # pre-existing variable: update only when changed
                    if value != config.get(trait, name):
                        cursor.update(data={'value' : value}, clause=nclause)
                else:
                    # variable added in the editor: insert a new row
                    idata = {
                        'profile' : self.current.profile,
                        'trait' : trait,
                        'name' : name,
                        'value' : value}
                    cursor.insert(data=idata)
        if config.has_section(trait):
            # variables deleted in the editor: drop their rows
            for name, value in config.items(trait):
                if not newconfig.has_option(trait, name):
                    cursor.delete(clause=tclause & Eq('name', name))
    self.select_profile(self.current.profile)
def LoadConfig():
    """Load web-server settings from GENMON_CONFIG into module globals.

    Populates clientport, loglocation, HTTPS settings, auth credentials
    and the SSL context.  Any failure is logged (not raised).
    """
    global log
    global clientport
    global loglocation
    global bUseSecureHTTP
    global HTTPPort
    global HTTPAuthUser
    global HTTPAuthPass
    global SSLContext
    global favicon

    # Default to no authentication / no TLS context until the file says otherwise.
    HTTPAuthPass = None
    HTTPAuthUser = None
    SSLContext = None

    try:
        config = RawConfigParser()
        # config parser reads from current directory, when running form a cron tab this is
        # not defined so we specify the full path
        config.read(GENMON_CONFIG)

        # heartbeat server port, must match value in check_generator_system.py and any calling client apps
        if config.has_option('GenMon', 'server_port'):
            clientport = config.getint('GenMon', 'server_port')

        if config.has_option('GenMon', 'loglocation'):
            loglocation = config.get("GenMon", 'loglocation')

        # log errors in this module to a file
        log = mylog.SetupLogger("genserv", loglocation + "genserv.log")

        if config.has_option('GenMon', 'usehttps'):
            bUseSecureHTTP = config.getboolean('GenMon', 'usehttps')

        if config.has_option('GenMon', 'http_port'):
            HTTPPort = config.getint('GenMon', 'http_port')

        if config.has_option('GenMon', 'favicon'):
            favicon = config.get('GenMon', 'favicon')

        # user name and password require usehttps = True
        if bUseSecureHTTP:
            if config.has_option('GenMon', 'http_user'):
                HTTPAuthUser = config.get('GenMon', 'http_user')
                HTTPAuthUser = HTTPAuthUser.strip()
                # No user name or pass specified, disable
                if HTTPAuthUser == "":
                    HTTPAuthUser = None
                    HTTPAuthPass = None
                elif config.has_option('GenMon', 'http_pass'):
                    HTTPAuthPass = config.get('GenMon', 'http_pass')
                    HTTPAuthPass = HTTPAuthPass.strip()

        if bUseSecureHTTP:
            app.secret_key = os.urandom(12)
            # Remember the configured port so we can fall back to plain
            # HTTP if no usable certificate setup is found below.
            OldHTTPPort = HTTPPort
            HTTPPort = 443
            if config.has_option('GenMon', 'useselfsignedcert'):
                bUseSelfSignedCert = config.getboolean('GenMon', 'useselfsignedcert')

                if bUseSelfSignedCert:
                    # Flask/Werkzeug generates an ad-hoc self-signed cert.
                    SSLContext = 'adhoc'
                else:
                    if config.has_option('GenMon', 'certfile') and config.has_option(
                            'GenMon', 'keyfile'):
                        CertFile = config.get('GenMon', 'certfile')
                        KeyFile = config.get('GenMon', 'keyfile')
                        if CheckCertFiles(CertFile, KeyFile):
                            SSLContext = (CertFile, KeyFile)    # tuple
                        else:
                            # Cert files unusable: revert to plain HTTP.
                            HTTPPort = OldHTTPPort
                            SSLContext = None
            else:
                # if we get here then usehttps is enabled but not option for useselfsignedcert
                # so revert to HTTP
                HTTPPort = OldHTTPPort
    except Exception as e1:
        # NOTE(review): if the failure happened before 'log' was set up,
        # this line itself can raise (log may be undefined) — verify.
        log.error("Missing config file or config file entries: " + str(e1))
# Read the web front-end settings from the [http_students] section.
debug = True if config.get('http_students', 'debug') == 'true' else False
document_root = config.get('http_students', 'document_root')
username_field = config.get('http_students', 'student_username_field')
base_path = config.get('http_students', 'base_path')
do_on_not_found = config.get('http_students', 'not_found')
logout_url = config.get('http_students', 'logout_url')

# Optional [web] section is passed through as a plain dict.
if config.has_section('web'):
    web_config = dict(config.items('web'))
else:
    web_config = dict()

# check which authentication methods should be used
authentication_header = True
username_header = False

# NOTE(review): the guard checks section 'http' but the value is read from
# 'http_students' — sections look crossed; confirm which is intended.
if config.has_option('http', 'authentication_header'):
    authentication_header = config.getboolean('http_students', 'authentication_header')

# NOTE(review): same mismatch mirrored here ('http_students' guard, 'http'
# read) — getboolean may raise if the 'http' section is missing; verify.
if config.has_option('http_students', 'username_header'):
    username_header = config.getboolean('http', 'username_header')

# Realm string sent in HTTP Basic-Auth challenges (user facing, German).
realm = 'S-BEAT Gesicherter Bereich'

# Serve static files and templates directly from the document root.
app = Flask(__name__, static_url_path=base_path, static_folder=document_root,
            template_folder=document_root)

UserTools.set_user_roles_by_config(config)
class Config(object):
    """Hold configuration state and utility functions related to config state.

    This is kind of a catch all for functionality related to the current
    configuration.
    """

    def __init__(self, filename=None):
        # With no filename the parser stays empty and every rewrite list
        # below defaults to [].
        self.c = RawConfigParser()

        if filename:
            if not os.path.exists(filename):
                raise ValueError('config file does not exist: %s' % filename)

            self.c.read(filename)

        # Each rewrite table is a list of (source-prefix, replacement) pairs
        # taken verbatim from its INI section.
        if self.c.has_section('path_rewrites'):
            self._path_rewrites = self.c.items('path_rewrites')
        else:
            self._path_rewrites = []

        if self.c.has_section('pull_url_rewrites'):
            self._pull_url_rewrites = self.c.items('pull_url_rewrites')
        else:
            self._pull_url_rewrites = []

        if self.c.has_section('public_url_rewrites'):
            self._public_url_rewrites = self.c.items('public_url_rewrites')
        else:
            self._public_url_rewrites = []

        if self.c.has_section('replicationpathrewrites'):
            self._replication_path_rewrites = self.c.items(
                'replicationpathrewrites')
        else:
            self._replication_path_rewrites = []

    @property
    def hg_path(self):
        """Path to a hg executable (falls back to 'hg' on $PATH)."""
        if self.c.has_section('programs') and self.c.has_option(
                'programs', 'hg'):
            return self.c.get('programs', 'hg')

        return 'hg'

    def parse_wire_repo_path(self, path):
        """Parse a normalized repository path into a local path."""
        # First matching prefix wins.
        # NOTE(review): str.replace substitutes *every* occurrence of the
        # prefix, not only the leading one — confirm that is intended.
        for source, dest in self._path_rewrites:
            if path.startswith(source):
                return path.replace(source, dest)

        return path

    def get_replication_path_rewrite(self, path):
        """Parse a local path into a wire path; None when no rule matches."""
        for source, dest in self._replication_path_rewrites:
            if path.startswith(source):
                return dest + path[len(source):]

        return None

    def get_pull_url_from_repo_path(self, path):
        """Obtain a URL to be used for pulling from a local repo path."""
        for source, dest in self._pull_url_rewrites:
            if path.startswith(source):
                return dest + path[len(source):]

        return None

    def get_public_url_from_wire_path(self, path):
        """Obtain a URL to be used for public advertisement from a wire protocol path."""
        for source, dest in self._public_url_rewrites:
            if path.startswith(source):
                return dest + path[len(source):]

        return None

    def get_client_from_section(self, section, timeout=-1):
        """Obtain a KafkaClient from a config section.

        The config section must have a ``hosts`` and ``client_id`` option.
        An optional ``connect_timeout`` defines the connection timeout.

        ``timeout`` specifies how many seconds to retry attempting to
        connect to Kafka in case the initial connection failed. -1
        indicates to not retry. This is useful when attempting to connect
        to a cluster that may still be coming online, for example.
        """
        hosts = self.c.get(section, 'hosts')
        client_id = self.c.get(section, 'client_id')
        connect_timeout = 60
        if self.c.has_option(section, 'connect_timeout'):
            connect_timeout = self.c.getint(section, 'connect_timeout')

        start = time.time()
        while True:
            try:
                return SimpleClient(hosts, client_id=client_id,
                                    timeout=connect_timeout)
            except KafkaUnavailableError:
                if timeout == -1:
                    raise

                if time.time() - start > timeout:
                    raise Exception('timeout reached trying to connect to Kafka')

                # Brief pause between connection attempts.
                time.sleep(0.1)
class TraktForVLC(object):
    """Main daemon object: polls a VLC rc interface and reports playback
    (watching / scrobble) to Trakt, using tvdb/omdb lookups to identify
    the current video."""

    # Check if there's a newer version of TraktForVLC on the project's github,
    # and print informations on that subject in the logs
    def __check_version(self):
        # The leading information for lines printed by this method in logs
        lead = "VERSION:"

        # Request the github API to get the releases information
        github_releases = requests.get(
            url="https://api.github.com/repos/XaF/TraktForVLC/releases")

        # If there was a problem getting the releases
        if not github_releases.ok:
            self.log.error(lead + "Unable to verify new releases of TraktForVLC")
            return

        # Else, we get the json answer
        releases = github_releases.json()

        # If we didn't find any release
        if not releases:
            self.log.warning(lead + "No releases found on github")
            return

        # We get the latest release, all included
        # NOTE(review): sorting on the raw tag string is lexicographic, so
        # e.g. "10.0" sorts before "9.0" — confirm tags stay single-digit.
        newest = sorted(
            releases,
            key=lambda x: x['tag_name'],
            reverse=True)[0]
        if newest['tag_name'][:1] == "v":
            newest['tag_name'] = newest['tag_name'][1:]
        newest_V = parse_version(newest['tag_name'])

        # We get the latest _stable_ release
        newest_stbl = sorted(
            [r for r in releases if not r['prerelease']],
            key=lambda x: x['tag_name'],
            reverse=True)[0]
        if newest_stbl['tag_name'][:1] == "v":
            newest_stbl['tag_name'] = newest_stbl['tag_name'][1:]
        newest_stbl_V = parse_version(newest_stbl['tag_name'])

        # We parse the current version
        current_V = parse_version(__version__)

        if newest_V <= current_V:
            self.log.info(lead + "TraktForVLC is up to date")
            return

        # We only show the latest stable release if
        # it's newer than our current release
        if newest_stbl_V > current_V:
            # We reformat the publication date of the release
            published = datetime.datetime.strptime(
                newest_stbl['published_at'],
                "%Y-%m-%dT%H:%M:%SZ").strftime('%c')

            self.log.info(lead + "##### RELEASE #####")
            self.log.info(lead + "## Stable release %(name)s" % newest_stbl)
            self.log.info(lead + "## Published on %s" % published)
            self.log.info(lead + "## Available on %(html_url)s" % newest_stbl)

        # We only show the latest release if it's not
        # also the latest stable release
        if newest_V > newest_stbl_V:
            # We reformat the publication date of the release
            published = datetime.datetime.strptime(
                newest['published_at'],
                "%Y-%m-%dT%H:%M:%SZ").strftime('%c')

            self.log.info(lead + "##### RELEASE #####")
            self.log.info(lead + "## Prerelease %(name)s" % newest)
            self.log.info(lead + "## Published on %s" % published)
            self.log.info(lead + "## Available on %(html_url)s" % newest)

        self.log.info(lead + "###################")

    def __init__(self, datadir, configfile, daemon=False):
        """Set up logging, validate the config file, load the config and
        construct the Trakt client, cache and tvdb handle.

        datadir    -- directory holding the 'logs' subdirectory
        configfile -- path to the TraktForVLC config file (must exist)
        daemon     -- when True, log to a file instead of stderr
        """
        # Verify if the log directory exists or create it
        logdir = os.path.join(datadir, 'logs')
        if not os.path.exists(logdir):
            os.mkdir(logdir)

        # Process log file name
        if daemon:
            if LOG_LEVEL is logging.DEBUG:
                logfile = os.path.join(logdir, "TraktForVLC-DEBUG.log")

                # Remove existing DEBUG file
                if os.path.isfile(logfile):
                    os.remove(logfile)
            else:
                logfile = os.path.join(
                    logdir,
                    "TraktForVLC-" + DATETIME.strftime("%Y%m%d-%H%M") + ".log")

            logging.basicConfig(
                format="%(asctime)s::%(name)s::%(levelname)s::%(message)s",
                level=LOG_LEVEL,
                filename=logfile)
        else:
            logging.basicConfig(
                format="%(asctime)s::%(name)s::%(levelname)s::%(message)s",
                level=LOG_LEVEL,
                stream=sys.stderr)

        self.log = logging.getLogger("TraktForVLC")

        # Windows consoles can't always print the 'ë' in the author name.
        e = 'e' if (sys.platform == 'win32') else 'ë'
        self.log.info(
            "## TraktForVLC v" + __version__ + " " + __release_name__)
        self.log.info("## Copyright (C) 2014-2015 " +
                      "Rapha" + e + "l Beamonte <*****@*****.**>")
        self.log.info("##")
        self.log.info("## TraktForVLC is distributed in the hope that it " +
                      "will be useful, but")
        self.log.info("## with ABSOLUTELY NO WARRANTY. This is free " +
                      "software; you are welcome")
        self.log.info("## to redistribute and/or modify it under the terms " +
                      "of the GPL2.")
        self.log.info("")

        if not os.path.isfile(configfile):
            self.log.error("Config file " + configfile + " not found, exiting.")
            sys.exit(1)

        self.log.debug("Running on %s, with Python %s" % (
            platform.platform(), platform.python_version()))

        self.__check_version()

        # Load configuration
        self.configfile = configfile
        self.__load_config()

        # Resolve a printable name for the current log level.
        # NOTE(review): 'loglevelstr' has no visible initialization before
        # this loop; if no level matches and the loop falls through without
        # a break, the 'is None' test below raises NameError — verify that
        # an initialization exists upstream or that a match is guaranteed.
        for loglvl, logstr in AVAILABLE_LOGLVL:
            if LOG_LEVEL <= loglvl:
                loglevelstr = logstr
                break
        if loglevelstr is None:
            loglevelstr = str(LOG_LEVEL)

        self.log.info("Logger level is set to %s" % loglevelstr)
        self.log.info("-- Will scrobble movies ? %s" % (
            'Yes' if self.DO_SCROBBLE_MOVIE else 'No'))
        self.log.info("-- Will scrobble tv shows ? %s" % (
            'Yes' if self.DO_SCROBBLE_TV else 'No'))
        self.log.info("-- Will we mark movies as being watched ? %s" % (
            'Yes' if self.DO_WATCHING_MOVIE else 'No'))
        self.log.info("-- Will we mark tv shows as being watched ? %s" % (
            'Yes' if self.DO_WATCHING_TV else 'No'))
        self.log.info("-- Videos will be scrobbled after " +
                      str(self.SCROBBLE_PERCENT) +
                      "% of their duration has been exceeded")
        self.log.info("-- Timer set to " + str(self.TIMER_INTERVAL) + " secs")
        self.log.info("-- Video will be marked as \"is watching\" from " +
                      str(self.START_WATCHING_TIMER) + " secs")

        # VLC configuration
        self.vlc_ip = self.config.get("VLC", "IP")
        self.vlc_port = self.config.getint("VLC", "Port")

        self.log.info("Listening VLC to " + self.vlc_ip + ":" +
                      str(self.vlc_port))

        # Trakt app configuration
        trakt_id = ("0e59f99095515c228d5fbc104e342574" +
                    "941aeeeda95946b8fa50b2b0366609bf")
        trakt_sc = ("3ed1d013ef80eb0bb45d8da8424b4b61" +
                    "3713abb057ed505683caf0baf1b5c650")

        # Trakt user information
        trakt = {
            "PIN": None,
            "access_token": None,
            "refresh_token": None,
            "Username": None,  # Now deprecated in Trakt v2 API
            "Password": None,  # Now deprecated in Trakt v2 API
        }
        for opt in trakt.keys():
            if self.config.has_option("Trakt", opt):
                trakt[opt] = self.config.get("Trakt", opt)

        # Initialize Trakt client
        # app_date is derived from this script's own mtime.
        modifiedTime = time.strftime(
            '%Y-%m-%d',
            time.gmtime(os.path.getmtime(get_file())))
        self.trakt_client = TraktClient.TraktClient({
            'username': trakt['Username'],
            'password': trakt['Password'],
            'client_id': trakt_id,
            'client_secret': trakt_sc,
            'app_version': __version__,
            'app_date': modifiedTime,
            'pin': trakt['PIN'],
            'access_token': trakt['access_token'],
            'refresh_token': trakt['refresh_token'],
            'callback_token': self.__callback_token_change,
        })

        # Initialize TraktForVLC's cache
        self.resetCache()

        # Initialize tvdb api
        self.tvdb = tvdb_api.Tvdb(cache=False, language='en')

        self.watching_now = ""
        self.vlcTime = 0
        self.vlc_connected = True

    def __load_config(self):
        """Parse self.configfile and populate the behavior flags/timers."""
        self.config = RawConfigParser()
        # Preserve option-name case (default optionxform lower-cases).
        self.config.optionxform = str
        self.config.read(self.configfile)

        # Initialize timers
        if SMALL_TIMERS:
            self.TIMER_INTERVAL = 5
            self.START_WATCHING_TIMER = 5
        else:
            self.TIMER_INTERVAL = self.config.getint("TraktForVLC", "Timer")
            self.START_WATCHING_TIMER = self.config.getint(
                "TraktForVLC", "StartWatching")

        # For the use of filenames instead of VLC window title
        self.USE_FILENAME = (
            True if self.config.get("TraktForVLC", "UseFilenames") == 'Yes'
            else False)

        # Do we have to scrobble ?
        self.DO_SCROBBLE_MOVIE = (
            True if self.config.get("TraktForVLC", "ScrobbleMovie") == 'Yes'
            else False)
        self.DO_SCROBBLE_TV = (
            True if self.config.get("TraktForVLC", "ScrobbleTV") == 'Yes'
            else False)

        # Do we have to mark as watching ?
        self.DO_WATCHING_MOVIE = (
            True if self.config.get("TraktForVLC", "WatchingMovie") == 'Yes'
            else False)
        self.DO_WATCHING_TV = (
            True if self.config.get("TraktForVLC", "WatchingTV") == 'Yes'
            else False)

        # What percent should we use to scrobble videos ?
        self.SCROBBLE_PERCENT = self.config.getint(
            "TraktForVLC", "ScrobblePercent")

    def __callback_token_change(self, access_token, refresh_token):
        """Persist new Trakt tokens (and drop the one-shot PIN) to disk."""
        if self.config.has_option('Trakt', 'PIN'):
            self.config.remove_option('Trakt', 'PIN')
        self.config.set('Trakt', 'access_token', access_token)
        self.config.set('Trakt', 'refresh_token', refresh_token)

        if not self.__save_config():
            self.log.debug("Error while saving tokens in configuration file!")

    def __save_config(self):
        """Write self.config back to self.configfile; True on success."""
        saved = False
        with open(self.configfile, 'w') as configfile:
            self.config.write(configfile)
            saved = True

        return saved

    def resetCache(self, filepath=None, filename=None, filelength=None):
        """Reset the playback cache, optionally seeding the new file info."""
        self.log.debug("reset cache (%s, %s)" % (filename, filelength))
        self.cache = {
            "vlc_file_path": filepath,
            "vlc_file_name": filename,
            "vlc_file_length": filelength,
            "scrobbled": False,
            "movie_info": None,
            "series_info": None,
            "series_current_ep": -1,
            "started_watching": None,
            "watching": -1,
            "video": {},
        }

    def resetCacheView(self, episode=None):
        """Clear watch/scrobble state (e.g. on episode change or replay)."""
        self.log.debug('reset cache view status (%s)' % episode)
        self.cache['watching'] = -1
        self.cache['scrobbled'] = False
        self.cache['started_watching'] = None
        if episode is not None:
            self.cache['series_current_ep'] = episode

    def close(self, signal, frame):
        # SIGINT handler installed by run().
        self.log.info("Program closed by SIGINT")
        sys.exit(0)

    def run(self):
        """Poll main() forever, every TIMER_INTERVAL seconds."""
        signal.signal(signal.SIGINT, self.close)

        while (True):
            try:
                self.main()
            except Exception as e:
                self.log.error(
                    "An unknown error occurred", exc_info=sys.exc_info())
            time.sleep(self.TIMER_INTERVAL)

        # NOTE(review): unreachable — the loop above never exits normally.
        self.main()

    def main(self):
        """One polling cycle: query VLC, identify the video and report it."""
        try:
            vlc = VLCRemote(self.vlc_ip, self.vlc_port)
            self.vlc_connected = True
        except:
            # Log the connection loss only on the connected->disconnected
            # transition to avoid spamming the log every cycle.
            if self.vlc_connected:
                self.log.info('Could not find VLC running at ' +
                              str(self.vlc_ip) + ':' + str(self.vlc_port))
                self.log.debug('Make sure your VLC player is running with ' +
                               '--extraintf=rc --rc-host=' +
                               str(self.vlc_ip) + ':' + str(self.vlc_port) +
                               ' --rc-quiet', exc_info=sys.exc_info())
                self.vlc_connected = False

            # If we were watching a video but we didn't finish it, we
            # have to cancel the watching status
            if self.cache["watching"] > -1 and not self.cache["scrobbled"]:
                self.trakt_client.cancelWatching(
                    self.cache["video"]["imdbid"],
                    self.get_episode(self.cache["video"]))

            # If there is something in the cache, we can purge the watching
            # and scrobbled information, so if the video is opened again we
            # will consider it's a new watch
            if self.cache['vlc_file_name'] is not None:
                self.resetCacheView()

            return

        vlcStatus = vlc.is_playing()
        if not vlcStatus:
            vlc.close()
            return

        currentFileLength = vlc.get_length()
        if not int(currentFileLength) > 0:
            self.log.debug("main::File length is 0, can't do anything")
            vlc.close()
            return

        currentFilePath = vlc.get_filename()
        if self.USE_FILENAME:
            currentFileName = currentFilePath.decode('utf-8')
        else:
            currentFileName = vlc.get_title().decode('utf-8')

        self.vlcTime = int(vlc.get_time())

        # Parse the filename to verify if it comes from a stream
        parsed = urlparse(currentFileName)
        if parsed.netloc:
            # Set the filename using only the basename of the parsed path
            currentFileName = os.path.basename(parsed.path)
        elif self.USE_FILENAME:
            # Even if it's not from a stream, if it's a filename we're using
            # we need to keep only the basename of the parsed path
            currentFileName = os.path.basename(currentFileName)

        # Use urllib's unquote to bring back special chars
        currentFileName = unquote(currentFileName)

        if (currentFileName == self.cache["vlc_file_name"]
                and currentFileLength == self.cache['vlc_file_length']):
            # Same file as last cycle: reuse the cached identification.
            if (self.cache["series_info"] is None
                    and self.cache["movie_info"] is None):
                video = None
            elif self.cache["series_info"] is not None:
                video = self.get_TV(vlc, self.cache["series_info"])
            else:
                video = self.get_Movie(vlc, self.cache["movie_info"])
        else:
            self.log.debug("main::New file: %s (length: %s)"
                           % (currentFileName, currentFileLength))

            # If we were watching a video but we didn't finish it, we
            # have to cancel the watching status
            if self.cache["watching"] > -1 and not self.cache["scrobbled"]:
                self.trakt_client.cancelWatching(
                    self.cache["video"]["imdbid"],
                    self.get_episode(self.cache["video"]))

            self.resetCache(currentFilePath, currentFileName,
                            currentFileLength)
            self.cache['started_watching'] = (time.time(), self.vlcTime)

            # Try TV identification first, then fall back to movie.
            video = self.get_TV(vlc)
            if video is None:
                video = self.get_Movie(vlc)

        if video is None:
            self.log.info(
                "No tv show nor movie found for the current playing video")
            vlc.close()
            return

        # We cache the updated video information
        self.cache["video"] = video

        logtitle = video["title"]
        if video["tv"]:
            logtitle += (" - %01dx%02d"
                         % (int(video["season"]), int(video["episode"])))

            # If we changed episode, we have to reset the view status
            if (self.cache['watching'] > -1
                    and self.cache['series_current_ep'] != video['episode']):
                self.resetCacheView(video['episode'])
                self.cache['started_watching'] = (
                    time.time(), self.vlcTime % video['duration'])

        self.log.info(logtitle + " state : " +
                      str(video["percentage"]) + "%")
        self.log.debug("main::Video: %s" % str(video))
        self.log.debug("main::This video is scrobbled : " +
                       str(self.cache["scrobbled"]))

        # Scrobble once the configured percentage is reached, but only if
        # enough wall-clock and playback time has actually elapsed (guards
        # against seeking straight to the end).
        if (((video['tv'] and self.DO_SCROBBLE_TV)
                or (not video['tv'] and self.DO_SCROBBLE_MOVIE))
                and video["percentage"] >= self.SCROBBLE_PERCENT
                and not self.cache["scrobbled"]
                and self.cache['started_watching'] is not None
                and ((time.time() - self.cache['started_watching'][0])
                     > (float(video['duration']) / 3.0))
                and ((self.vlcTime - self.cache['started_watching'][1])
                     > (float(video['duration']) / 4.0))):
            self.log.info("Scrobbling " + logtitle + " to Trakt...")
            try:
                self.trakt_client.stopWatching(video["imdbid"],
                                               video["percentage"],
                                               self.get_episode(video))
                self.cache["scrobbled"] = True
                self.log.info(logtitle + " scrobbled to Trakt !")
            except TraktClient.TraktError as e:
                self.log.error("An error occurred while trying to scrobble",
                               exc_info=sys.exc_info())
        # Otherwise, mark as "watching" once START_WATCHING_TIMER seconds
        # worth of the video have been played.
        elif (((video['tv'] and self.DO_WATCHING_TV)
                or (not video['tv'] and self.DO_WATCHING_MOVIE))
                and video["percentage"] < self.SCROBBLE_PERCENT
                and not self.cache["scrobbled"]
                and ((float(video["duration"])
                      * float(video["percentage"]) / 100.0)
                     >= self.START_WATCHING_TIMER)):
            self.log.debug("main::Trying to mark " + logtitle +
                           " watching on Trakt...")
            try:
                self.trakt_client.startWatching(video["imdbid"],
                                                video["percentage"],
                                                self.get_episode(video))
                self.log.info(logtitle + " is currently watching on Trakt...")
                self.cache["watching"] = video["percentage"]
            except TraktClient.TraktError as e:
                self.log.error("An error occurred while trying to mark as " +
                               "watching " + logtitle,
                               exc_info=sys.exc_info())

        vlc.close()

    def get_episode(self, video):
        """Return (show_imdbid, season, episode) for a TV video, else False."""
        episode = video["tv"]
        if episode:
            episode = (video["show_imdbid"],
                       video["season"],
                       video["episode"])

        return episode

    def get_TV(self, vlc, series_info=(None, None, None)):
        """Identify the playing file as a TV episode; None on failure.

        series_info -- cached (series, season, episodes) tuple from a
        previous successful lookup, to skip re-parsing/searching.
        """
        try:
            series, seasonNumber, episodeNumber = series_info
            if series is None:
                now_playing = parse_tv(self.cache['vlc_file_name'])
                if not now_playing:
                    self.log.info(
                        "Not able to parse a tvshow from the file title")
                    return

                seriesName = now_playing['show']
                seasonNumber = now_playing['season']
                episodeNumber = now_playing['episodes']

                if self.valid_TV(seriesName):
                    series = self.tvdb[seriesName]
                    self.cache["series_info"] = (
                        deepcopy(series), seasonNumber, episodeNumber)

            if series is not None:
                duration = int(self.cache['vlc_file_length'])
                time = int(self.vlcTime)

                # Calculate the relative time and duration depending on
                # the number of episodes
                # (a file may contain several episodes back to back).
                duration = int(float(duration) / float(len(episodeNumber)))
                currentEpisode = episodeNumber[int(time / duration)]
                time = time % duration

                # Calculate the given percentage for the current episode
                percentage = time * 100 / duration

                try:
                    episode = series[int(seasonNumber)][int(currentEpisode)]
                    return self.set_video(
                        True,
                        series['seriesname'],
                        series['firstaired'],
                        episode['imdb_id'],
                        duration,
                        percentage,
                        episode['seasonnumber'],
                        episode['episodenumber'],
                        series['imdb_id'])
                except:
                    self.log.warning("Episode : No valid episode found !")
                    self.log.debug("get_TV::Here's to help debug",
                                   exc_info=sys.exc_info())
                    self.cache["series_info"] = None
                    return
        except:
            self.log.info("No matching tv show found for video playing")
            self.log.debug("get_TV::Here's to help debug",
                           exc_info=sys.exc_info())
            return

    def valid_TV(self, seriesName):
        """True when tvdb's search returns at least one series for the name."""
        try:
            series = self.tvdb.search(seriesName)
            if (len(series) == 0):
                self.log.debug("valid_TV::no series found with the name '%s'"
                               % seriesName)
                return False
            return True
        except:
            self.log.debug("valid_TV::no valid title found.",
                           exc_info=sys.exc_info())
            return False

    def get_Movie(self, vlc, movie=None):
        """Identify the playing file as a movie; None on failure.

        movie -- cached movie-info dict from a previous successful lookup.
        """
        try:
            duration = int(self.cache['vlc_file_length'])
            if movie is None:
                now_playing = parse_movie(self.cache['vlc_file_name'])
                title = now_playing['title']
                year = now_playing['year']

                self.log.debug("get_Movie::Now playing: %s" % str(now_playing))

                if self.valid_Movie(self.cache['vlc_file_path'],
                                    title, year, duration):
                    # valid_Movie stored the lookup result in the cache.
                    movie = self.cache["movie_info"]
                    self.log.debug("get_Movie::Valid movie found: %s"
                                   % str(movie))

            if movie is not None:
                playtime = int(self.vlcTime)
                percentage = playtime * 100 / duration

                return self.set_video(
                    False,
                    movie['Title'],
                    movie['Year'],
                    movie['imdbID'],
                    duration,
                    percentage)

            return
        except:
            self.log.info("No matching movie found for video playing")
            self.log.debug("get_Movie::Here's to help debug",
                           exc_info=sys.exc_info())
            return

    def valid_Movie(self, vlcFilePath, vlcTitle, vlcYear, vlcDuration):
        """True when a movie lookup matches and its runtime is within
        +-300s of the VLC duration; caches the movie info on success."""
        try:
            # Get Movie info
            movie = movie_info.get_movie_info(
                vlcFilePath, vlcTitle, vlcYear, vlcDuration)

            # Compare Movie runtime against VLC runtime
            time = movie['Runtime']

            # Verify that the VLC duration is within 5 minutes of the
            # official duration
            if (vlcDuration >= time - 300) and (vlcDuration <= time + 300):
                self.cache["movie_info"] = deepcopy(movie)
                return True
            else:
                self.log.debug("valid_Movie::time range not respected " +
                               "(%d +-300 != %d)" % (time, vlcDuration))
        except:
            self.log.debug("valid_Movie::no valid title found",
                           exc_info=sys.exc_info())
            return False
        return False

    def set_video(self, tv, title, year, imdbid, duration, percentage,
                  season=-1, episode=-1, show_imdbid=None):
        """Assemble the normalized video dict used throughout main()."""
        video = {
            'tv': tv,
            'title': title,
            'year': year,
            'imdbid': imdbid,
            'duration': duration,
            'percentage': percentage,
            'season': season,
            'episode': episode,
            'show_imdbid': show_imdbid,
        }

        return video
# Database credentials are kept out of source control in an INI file
# that sits next to the project settings.
CONFIG_FILEPATH = os.path.join(BASE_DIR, 'thirdendpoint', 'settings.ini')
config = RawConfigParser()
config.read(CONFIG_FILEPATH)

# DATABASE_PORT is the only optional key; an empty string means
# "use the backend's default port".
if config.has_option('database', 'DATABASE_PORT'):
    _db_port = config.get('database', 'DATABASE_PORT')
else:
    _db_port = ''

DATABASES = {
    'default': {
        'ENGINE': config.get('database', 'DATABASE_ENGINE'),
        'NAME': config.get('database', 'DATABASE_NAME'),
        'USER': config.get('database', 'DATABASE_USER'),
        'PASSWORD': config.get('database', 'DATABASE_PASSWORD'),
        'HOST': config.get('database', 'DATABASE_HOST'),
        'PORT': _db_port,
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True
def GetConfig(self):
    """Load genmon.conf into the corresponding self.* attributes.

    Only options present in the file are applied, so the attributes keep
    their prior defaults otherwise.  Returns True on success; any parse
    failure is re-raised wrapped in Exception.
    """
    ConfigSection = "GenMon"
    try:
        # read config file
        config = RawConfigParser()
        # config parser reads from current directory, when running form a cron tab this is
        # not defined so we specify the full path
        config.read(self.ConfigFilePath + 'genmon.conf')

        # getfloat() raises an exception if the value is not a float
        # getint() and getboolean() also do this for their respective types
        if config.has_option(ConfigSection, 'sitename'):
            self.SiteName = config.get(ConfigSection, 'sitename')

        if config.has_option(ConfigSection, 'incoming_mail_folder'):
            self.IncomingEmailFolder = config.get(
                ConfigSection, 'incoming_mail_folder')     # imap folder for incoming mail

        if config.has_option(ConfigSection, 'processed_mail_folder'):
            self.ProcessedEmailFolder = config.get(
                ConfigSection, 'processed_mail_folder')    # imap folder for processed mail

        # server_port, must match value in myclient.py and check_monitor_system.py and any calling client apps
        if config.has_option(ConfigSection, 'server_port'):
            self.ServerSocketPort = config.getint(ConfigSection, 'server_port')

        if config.has_option(ConfigSection, 'loglocation'):
            self.LogLocation = config.get(ConfigSection, 'loglocation')

        if config.has_option(ConfigSection, 'syncdst'):
            self.bSyncDST = config.getboolean(ConfigSection, 'syncdst')

        if config.has_option(ConfigSection, 'synctime'):
            self.bSyncTime = config.getboolean(ConfigSection, 'synctime')

        if config.has_option(ConfigSection, 'disableplatformstats'):
            self.bDisablePlatformStats = config.getboolean(
                ConfigSection, 'disableplatformstats')

        if config.has_option(ConfigSection, 'simulation'):
            self.Simulation = config.getboolean(ConfigSection, 'simulation')

        if config.has_option(ConfigSection, 'simulationfile'):
            self.SimulationFile = config.get(ConfigSection, 'simulationfile')

        if config.has_option(ConfigSection, 'controllertype'):
            self.ControllerSelected = config.get(ConfigSection, 'controllertype')

        if config.has_option(ConfigSection, 'disableweather'):
            self.DisableWeather = config.getboolean(
                ConfigSection, 'disableweather')
        else:
            self.DisableWeather = False

        if config.has_option(ConfigSection, 'weatherkey'):
            self.WeatherAPIKey = config.get(ConfigSection, 'weatherkey')

        if config.has_option(ConfigSection, 'weatherlocation'):
            self.WeatherLocation = config.get(ConfigSection, 'weatherlocation')

        if config.has_option(ConfigSection, 'metricweather'):
            self.UseMetric = config.getboolean(ConfigSection, 'metricweather')

        if config.has_option(ConfigSection, 'minimumweatherinfo'):
            self.WeatherMinimum = config.getboolean(
                ConfigSection, 'minimumweatherinfo')

        if config.has_option(ConfigSection, 'readonlyemailcommands'):
            self.ReadOnlyEmailCommands = config.getboolean(
                ConfigSection, 'readonlyemailcommands')

        if config.has_option(ConfigSection, 'optimizeforslowercpu'):
            self.SlowCPUOptimization = config.getboolean(
                ConfigSection, 'optimizeforslowercpu')

        # Version bookkeeping: rewrite the stored version (and flag a new
        # install) whenever it is missing or differs from this build.
        if config.has_option(ConfigSection, 'version'):
            self.Version = config.get(ConfigSection, 'version')
            if not self.Version == GENMON_VERSION:
                self.AddItemToConfFile('version', GENMON_VERSION)
                self.NewInstall = True
        else:
            self.AddItemToConfFile('version', GENMON_VERSION)
            self.NewInstall = True
            self.Version = GENMON_VERSION

        if config.has_option(ConfigSection, "autofeedback"):
            self.FeedbackEnabled = config.getboolean(
                ConfigSection, 'autofeedback')
        else:
            self.AddItemToConfFile('autofeedback', "False")
            self.FeedbackEnabled = False

        # Load saved feedback log if log is present
        if os.path.isfile(self.FeedbackLogFile):
            try:
                with open(self.FeedbackLogFile) as infile:
                    self.FeedbackMessages = json.load(infile)
            except Exception as e1:
                # Corrupt feedback log: discard it rather than fail startup.
                os.remove(self.FeedbackLogFile)

    except Exception as e1:
        raise Exception(
            "Missing config file or config file entries (genmon): "
            + str(e1))
        # NOTE(review): unreachable — the raise above always exits first.
        return False

    return True
class Config(object):
    """Runner configuration loaded from an INI file.

    Class attributes double as defaults when an option is absent or the
    file cannot be read at all (self.options stays None in that case).
    """

    # Defaults; overridden per-instance by load_config().
    sleep_time = 1          # seconds between runner iterations
    max_tries = 5           # max attempts per task
    max_time = 600          # overall time budget (seconds)
    halt_task = "halt.sh"
    interpreter = None
    filename = None         # path of the loaded config file
    options = None          # RawConfigParser, or None when load failed

    def load_config(self, filename):
        """Read `filename` (plus optional include_dir) into self.options
        and apply the [runner] settings."""
        self.filename = filename
        self.options = RawConfigParser()
        # The default optionxform converts option names to lower case. We want
        # to preserve case, so change the transform function to just return the
        # str value
        self.options.optionxform = str
        if not self.options.read([filename]):
            log.warn("Couldn't load %s", filename)
            self.options = None
            return

        if self.options.has_option('runner', 'include_dir'):
            # reload the object including files in config.d
            config_dir = self.options.get('runner', 'include_dir')
            configs = [os.path.join(config_dir, c) for c in
                       list_directory(config_dir)]
            if not self.options.read([filename] + configs):
                log.warn("Couldn't load %s", config_dir)
                return

        if self.options.has_option('runner', 'sleep_time'):
            self.sleep_time = self.options.getint('runner', 'sleep_time')
        if self.options.has_option('runner', 'max_tries'):
            self.max_tries = self.options.getint('runner', 'max_tries')
        if self.options.has_option('runner', 'max_time'):
            self.max_time = self.options.getint('runner', 'max_time')
        if self.options.has_option('runner', 'halt_task'):
            self.halt_task = self.options.get('runner', 'halt_task')
        if self.options.has_option('runner', 'interpreter'):
            self.interpreter = self.options.get('runner', 'interpreter')

    def get(self, section, option):
        """Raw option lookup; None when unset or when no config is loaded."""
        if self.options and self.options.has_option(section, option):
            return self.options.get(section, option)
        return None

    def get_env(self):
        """Build the environment dict for tasks: the [env] section plus
        RUNNER_CONFIG_CMD, a command line that re-invokes this runner with
        the same config file."""
        retval = {}
        if self.options and self.options.has_section('env'):
            for option, value in self.options.items('env'):
                retval[str(option)] = str(value)
        if self.filename:
            retval['RUNNER_CONFIG_CMD'] = '{python} {runner} -c {configfile}'.format(
                python=sys.executable,
                runner=os.path.abspath(sys.argv[0]),
                configfile=os.path.abspath(self.filename),
            )
        return retval

    def get_task_config(self, taskname):
        """Returns a dict of the config options for [taskname] or an empty
        dict otherwise
        """
        if self.options is not None and self.options.has_section(taskname):
            return dict(self.options.items(taskname))
        return {}
class MyConfig (MyLog):
    """Configuration manager for the shutter controller.

    Wraps a RawConfigParser around a conf file and adds section-aware
    read/write helpers, including in-place rewriting of the file on disk
    (WriteValue / WriteSection) guarded by a critical-section lock.
    """

    #---------------------MyConfig::__init__------------------------------------
    def __init__(self, filename = None, section = None, log = None):
        # NOTE(review): super(MyLog, self) skips MyLog.__init__ in the MRO
        # (it initialises MyLog's *parent*); this was presumably meant to be
        # super(MyConfig, self).__init__() — confirm before changing.
        super(MyLog, self).__init__()
        self.log = log
        self.FileName = filename
        self.Section = section
        self.CriticalLock = threading.Lock()    # Critical Lock (writing conf file)
        self.InitComplete = False
        # Defaults, overwritten by LoadConfig() from the conf file.
        self.LogLocation = "/var/log/"
        self.Latitude = 51.4769
        self.Longitude = 0
        self.SendRepeat = 1
        self.UseHttps = False
        self.HTTPPort = 80
        self.HTTPSPort = 443
        self.RTS_Address = "0x279620"
        self.Shutters = {}          # shutter id -> dict of name/code/duration/position
        self.ShuttersByName = {}    # shutter name -> shutter id
        self.Schedule = {}          # schedule key -> dict of schedule fields
        self.Password = ""
        try:
            self.config = RawConfigParser()
            self.config.read(self.FileName)
            # With no explicit section, default to the first one in the file.
            if self.Section == None:
                SectionList = self.GetSections()
                if len(SectionList):
                    self.Section = SectionList[0]
        except Exception as e1:
            self.LogErrorLine("Error in MyConfig:init: " + str(e1))
            return
        self.InitComplete = True

    # -------------------- MyConfig::LoadConfig-----------------------------------
    def LoadConfig(self):
        """Load all known settings into attributes; returns False on error."""
        # Map of option name -> expected type for the [General] section.
        # NOTE: the loop variable `type` shadows the builtin within this method.
        parameters = {'LogLocation': str, 'Latitude': float, 'Longitude': float,
                      'SendRepeat': int, 'UseHttps': bool, 'HTTPPort': int,
                      'HTTPSPort': int, 'TXGPIO': int, 'RTS_Address': str,
                      "Password": str}
        self.SetSection("General");
        for key, type in parameters.items():
            try:
                if self.HasOption(key):
                    setattr(self, key, self.ReadValue(key, return_type=type))
            except Exception as e1:
                self.LogErrorLine("Missing config file or config file entries in Section General for key "+key+": " + str(e1))
                return False

        parameters = {'MQTT_Server': str, 'MQTT_Port': int, 'MQTT_User': str,
                      'MQTT_Password': str, 'EnableDiscovery': bool}
        self.SetSection("MQTT");
        for key, type in parameters.items():
            try:
                if self.HasOption(key):
                    setattr(self, key, self.ReadValue(key, return_type=type))
            except Exception as e1:
                # NOTE(review): message says "Section General" but this loop
                # reads the MQTT section — message text left untouched here.
                self.LogErrorLine("Missing config file or config file entries in Section General for key "+key+": " + str(e1))
                return False

        # [Shutters] entries look like "name,enabled[,duration]"; rolling code
        # and optional intermediate position come from their own sections.
        self.SetSection("Shutters");
        shutters = self.GetList();
        for key, value in shutters:
            try:
                param1 = value.split(",")
                if param1[1].strip().lower() == 'true':
                    # Default/clamp the duration field to 10 when missing or
                    # outside (0, 100).
                    if (len(param1) < 3):
                        param1.append("10");
                    elif (param1[2].strip() == "") or (int(param1[2]) <= 0) or (int(param1[2]) >= 100):
                        param1[2] = "10"
                    param2 = int(self.ReadValue(key, section="ShutterRollingCodes", return_type=int))
                    param3 = self.ReadValue(key, section="ShutterIntermediatePositions", return_type=int)
                    # An intermediate position outside 0..100 is treated as absent.
                    if (param3 != None) and ((param3 < 0) or (param3 > 100)):
                        param3 = None
                    self.Shutters[key] = {
                        'name': param1[0],
                        'code': param2,
                        'duration': int(param1[2]),
                        'intermediatePosition': param3}
                    self.ShuttersByName[param1[0]] = key
            except Exception as e1:
                self.LogErrorLine("Missing config file or config file entries in Section Shutters for key "+key+": " + str(e1))
                return False

        # [Scheduler] entries are comma-separated fields; only rows whose
        # first field is active/paused are kept.
        self.SetSection("Scheduler")
        schedules = self.GetList()
        for key, value in schedules:
            try:
                param = value.split(",")
                if param[0].strip().lower() in ('active', 'paused'):
                    self.Schedule[key] = {
                        'active': param[0],
                        'repeatType': param[1],
                        'repeatValue': param[2],
                        'timeType': param[3],
                        'timeValue': param[4],
                        'shutterAction': param[5],
                        'shutterIds': param[6]}
            except Exception as e1:
                self.LogErrorLine("Missing config file or config file entries in Section Scheduler for key "+key+": " + str(e1))
                return False
        return True

    #---------------------MyConfig::setLocation---------------------------------
    def setLocation(self, lat, lng):
        """Persist latitude/longitude to [General] and cache them."""
        self.WriteValue("Latitude", lat, section="General");
        self.WriteValue("Longitude", lng, section="General");
        self.Latitude = lat
        self.Longitude = lng

    #---------------------MyConfig::setCode---------------------------------
    def setCode(self, shutterId, code):
        """Persist a shutter's rolling code and update the in-memory copy."""
        self.WriteValue(shutterId, str(code), section="ShutterRollingCodes");
        self.Shutters[shutterId]['code'] = code

    #---------------------MyConfig::HasOption-----------------------------------
    def HasOption(self, Entry):
        """True when the current section defines *Entry*."""
        return self.config.has_option(self.Section, Entry)

    #---------------------MyConfig::GetList-------------------------------------
    def GetList(self):
        """Return all (option, value) pairs of the current section."""
        return self.config.items(self.Section)

    #---------------------MyConfig::GetSections---------------------------------
    def GetSections(self):
        """Return the list of section names in the conf file."""
        return self.config.sections()

    #---------------------MyConfig::SetSection----------------------------------
    def SetSection(self, section):
        """Make *section* the current section; returns False when invalid."""
        # if not (isinstance(section, str) or isinstance(section, unicode)) or not len(section):
        if not len(section):
            self.LogError("Error in MyConfig:ReadValue: invalid section: " + str(section))
            return False
        self.Section = section
        return True

    #---------------------MyConfig::ReadValue-----------------------------------
    def ReadValue(self, Entry, return_type = str, default = None, section = None, NoLog = False):
        """Read *Entry* from the current (or given) section coerced to
        return_type (str/bool/float/int); returns *default* when missing or
        on any error. NoLog suppresses the error log line."""
        try:
            if section != None:
                self.SetSection(section)
            if self.config.has_option(self.Section, Entry):
                if return_type == str:
                    return self.config.get(self.Section, Entry)
                elif return_type == bool:
                    return self.config.getboolean(self.Section, Entry)
                elif return_type == float:
                    return self.config.getfloat(self.Section, Entry)
                elif return_type == int:
                    return self.config.getint(self.Section, Entry)
                else:
                    self.LogErrorLine("Error in MyConfig:ReadValue: invalid type:" + str(return_type))
                    return default
            else:
                return default
        except Exception as e1:
            if not NoLog:
                self.LogErrorLine("Error in MyConfig:ReadValue: " + Entry + ": " + str(e1))
            return default

    #---------------------MyConfig::WriteSection--------------------------------
    def WriteSection(self, SectionName):
        """Append a new [SectionName] header to the conf file.

        Returns True when the section already exists or was written,
        False on I/O error.
        """
        SectionList = self.GetSections()

        if SectionName in SectionList:
            self.LogError("Error in WriteSection: Section already exist.")
            return True
        try:
            with self.CriticalLock:
                with open(self.FileName, "a") as ConfigFile:
                    ConfigFile.write("[" + SectionName + "]")
                    ConfigFile.flush()
                    ConfigFile.close()
                    # update the read data that is cached
                    self.config.read(self.FileName)
            return True
        except Exception as e1:
            self.LogErrorLine("Error in WriteSection: " + str(e1))
            return False

    #---------------------MyConfig::WriteValue----------------------------------
    def WriteValue(self, Entry, Value, remove = False, section = None):
        """Write (or remove) Entry = Value inside the current section by
        rewriting the conf file in place, preserving comments and layout.

        Scans the file once to find the section bounds and an existing entry
        line, then writes the file back with the entry replaced, inserted at
        the end of the section, or dropped (remove=True).
        """
        if section != None:
            self.SetSection(section)

        SectionFound = False
        try:
            with self.CriticalLock:
                Found = False
                ConfigFile = open(self.FileName, 'r')
                FileList = ConfigFile.read().splitlines()
                ConfigFile.close()

                # First pass: locate the section start/end and the entry line.
                mySectionStart = -1;
                mySectionEnd = -1;
                myLine = -1;
                currentLastDataLine = -1;
                for i, line in enumerate(FileList):
                    if self.LineIsSection(line) and self.Section.lower() == self.GetSectionName(line).lower():
                        mySectionStart = i
                    elif mySectionStart >=0 and mySectionEnd == -1 and len(line.strip().split('=')) >= 2 and (line.strip().split('='))[0].strip() == Entry:
                        myLine = i
                    elif mySectionStart >=0 and mySectionEnd == -1 and self.LineIsSection(line):
                        # Next section header: our section ends at the last
                        # non-blank, non-comment line seen before it.
                        mySectionEnd = currentLastDataLine
                    if not line.isspace() and not len(line.strip()) == 0 and not line.strip()[0] == "#":
                        currentLastDataLine = i
                # Section runs to end of file when no later header was found.
                if mySectionStart >=0 and mySectionEnd == -1:
                    mySectionEnd = currentLastDataLine
                self.LogDebug("CONFIG FILE WRITE ->> mySectionStart = "+str(mySectionStart)+", mySectionEnd = "+str(mySectionEnd)+", myLine = "+str(myLine))
                if mySectionStart == -1:
                    raise Exception("NOT ABLE TO FIND SECTION:"+self.Section)

                # Second pass: rewrite the file with the change applied.
                ConfigFile = open(self.FileName, 'w')
                for i, line in enumerate(FileList):
                    if myLine >= 0 and myLine == i and not remove:
                        # I found my line, now write new value
                        ConfigFile.write(Entry + " = " + Value + "\n")
                    elif myLine == -1 and mySectionEnd == i:
                        # Here we have to insert the new record...
                        ConfigFile.write(line+"\n")
                        ConfigFile.write(Entry + " = " + Value + "\n")
                    else:
                        # Nothing special, just copy the previous line....
                        ConfigFile.write(line+"\n")
                ConfigFile.flush()
                ConfigFile.close()
                # update the read data that is cached
                self.config.read(self.FileName)
            return True
        except Exception as e1:
            self.LogError("Error in WriteValue: " + str(e1))
            return False

    #---------------------MyConfig::GetSectionName------------------------------
    def GetSectionName(self, Line):
        """Return the name inside a [section] line, or "" if not a section."""
        Line = Line.strip()
        if Line.startswith("[") and Line.endswith("]") and len(Line) >=3 :
            Line = Line.replace("[", "")
            Line = Line.replace("]", "")
            return Line
        return ""

    #---------------------MyConfig::LineIsSection-------------------------------
    def LineIsSection(self, Line):
        """True when the (stripped) line is a [section] header."""
        Line = Line.strip()
        if Line.startswith("[") and Line.endswith("]") and len(Line) >=3 :
            return True
        return False
class MyConfig(MyCommon):
    """Section-aware wrapper around RawConfigParser for a conf file.

    Adds typed reads (ReadValue), file-preserving writes (WriteValue /
    WriteSection) guarded by a lock, and a Simulation mode in which all
    mutating operations become no-ops.
    """

    #---------------------MyConfig::__init__------------------------------------
    def __init__(self, filename=None, section=None, simulation=False, log=None):
        super(MyConfig, self).__init__()
        self.log = log
        self.FileName = filename
        self.Section = section
        self.Simulation = simulation
        self.CriticalLock = threading.Lock()    # Critical Lock (writing conf file)
        self.InitComplete = False
        try:
            self.config = RawConfigParser()
            self.config.read(self.FileName)
            # With no explicit section, default to the first one in the file.
            if self.Section == None:
                SectionList = self.GetSections()
                if len(SectionList):
                    self.Section = SectionList[0]
        except Exception as e1:
            self.LogErrorLine("Error in MyConfig:init: " + str(e1))
            return
        self.InitComplete = True

    #---------------------MyConfig::HasOption-----------------------------------
    def HasOption(self, Entry):
        """True when the current section defines *Entry*."""
        return self.config.has_option(self.Section, Entry)

    #---------------------MyConfig::GetList-------------------------------------
    def GetList(self):
        """Return all (option, value) pairs of the current section."""
        return self.config.items(self.Section)

    #---------------------MyConfig::GetSections---------------------------------
    def GetSections(self):
        """Return the list of section names in the conf file."""
        return self.config.sections()

    #---------------------MyConfig::SetSection----------------------------------
    def SetSection(self, section):
        """Make *section* the current section; returns False when invalid."""
        if self.Simulation:
            return True
        # `unicode` marks this module as Python 2 code.
        if not (isinstance(section, str) or isinstance(section, unicode)) or not len(section):
            self.LogError("Error in MyConfig:ReadValue: invalid section: " + str(section))
            return False
        self.Section = section
        return True

    #---------------------MyConfig::ReadValue-----------------------------------
    def ReadValue(self, Entry, return_type=str, default=None, section=None, NoLog=False):
        """Read *Entry* from the current (or given) section coerced to
        return_type (str/bool/float/int); returns *default* when missing or
        on any error. NoLog suppresses the error log line."""
        try:
            if section != None:
                self.SetSection(section)
            if self.config.has_option(self.Section, Entry):
                if return_type == str:
                    return self.config.get(self.Section, Entry)
                elif return_type == bool:
                    return self.config.getboolean(self.Section, Entry)
                elif return_type == float:
                    return self.config.getfloat(self.Section, Entry)
                elif return_type == int:
                    return self.config.getint(self.Section, Entry)
                else:
                    self.LogErrorLine(
                        "Error in MyConfig:ReadValue: invalid type:" + str(return_type))
                    return default
            else:
                return default
        except Exception as e1:
            if not NoLog:
                self.LogErrorLine("Error in MyConfig:ReadValue: " + Entry + ": " + str(e1))
            return default

    #---------------------MyConfig::WriteSection--------------------------------
    def WriteSection(self, SectionName):
        """Append a new [SectionName] header to the conf file.

        Returns True when the section already exists or was written,
        False on I/O error. No-op (True) in Simulation mode.
        """
        if self.Simulation:
            return True
        SectionList = self.GetSections()

        if SectionName in SectionList:
            self.LogError("Error in WriteSection: Section already exist.")
            return True
        try:
            with self.CriticalLock:
                with open(self.FileName, "a") as ConfigFile:
                    ConfigFile.write("[" + SectionName + "]")
                    ConfigFile.flush()
                    ConfigFile.close()
                    # update the read data that is cached
                    self.config.read(self.FileName)
            return True
        except Exception as e1:
            self.LogErrorLine("Error in WriteSection: " + str(e1))
            return False

    #---------------------MyConfig::WriteValue----------------------------------
    def WriteValue(self, Entry, Value, remove=False, section=None):
        """Write (or remove) Entry = Value inside the current section by
        rewriting the conf file line-by-line, preserving comments/layout.

        NOTE(review): in Simulation mode this returns None while the normal
        paths return True/False — confirm callers don't rely on the bool.
        """
        if self.Simulation:
            return

        if section != None:
            self.SetSection(section)

        SectionFound = False
        try:
            with self.CriticalLock:
                Found = False
                ConfigFile = open(self.FileName, 'r')
                FileString = ConfigFile.read()
                ConfigFile.close()

                ConfigFile = open(self.FileName, 'w')
                for line in FileString.splitlines():
                    if not line.isspace():  # blank lines
                        newLine = line.strip()  # strip leading spaces
                        if len(newLine):
                            if not newLine[0] == "#":  # not a comment
                                # Before our section: copy through untouched.
                                if not SectionFound and not self.LineIsSection(newLine):
                                    ConfigFile.write(line + "\n")
                                    continue
                                # Header of a *different* section.
                                if self.LineIsSection(newLine) and self.Section.lower() != self.GetSectionName(newLine).lower():
                                    if SectionFound and not Found and not remove:
                                        # we reached the end of the section:
                                        # entry wasn't present, insert it here
                                        ConfigFile.write(Entry + " = " + Value + "\n")
                                        Found = True
                                    SectionFound = False
                                    ConfigFile.write(line + "\n")
                                    continue
                                # Header of *our* section.
                                if self.LineIsSection(newLine) and self.Section.lower() == self.GetSectionName(newLine).lower():
                                    SectionFound = True
                                    ConfigFile.write(line + "\n")
                                    continue
                                if not SectionFound:
                                    ConfigFile.write(line + "\n")
                                    continue
                                # Inside our section: replace/remove a
                                # matching "Entry = ..." line.
                                items = newLine.split('=')  # split items in line by spaces
                                if len(items) >= 2:
                                    items[0] = items[0].strip()
                                    if items[0] == Entry:
                                        if not remove:
                                            ConfigFile.write(Entry + " = " + Value + "\n")
                                        Found = True
                                        continue
                    # Fall-through: blanks, comments and unmatched lines.
                    ConfigFile.write(line + "\n")

                # if this is a new entry, then write it to the file, unless we are removing it
                # this check is if there is not section below the one we are working in,
                # it will be added to the end of the file
                if not Found and not remove:
                    ConfigFile.write(Entry + " = " + Value + "\n")
                ConfigFile.flush()
                ConfigFile.close()
                # update the read data that is cached
                self.config.read(self.FileName)
            return True
        except Exception as e1:
            self.LogError("Error in WriteValue: " + str(e1))
            return False

    #---------------------MyConfig::GetSectionName------------------------------
    def GetSectionName(self, Line):
        """Return the name inside a [section] line, or "" if not a section."""
        if self.Simulation:
            return ""
        Line = Line.strip()
        if Line.startswith("[") and Line.endswith("]") and len(Line) >= 3:
            Line = Line.replace("[", "")
            Line = Line.replace("]", "")
            return Line
        return ""

    #---------------------MyConfig::LineIsSection-------------------------------
    def LineIsSection(self, Line):
        """True when the (stripped) line is a [section] header."""
        if self.Simulation:
            return False
        Line = Line.strip()
        if Line.startswith("[") and Line.endswith("]") and len(Line) >= 3:
            return True
        return False
def check_apps(pkg, pkg_log): '''Check the apps from the given package via their .desktop files. Returns True if all the apps have been checked successfully or the package contains no appropriate .desktop files and is therefore skipped. False is returned otherwise. 'pkg' - name of the package. 'pkg_log' - file object for the log file. ''' print '\n', SEP, '\n' print 'Processing', pkg try: out = subprocess.Popen(['sudo', 'rpm', '-q', '-l', pkg], stdout=subprocess.PIPE, stderr=pkg_log).communicate()[0] except subprocess.CalledProcessError as e: pkg_log.write('\'sudo rpm -q -l %s\' returned %d.\n' % (pkg, e.returncode)) pkg_log.write('Failed to check %s\n' % pkg) add_to_list(pkg, RES_FAILED_TO_CHECK) return False cfg = RawConfigParser() nfiles = 0 failed = False processed_commands = {} # For Qt3 apps os.environ['PATH'] = os.environ['PATH'] + ":/usr/lib64/qt-3.3/bin/" for fl in out.split('\n'): if re_desktop.match(fl): try: res = cfg.read(fl) except ConfigParserError as e: pkg_log.write('Error while parsing %s: %s.\n' % (fl, str(e))) continue if not res: pkg_log.write('File not found: %s.\n' % fl) continue if not cfg.has_section(SECTION): pkg_log.write('File %s does not have section \'%s\'.\n' % (fl, SECTION)) continue if not cfg.has_option(SECTION, 'Type'): pkg_log.write('File %s does not have \'Type\' option.\n' % fl) continue tp = cfg.get(SECTION, 'Type') if tp != 'Application' and tp != 'Service': pkg_log.write(''.join([ 'File ', fl, ' does not specify and application (Type=', tp, ').\n' ])) continue if not cfg.has_option(SECTION, 'Exec'): pkg_log.write('File %s does not have \'Exec\' option.\n' % fl) continue exec_opt = cfg.get(SECTION, 'Exec') # Cut %-options, if any, as well as the rest of the command. 
ind = exec_opt.find('%') if ind != -1: exec_opt = exec_opt[:ind] if exec_opt.endswith('\''): exec_opt = exec_opt + 'Test\'' command = exec_opt.strip() if command in processed_commands: pkg_log.write('Command \'%s\' has been already checked.\n' % command) continue processed_commands[command] = True if not cfg.has_option(SECTION, 'Name'): name = None else: name = cfg.get(SECTION, 'Name') # Got a .desktop file of the needed type with the needed # content, check it. nfiles = nfiles + 1 if not do_check(pkg, name, command, pkg_log): failed = True if nfiles == 0: add_to_list(pkg, RES_SKIPPED) ret = True elif failed: add_to_list(pkg, RES_CRASHED) ret = False else: add_to_list(pkg, RES_SUCCEEDED) ret = True pkg_log.write('\nNumber of .desktop files checked: %d.\n' % nfiles) return ret
class _prefs(dict):
    """User preferences keyed by dotted "section.option" strings, stored in
    an INI file via RawConfigParser.

    Values are persisted as strings; __getitem__ converts container
    literals and booleans back on read.
    """

    def __init__(self, filename=default_prefs_file):
        self._filename = filename

    def init(self):
        '''
        initialize the preferences, should only be called from app.main
        '''
        # create directory tree of filename if it doesn't yet exist
        head, tail = os.path.split(self._filename)
        if not os.path.exists(head):
            os.makedirs(head)

        self.config = RawConfigParser()

        # set the version if the file doesn't exist
        if not os.path.exists(self._filename):
            self[config_version_pref] = config_version
        else:
            self.config.read(self._filename)

        version = self[config_version_pref]
        if version is None:
            logger.warning('%s has no config version pref' % self._filename)
            logger.warning('setting the config version to %s.%s'
                           % (config_version))
            self[config_version_pref] = config_version

        # set some defaults if they don't exist
        if use_sentry_client_pref not in self:
            self[use_sentry_client_pref] = False
        if picture_root_pref not in self:
            self[picture_root_pref] = ''
        if date_format_pref not in self:
            self[date_format_pref] = '%d-%m-%Y'
        if parse_dayfirst_pref not in self:
            format = self[date_format_pref]
            if format.find('%d') < format.find('%m'):
                self[parse_dayfirst_pref] = True
            else:
                self[parse_dayfirst_pref] = False
        if parse_yearfirst_pref not in self:
            format = self[date_format_pref]
            if format.find('%Y') == 0 or format.find('%y') == 0:
                self[parse_yearfirst_pref] = True
            else:
                self[parse_yearfirst_pref] = False
        if units_pref not in self:
            self[units_pref] = 'metric'

    @staticmethod
    def _parse_key(name):
        # split on the LAST dot so section names may themselves be dotted
        index = name.rfind(".")
        return name[:index], name[index + 1:]

    def get(self, key, default):
        '''
        get value for key else return default
        '''
        value = self[key]
        if value is None:
            return default
        return value

    def __getitem__(self, key):
        section, option = _prefs._parse_key(key)
        # this doesn't allow None values for preferences
        if not self.config.has_section(section) or \
                not self.config.has_option(section, option):
            return None
        else:
            i = self.config.get(section, option)
            eval_chars = '{[('
            if i == '':
                return i
            elif i[0] in eval_chars:  # then the value is a dict, list or tuple
                # SECURITY NOTE: eval() runs arbitrary expressions read from
                # the config file. ast.literal_eval would be safer, but would
                # reject non-literal values existing configs may contain, so
                # it is only flagged here rather than replaced.
                return eval(i)
            elif i == 'True' or i == 'False':
                # Same result as eval(i), without invoking eval.
                return i == 'True'
            return i

    def iteritems(self):
        # BUG FIX: this previously iterated the module-level ``prefs``
        # singleton instead of ``self``; identical for the singleton itself,
        # but wrong for any other instance.
        return [('%s.%s' % (section, name), value)
                for section in sorted(self.config.sections())
                for name, value in self.config.items(section)]

    def __setitem__(self, key, value):
        section, option = _prefs._parse_key(key)
        if not self.config.has_section(section):
            self.config.add_section(section)
        self.config.set(section, option, str(value))

    def __contains__(self, key):
        section, option = _prefs._parse_key(key)
        if self.config.has_section(section) and \
                self.config.has_option(section, option):
            return True
        return False

    def save(self, force=False):
        """Write the preferences to disk; no-op under testing unless forced."""
        if testing and not force:
            return
        try:
            # context manager closes the handle even if config.write raises
            # (the old open/write/close leaked it on error)
            with open(self._filename, "w+") as f:
                self.config.write(f)
        except Exception:
            msg = _("Ghini can't save your user preferences. \n\nPlease "
                    "check the file permissions of your config file:\n %s") \
                % self._filename
            if bauble.gui is not None and bauble.gui.window is not None:
                import bauble.utils as utils
                utils.message_dialog(msg, type=gtk.MESSAGE_ERROR,
                                     parent=bauble.gui.window)
            else:
                logger.error(msg)
class Config(object):
    """A wrapper around RawConfigParser"""

    def __init__(self, version=None):
        """Use read() to read in an existing config file.

        version should be an int starting with 0 that gets incremented if
        you want to register a new upgrade function. If None, upgrade is
        disabled.
        """
        # _sorted_dict keeps sections/options in a deterministic order when
        # the file is written back out.
        self._config = ConfigParser(dict_type=_sorted_dict)
        self._version = version
        self._loaded_version = None   # version found by the last read()
        self._upgrade_funcs = []      # callbacks run when versions differ
        self._initial = {}            # section -> {option: initial value}

    def _do_upgrade(self, func):
        # Run one registered upgrade callback if the loaded file's version
        # differs from the current one.
        assert self._loaded_version is not None
        assert self._version is not None

        old_version = self._loaded_version
        new_version = self._version
        if old_version != new_version:
            print_d("Config upgrade: %d->%d (%r)" % (
                old_version, new_version, func))
            func(self, old_version, new_version)

    def get_version(self):
        """Get the version of the loaded config file (for testing only)

        Raises Error if no file was loaded or versioning is disabled.
        """
        if self._version is None:
            raise Error("Versioning disabled")

        if self._loaded_version is None:
            raise Error("No file loaded")

        return self._loaded_version

    def register_upgrade_function(self, function):
        """Register an upgrade function that gets called at each read()
        if the current config version and the loaded version don't match.

        Can also be registered after read was called.

        function(config, old_version: int, new_version: int) -> None
        """
        if self._version is None:
            raise Error("Versioning disabled")

        self._upgrade_funcs.append(function)
        # after read(), so upgrade now
        if self._loaded_version is not None:
            self._do_upgrade(function)
        # returned unchanged so this can be used as a decorator
        return function

    def set_inital(self, section, option, value):
        """Set an initial value for an option.

        The section must be added with add_section() first.

        Adds the value to the config and calling reset() will reset the
        value to it.

        NOTE(review): the name is misspelled ("inital") but is public
        interface, so it is not renamed here.
        """
        self.set(section, option, value)

        self._initial.setdefault(section, {})
        # (this option-level setdefault is redundant -- the assignment on
        # the next line overwrites it)
        self._initial[section].setdefault(option, {})
        self._initial[section][option] = value

    def reset(self, section, option):
        """Reset the value to the initial state"""
        value = self._initial[section][option]
        self.set(section, option, value)

    def options(self, section):
        """Returns a list of options available in the specified section."""
        return self._config.options(section)

    def get(self, *args):
        """get(section, option[, default]) -> str

        If default is not given, raises Error in case of an error
        """
        if len(args) == 3:
            try:
                return self._config.get(*args[:2])
            except Error:
                return args[-1]
        return self._config.get(*args)

    def getboolean(self, *args):
        """getboolean(section, option[, default]) -> bool

        If default is not given, raises Error in case of an error
        """
        if len(args) == 3:
            if not isinstance(args[-1], bool):
                raise ValueError
            try:
                return self._config.getboolean(*args[:2])
            # ValueError if the value found in the config file
            # does not match any string representation -> so catch it too
            except (ValueError, Error):
                return args[-1]
        return self._config.getboolean(*args)

    def getint(self, *args):
        """getint(section, option[, default]) -> int

        If default is not give, raises Error in case of an error
        """
        if len(args) == 3:
            if not isinstance(args[-1], int):
                raise ValueError
            try:
                return self._config.getint(*args[:2])
            except Error:
                return args[-1]
        return self._config.getint(*args)

    def getfloat(self, *args):
        """getfloat(section, option[, default]) -> float

        If default is not give, raises Error in case of an error
        """
        if len(args) == 3:
            if not isinstance(args[-1], float):
                raise ValueError
            try:
                return self._config.getfloat(*args[:2])
            except Error:
                return args[-1]
        return self._config.getfloat(*args)

    def getstringlist(self, *args):
        """getstringlist(section, option[, default]) -> list

        If default is not given, raises Error in case of an error.

        Gets a list of strings, using CSV to parse and delimit.
        """
        if len(args) == 3:
            if not isinstance(args[-1], list):
                raise ValueError
            try:
                value = self._config.get(*args[:2])
            except Error:
                return args[-1]
        else:
            value = self._config.get(*args)
        parser = csv.reader(
            [value], lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
        try:
            # Python 2 csv yields byte strings; decode back to unicode.
            vals = [v.decode('utf-8') for v in parser.next()]
        except (csv.Error, ValueError) as e:
            raise Error(e)
        return vals

    def setstringlist(self, section, option, values):
        """Saves a list of unicode strings using the csv module"""
        sw = StringIO()
        values = [unicode(v).encode('utf-8') for v in values]
        writer = csv.writer(sw, lineterminator='\n',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerow(values)
        self._config.set(section, option, sw.getvalue())

    def set(self, section, option, value):
        """Saves the string representation for the passed value

        Don't pass unicode, encode first.
        """
        # RawConfigParser only allows string values but doesn't
        # scream if they are not (and it only fails before the
        # first config save..)
        if not isinstance(value, str):
            value = str(value)
        self._config.set(section, option, value)

    def setdefault(self, section, option, default):
        """Like set but only sets the new value if the option
        isn't set before.
        """
        if not self._config.has_option(section, option):
            self._config.set(section, option, default)

    def write(self, filename):
        """Write config to filename.

        Can raise EnvironmentError
        """
        assert is_fsnative(filename)

        mkdir(os.path.dirname(filename))

        # temporary set the new version for saving
        if self._version is not None:
            self.add_section("__config__")
            self.set("__config__", "version", self._version)
        try:
            with atomic_save(filename, ".tmp", "wb") as fileobj:
                self._config.write(fileobj)
        finally:
            # restore the on-disk file's version in memory afterwards
            if self._loaded_version is not None:
                self.set("__config__", "version", self._loaded_version)

    def clear(self):
        """Remove all sections and initial values"""
        for section in self._config.sections():
            self._config.remove_section(section)
        self._initial.clear()

    def is_empty(self):
        """Whether the config has any sections"""
        return not self._config.sections()

    def read(self, filename):
        """Reads the config from `filename` if the file exists,
        otherwise does nothing

        Can raise EnvironmentError, Error.
        """
        parsed_filenames = self._config.read(filename)
        # don't upgrade if we just created a new config
        if parsed_filenames and self._version is not None:
            self._loaded_version = self.getint("__config__", "version", -1)
            for func in self._upgrade_funcs:
                self._do_upgrade(func)

    def sections(self):
        """Return a list of the sections available"""
        return self._config.sections()

    def has_option(self, section, option):
        """If the given section exists, and contains the given option"""
        return self._config.has_option(section, option)

    def remove_option(self, section, option):
        """Remove the specified option from the specified section

        Can raise Error.
        """
        return self._config.remove_option(section, option)

    def add_section(self, section):
        """Add a section named section to the instance if it not
        already exists."""
        if not self._config.has_section(section):
            self._config.add_section(section)
assert hostname assert port try: conn = mongokit.Connection(host=hostname, port=port) except: LOG.error("Failed to connect to MongoDB at %s:%s" % (hostname, port)) raise ## Register our objects with MongoKit conn.register([catalog.Collection, workload.Session]) ## Make sure that the databases that we need are there db_names = conn.database_names() for key in [ 'dataset_db', ]: # FIXME 'workload_db' ]: if not config.has_option(configutil.SECT_MONGODB, key): raise Exception("Missing the configuration option '%s.%s'" % (configutil.SECT_MONGODB, key)) elif not config.get(configutil.SECT_MONGODB, key): raise Exception("Empty configuration option '%s.%s'" % (configutil.SECT_MONGODB, key)) ## FOR ## ---------------------------------------------- ## MONGODB DATABASE RESET ## ---------------------------------------------- metadata_db = conn[config.get(configutil.SECT_MONGODB, 'metadata_db')] dataset_db = conn[config.get(configutil.SECT_MONGODB, 'dataset_db')] if args['reset']: LOG.warn("Dropping collections from %s and %s databases" %
def load_config(self, environ):
    """Load configuration options

    Options are read from a config file.

    Backwards compatibility:
        - if ConfigFile is not set, opts are loaded from http config
        - if ConfigFile is set, then the http config must not provide
          Koji options
        - In a future version we will load the default hub config
          regardless
        - all PythonOptions (except koji.web.ConfigFile) are now
          deprecated and support for them will disappear in a future
          version of Koji
    """
    # Legacy mod_python options, if the server provides them.
    modpy_opts = environ.get('modpy.opts', {})
    if 'modpy.opts' in environ:
        cf = modpy_opts.get('koji.web.ConfigFile', None)
        cfdir = modpy_opts.get('koji.web.ConfigDir', None)
        # to aid in the transition from PythonOptions to web.conf, we do
        # not check the config file by default, it must be configured
        if not cf and not cfdir:
            self.logger.warn(
                'Warning: configuring Koji via PythonOptions is deprecated. Use web.conf')
    else:
        # Normal path: file/dir locations from the WSGI environ (or defaults).
        cf = environ.get('koji.web.ConfigFile', '/etc/kojiweb/web.conf')
        cfdir = environ.get('koji.web.ConfigDir', '/etc/kojiweb/web.conf.d')

    # Collect the config-directory files first, then the single file, so the
    # single file is read last (RawConfigParser lets later files override).
    if cfdir:
        configs = koji.config_directory_contents(cfdir)
    else:
        configs = []
    if cf and os.path.isfile(cf):
        configs.append(cf)
    if configs:
        config = RawConfigParser()
        config.read(configs)
    elif modpy_opts:
        # presumably we are configured by modpy options
        config = None
    else:
        raise koji.GenericError, "Configuration missing"

    # Resolve every known option from either the config file or the legacy
    # modpy options, with type coercion per self.cfgmap.
    opts = {}
    for name, dtype, default in self.cfgmap:
        if config:
            key = ('web', name)
            if config.has_option(*key):
                if dtype == 'integer':
                    opts[name] = config.getint(*key)
                elif dtype == 'boolean':
                    opts[name] = config.getboolean(*key)
                else:
                    opts[name] = config.get(*key)
            else:
                opts[name] = default
        else:
            if modpy_opts.get(name, None) is not None:
                if dtype == 'integer':
                    opts[name] = int(modpy_opts.get(name))
                elif dtype == 'boolean':
                    opts[name] = modpy_opts.get(name).lower() in ('yes', 'on', 'true', '1')
                else:
                    opts[name] = modpy_opts.get(name)
            else:
                opts[name] = default

    if 'modpy.conf' in environ:
        debug = environ['modpy.conf'].get('PythonDebug', '0').lower()
        opts['PythonDebug'] = (debug in ['yes', 'on', 'true', '1'])

    # Wrap the secret so it is not exposed in reprs/logs.
    opts['Secret'] = koji.util.HiddenValue(opts['Secret'])
    self.options = opts
    return opts
def has_option(self, section, option):
    """Return True when *section* exists and defines *option*.

    Thin delegate to the RawConfigParser implementation on this instance.
    """
    result = RawConfigParser.has_option(self, section, option)
    return result
def __init__(self, log, newinstall=False, simulation=False, simulationfile=None, message=None, feedback=None, ConfigFilePath=None):
    """Base controller init: set defaults, then overlay [GenMon] settings
    read from genmon.conf under ConfigFilePath (default /etc/)."""
    super(GeneratorController, self).__init__(simulation=simulation)
    self.log = log
    self.NewInstall = newinstall
    self.Simulation = simulation
    self.SimulationFile = simulationfile
    self.FeedbackPipe = feedback
    self.MessagePipe = message
    if ConfigFilePath == None:
        self.ConfigFilePath = "/etc/"
    else:
        self.ConfigFilePath = ConfigFilePath

    self.Address = None
    self.SerialPort = "/dev/serial0"
    self.BaudRate = 9600
    self.ModBus = None
    self.InitComplete = False
    self.IsStopping = False
    self.InitCompleteEvent = threading.Event()  # Event to signal init complete
    self.CheckForAlarmEvent = threading.Event()  # Event to signal checking for alarm
    self.Registers = {}         # dict for registers and values
    self.NotChanged = 0         # stats for registers
    self.Changed = 0            # stats for registers
    self.TotalChanged = 0.0     # ratio of changed registers
    self.EnableDebug = False    # Used for enabling debugging
    self.UseMetric = False
    self.OutageLog = os.path.dirname(
        os.path.dirname(os.path.realpath(__file__))) + "/outage.txt"
    self.PowerLogMaxSize = 15   # 15 MB max size
    self.PowerLog = os.path.dirname(
        os.path.dirname(os.path.realpath(__file__))) + "/kwlog.txt"
    self.TileList = []          # Tile list for GUI

    if self.Simulation:
        self.LogLocation = "./"
    else:
        self.LogLocation = "/var/log/"

    self.bDisplayUnknownSensors = False
    self.SlowCPUOptimization = False
    self.UtilityVoltsMin = 0    # Minimum reported utility voltage above threshold
    self.UtilityVoltsMax = 0    # Maximum reported utility voltage above pickup
    self.SystemInOutage = False     # Flag to signal utility power is out
    self.TransferActive = False     # Flag to signal transfer switch is allowing gen supply power
    self.SiteName = "Home"
    # The values "Unknown" are checked to validate conf file items are found
    self.FuelType = "Unknown"
    self.NominalFreq = "Unknown"
    self.NominalRPM = "Unknown"
    self.NominalKW = "Unknown"
    self.Model = "Unknown"
    self.EngineDisplacement = "Unknown"
    self.TankSize = None
    self.ProgramStartTime = datetime.datetime.now()  # used for com metrics
    self.OutageStartTime = self.ProgramStartTime  # if these two are the same, no outage has occurred
    self.LastOutageDuration = self.OutageStartTime - self.OutageStartTime

    # Read conf entries common to all controllers
    ConfigSection = "GenMon"
    try:
        # read config file
        config = RawConfigParser()
        # config parser reads from current directory, when running from a cron tab this is
        # not defined so we specify the full path
        config.read(self.ConfigFilePath + 'genmon.conf')

        # getfloat() raises an exception if the value is not a float
        # getint() and getboolean() also do this for their respective types
        if config.has_option(ConfigSection, 'sitename'):
            self.SiteName = config.get(ConfigSection, 'sitename')
        if config.has_option(ConfigSection, 'port'):
            self.SerialPort = config.get(ConfigSection, 'port')
        if config.has_option(ConfigSection, 'loglocation'):
            self.LogLocation = config.get(ConfigSection, 'loglocation')
        if config.has_option(ConfigSection, 'optimizeforslowercpu'):
            self.SlowCPUOptimization = config.getboolean(
                ConfigSection, 'optimizeforslowercpu')
        # optional config parameters, by default the software will attempt to auto-detect the controller
        # this setting will override the auto detect
        if config.has_option(ConfigSection, 'metricweather'):
            self.UseMetric = config.getboolean(ConfigSection, 'metricweather')
        if config.has_option(ConfigSection, 'enabledebug'):
            self.EnableDebug = config.getboolean(ConfigSection, 'enabledebug')
        if config.has_option(ConfigSection, 'displayunknown'):
            self.bDisplayUnknownSensors = config.getboolean(
                ConfigSection, 'displayunknown')
        if config.has_option(ConfigSection, 'outagelog'):
            self.OutageLog = config.get(ConfigSection, 'outagelog')
        if config.has_option(ConfigSection, 'kwlog'):
            self.PowerLog = config.get(ConfigSection, 'kwlog')
        if config.has_option(ConfigSection, 'kwlogmax'):
            self.PowerLogMaxSize = config.getint(ConfigSection, 'kwlogmax')
        if config.has_option(ConfigSection, 'nominalfrequency'):
            self.NominalFreq = config.get(ConfigSection, 'nominalfrequency')
        if config.has_option(ConfigSection, 'nominalRPM'):
            self.NominalRPM = config.get(ConfigSection, 'nominalRPM')
        if config.has_option(ConfigSection, 'nominalKW'):
            self.NominalKW = config.get(ConfigSection, 'nominalKW')
        if config.has_option(ConfigSection, 'model'):
            self.Model = config.get(ConfigSection, 'model')
        if config.has_option(ConfigSection, 'fueltype'):
            self.FuelType = config.get(ConfigSection, 'fueltype')
        if config.has_option(ConfigSection, 'tanksize'):
            self.TankSize = config.get(ConfigSection, 'tanksize')
    except Exception as e1:
        # NOTE(review): `reload` here is the Python 2 builtin function
        # (always truthy), so `not reload` is always False and FatalError is
        # never reached — a `reload` flag argument was presumably intended;
        # confirm before changing.
        if not reload:
            self.FatalError(
                "Missing config file or config file entries: " + str(e1))
        else:
            self.LogErrorLine("Error reloading config file" + str(e1))
class ParamStore(object):
    """Thread-safe, lazily persisted key/value store backed by an INI file.

    The file is parsed into a RawConfigParser at construction time.
    Mutations only mark the store dirty; flush() (also invoked from
    __del__) writes the pending state back to disk.
    """

    def __init__(self, root_dir, file_name):
        """Load (or start) the store at root_dir/file_name.

        Raises RuntimeError if root_dir is not an existing directory.
        """
        self._lock = Lock()
        with self._lock:
            if not os.path.isdir(root_dir):
                raise RuntimeError(
                    'Directory "' + root_dir + '" does not exist.')
            self._path = os.path.join(root_dir, file_name)
            self._dirty = False
            # open config file (read() silently ignores a missing file)
            self._config = RawConfigParser()
            self._config.read(self._path)

    def __del__(self):
        # Best-effort persistence of unsaved changes at garbage collection.
        self.flush()

    def flush(self):
        """Write pending changes to disk; no-op when nothing changed."""
        if not self._dirty:
            return
        with self._lock:
            self._dirty = False
            # Context manager guarantees the handle is closed even if
            # RawConfigParser.write() raises (the original leaked it).
            with open(self._path, 'w') as of:
                self._config.write(of)

    def get(self, section, option, default=None):
        """Get a parameter value and return a string.

        If default is specified and section or option are not defined in the
        file, they are created and set to default, which is then the return
        value.
        """
        with self._lock:
            if not self._config.has_option(section, option):
                if default is not None:
                    self._set(section, option, default)
                return default
            return self._config.get(section, option)

    def get_datetime(self, section, option, default=None):
        """Like get(), but parse the stored string into a datetime."""
        # NOTE(review): relies on module-level safestrptime() defined
        # elsewhere in this project.
        result = self.get(section, option, default)
        if result:
            return safestrptime(result)
        return result

    def set(self, section, option, value):
        """Set option in section to string value."""
        with self._lock:
            self._set(section, option, value)

    def _set(self, section, option, value):
        # Internal helper: caller must already hold self._lock.
        if not self._config.has_section(section):
            self._config.add_section(section)
        elif (self._config.has_option(section, option) and
                self._config.get(section, option) == value):
            # Unchanged value: skip the write and keep the store clean.
            return
        self._config.set(section, option, value)
        self._dirty = True

    def unset(self, section, option):
        """Remove option from section."""
        with self._lock:
            if not self._config.has_section(section):
                return
            if self._config.has_option(section, option):
                self._config.remove_option(section, option)
                self._dirty = True
            # Drop the section entirely once its last option is gone.
            if not self._config.options(section):
                self._config.remove_section(section)
                self._dirty = True
# Load the list of synchronous messages: each INI section name is the name
# of one sync message.
log(2, 'Reading sync message list')
parser = RawConfigParser()
# NOTE(review): the file object passed to readfp() is never closed here
# (or below for the metadata file) — relies on GC to release the handle.
parser.readfp(open(options.syncMsgList))
syncMsgList = parser.sections()

# Read message metadata. Right now we only have 'segment_capacity'
# for the standard segment size used for serialization.
log(2, 'Reading message metadata...')
msgMetadataConfig = RawConfigParser()
msgMetadataConfig.readfp(open(options.msgMetadata))

# Map message name -> its declared segment capacity (string, as read).
segmentCapacityDict = {}
for msgName in msgMetadataConfig.sections():
    if msgMetadataConfig.has_option(msgName, 'segment_capacity'):
        capacity = msgMetadataConfig.get(msgName, 'segment_capacity')
        segmentCapacityDict[msgName] = capacity

# First pass: parse and type-check all protocols
for f in files:
    log(2, os.path.basename(f))
    filename = normalizedFilename(f)
    if f == '-':
        # '-' means read the spec from standard input.
        fd = sys.stdin
    else:
        fd = open(f)
    specstring = fd.read()
    # NOTE(review): when f == '-' this closes sys.stdin as well.
    fd.close()
class AwsCredentials(object):
    """Wraps a RawConfigParser to treat a section named 'default' as a
    normal section.

    RawConfigParser gives a DEFAULT-named section special semantics (its
    values are inherited by every other section), so the real 'default'
    heading is transparently renamed to a placeholder while the file is
    held in memory and renamed back on write.
    """

    __REAL_DEFAULT_SECTION_NAME = 'default'
    __TEMP_DEFAULT_SECTION_NAME = constant.DEFAULT_SECTION_NAME
    __REAL_DEFAULT_SECTION_HEADING = '[' + __REAL_DEFAULT_SECTION_NAME + ']'
    __TEMP_DEFAULT_SECTION_HEADING = '[' + __TEMP_DEFAULT_SECTION_NAME + ']'

    def __init__(self):
        self.__config = RawConfigParser()

    def read(self, path):
        """Load the credentials file at *path*, masking the real
        'default' heading before it reaches the parser."""
        with open(path, 'r') as file:
            content = file.read()
        content = content.replace(self.__REAL_DEFAULT_SECTION_HEADING,
            self.__TEMP_DEFAULT_SECTION_HEADING)
        content_io = StringIO(content)
        self.__config.readfp(content_io)

    def write(self, path):
        """Serialize the config to *path*, restoring the real heading."""
        content_io = StringIO()
        self.__config.write(content_io)
        content = content_io.getvalue()
        content = content.replace(self.__TEMP_DEFAULT_SECTION_HEADING,
            self.__REAL_DEFAULT_SECTION_HEADING)
        with open(path, 'w') as file:
            file.write(content)

    def __to_temp_name(self, name):
        # Map the on-disk section name to its in-memory placeholder.
        if name == self.__REAL_DEFAULT_SECTION_NAME:
            name = self.__TEMP_DEFAULT_SECTION_NAME
        return name

    def __to_real_name(self, name):
        # Map the in-memory placeholder back to the on-disk name.
        if name == self.__TEMP_DEFAULT_SECTION_NAME:
            name = self.__REAL_DEFAULT_SECTION_NAME
        return name

    def sections(self):
        sections = self.__config.sections()
        sections = [self.__to_real_name(section) for section in sections]
        return sections

    def add_section(self, section):
        section = self.__to_temp_name(section)
        self.__config.add_section(section)

    def has_section(self, section):
        section = self.__to_temp_name(section)
        return self.__config.has_section(section)

    def options(self, section):
        section = self.__to_temp_name(section)
        return self.__config.options(section)

    def has_option(self, section, option):
        section = self.__to_temp_name(section)
        return self.__config.has_option(section, option)

    def get(self, section, option):
        section = self.__to_temp_name(section)
        return self.__config.get(section, option)

    def items(self, section):
        section = self.__to_temp_name(section)
        return self.__config.items(section)

    def set(self, section, option, value):
        section = self.__to_temp_name(section)
        self.__config.set(section, option, value)

    def remove_option(self, section, option):
        section = self.__to_temp_name(section)
        # BUG FIX: the original passed `section` twice
        # (remove_option(section, section, option)), which raised
        # TypeError on every call — remove_option takes (section, option).
        return self.__config.remove_option(section, option)

    def remove_section(self, section):
        section = self.__to_temp_name(section)
        return self.__config.remove_section(section)
# Load the user configuration; cfg_path and DEFAULTS are defined elsewhere
# in this module.
config = RawConfigParser()
config.optionxform = str # Make keys case-sensitive
#TODO: Maybe switch to two config files so I can have only the keys in the
# keymap case-sensitive?
config.read(cfg_path)
# Track whether the loaded config needs to be written back out.
dirty = False

if not config.has_section('general'):
    config.add_section('general')
    # Change this if you make backwards-incompatible changes to the
    # section and key naming in the config file.
    config.set('general', 'cfg_schema', 1)
    dirty = True

# Fill in any missing 'general' options from the defaults.
for key, val in DEFAULTS['general'].items():
    if not config.has_option('general', key):
        config.set('general', key, str(val))
        dirty = True

# Migrate the legacy space-separated ModMask value ("Ctrl Alt") to the
# bracketed format ("<Ctrl><Alt>"); skipped if already bracketed.
mk_raw = modkeys = config.get('general', 'ModMask')
if ' ' in modkeys.strip() and not '<' in modkeys:
    modkeys = '<%s>' % '><'.join(modkeys.strip().split())
    logging.info("Updating modkeys format:\n %r --> %r", mk_raw, modkeys)
    config.set('general', 'ModMask', modkeys)
    dirty = True

# Either load the keybindings or use and save the defaults
if config.has_section('keys'):
    keymap = dict(config.items('keys'))
else:
    keymap = DEFAULTS['keys']
pprint.pprint(matches) warnings.warn("More than one (%d) match to %s. Using first match (%s)" % ( len(matches), fname, first)) return first # Open config file cp = HandyConfigParser("") cp.read(usr_config_file) if not cp.has_section('windows'): cp.add_section('windows') # Find paths clibs = {'libzmq_include': 'zmq.h', 'libzmq_static': 'zmq.lib', 'czmq_include': 'czmq.h', 'czmq_static': 'czmq.lib'} # , for opt, fname in clibs.items(): if not cp.has_option('windows', opt): fpath = locate(fname) if fpath: print('located %s: %s' % (fname, fpath)) cp.set('windows', opt, fpath) else: warnings.warn("Could not locate %s. Please set %s option in %s to correct path." % (fname, opt, usr_config_file)) with open(usr_config_file, 'w') as fd: cp.write(fd) # Set coverage options in .coveragerc if cov_installed: # Read options covrc = '.coveragerc'
class Config(object):
    """Persistent application settings stored via RawConfigParser.

    Every setting has a set_*/get_* pair delegating to the generic
    _set/_get/_get_bool/_get_int helpers; getters fall back to
    module-level DEFAULT_* constants when an option is missing or
    malformed.  File paths are stored relative to CONFIG_DIR whenever
    they lie below it.
    """

    def __init__(self):
        self._config = RawConfigParser()
        # A convenient place for other code to store a file name.
        self.target_file = None

    def _path_to_config_value(self, path):
        '''
        Return config value for a path.

        If the path is below CONFIG_DIR, a relative path to it is returned,
        otherwise, an absolute path is returned. Note: relative path are
        automatically assumed to be relative to CONFIG_DIR.
        '''
        path = os.path.realpath(os.path.join(CONFIG_DIR, path))
        config_dir = os.path.realpath(CONFIG_DIR) + os.sep
        if path.startswith(config_dir):
            # Strip the CONFIG_DIR prefix to store a relative path.
            return path[len(config_dir):]
        else:
            return path

    def _path_from_config_value(self, value):
        '''
        Return a path from a config value.

        If value is an absolute path, it is returned as is otherwise, an
        absolute path relative to CONFIG_DIR is returned.
        '''
        return os.path.realpath(os.path.join(CONFIG_DIR, value))

    def load(self, fp):
        # Replace current state with the parsed contents of file object fp;
        # parser errors are re-raised as InvalidConfigurationError.
        self._config = RawConfigParser()
        try:
            self._config.readfp(fp)
        except ConfigParser.Error as e:
            raise InvalidConfigurationError(str(e))

    def clear(self):
        # Drop all settings by swapping in a fresh parser.
        self._config = RawConfigParser()

    def save(self, fp):
        # Serialize current state to file object fp.
        self._config.write(fp)

    def clone(self):
        # Deep-copy by round-tripping through an in-memory INI dump.
        f = StringIO()
        self.save(f)
        c = Config()
        f.seek(0, 0)
        c.load(f)
        return c

    def set_machine_type(self, machine_type):
        self._set(MACHINE_CONFIG_SECTION, MACHINE_TYPE_OPTION, machine_type)

    def get_machine_type(self):
        return self._get(MACHINE_CONFIG_SECTION, MACHINE_TYPE_OPTION,
                         DEFAULT_MACHINE_TYPE)

    def set_machine_specific_options(self, machine_name, options):
        # Replace the machine's whole section with the given option dict.
        if self._config.has_section(machine_name):
            self._config.remove_section(machine_name)
        self._config.add_section(machine_name)
        for k, v in options.items():
            self._config.set(machine_name, k, str(v))

    def get_machine_specific_options(self, machine_name):
        # Return the machine's options converted to their declared types,
        # falling back to each option's default on conversion failure.
        def convert(p, v):
            # p is (default_value, converter); bad values yield the default.
            try:
                return p[1](v)
            except ValueError:
                return p[0]
        machine = machine_registry.get(machine_name)
        info = machine.get_option_info()
        defaults = {k: v[0] for k, v in info.items()}
        if self._config.has_section(machine_name):
            # Only options that the machine actually declares are read.
            options = {
                o: self._config.get(machine_name, o)
                for o in self._config.options(machine_name)
                if o in info
            }
            options = {k: convert(info[k], v) for k, v in options.items()}
            defaults.update(options)
        return defaults

    def set_dictionary_file_names(self, filenames):
        # Rewrite the dictionary section as dictionary_file1..N entries.
        if self._config.has_section(DICTIONARY_CONFIG_SECTION):
            self._config.remove_section(DICTIONARY_CONFIG_SECTION)
        self._config.add_section(DICTIONARY_CONFIG_SECTION)
        filenames = [self._path_to_config_value(path) for path in filenames]
        for ordinal, filename in enumerate(filenames, start=1):
            option = DICTIONARY_FILE_OPTION + str(ordinal)
            self._config.set(DICTIONARY_CONFIG_SECTION, option, filename)

    def get_dictionary_file_names(self):
        filenames = []
        if self._config.has_section(DICTIONARY_CONFIG_SECTION):
            # NOTE(review): filter(...).sort() only works on Python 2,
            # where filter() returns a list; on Python 3 it returns an
            # iterator and this would raise AttributeError.
            options = filter(lambda x: x.startswith(DICTIONARY_FILE_OPTION),
                             self._config.options(DICTIONARY_CONFIG_SECTION))
            options.sort(key=_dict_entry_key)
            filenames = [
                self._config.get(DICTIONARY_CONFIG_SECTION, o)
                for o in options
            ]
        if not filenames:
            filenames = DEFAULT_DICTIONARIES
        filenames = [self._path_from_config_value(path) for path in filenames]
        return filenames

    def set_log_file_name(self, filename):
        filename = self._path_to_config_value(filename)
        self._set(LOGGING_CONFIG_SECTION, LOG_FILE_OPTION, filename)

    def get_log_file_name(self):
        filename = self._get(LOGGING_CONFIG_SECTION, LOG_FILE_OPTION,
                             DEFAULT_LOG_FILE)
        return self._path_from_config_value(filename)

    def set_enable_stroke_logging(self, log):
        self._set(LOGGING_CONFIG_SECTION, ENABLE_STROKE_LOGGING_OPTION, log)

    def get_enable_stroke_logging(self):
        return self._get_bool(LOGGING_CONFIG_SECTION,
                              ENABLE_STROKE_LOGGING_OPTION,
                              DEFAULT_ENABLE_STROKE_LOGGING)

    def set_enable_translation_logging(self, log):
        self._set(LOGGING_CONFIG_SECTION, ENABLE_TRANSLATION_LOGGING_OPTION,
                  log)

    def get_enable_translation_logging(self):
        return self._get_bool(LOGGING_CONFIG_SECTION,
                              ENABLE_TRANSLATION_LOGGING_OPTION,
                              DEFAULT_ENABLE_TRANSLATION_LOGGING)

    def set_auto_start(self, b):
        self._set(MACHINE_CONFIG_SECTION, MACHINE_AUTO_START_OPTION, b)

    def get_auto_start(self):
        return self._get_bool(MACHINE_CONFIG_SECTION,
                              MACHINE_AUTO_START_OPTION,
                              DEFAULT_MACHINE_AUTO_START)

    def set_show_stroke_display(self, b):
        self._set(STROKE_DISPLAY_SECTION, STROKE_DISPLAY_SHOW_OPTION, b)

    def get_show_stroke_display(self):
        return self._get_bool(STROKE_DISPLAY_SECTION,
                              STROKE_DISPLAY_SHOW_OPTION,
                              DEFAULT_STROKE_DISPLAY_SHOW)

    def set_show_suggestions_display(self, b):
        self._set(SUGGESTIONS_DISPLAY_SECTION, SUGGESTIONS_DISPLAY_SHOW_OPTION,
                  b)

    def get_show_suggestions_display(self):
        return self._get_bool(SUGGESTIONS_DISPLAY_SECTION,
                              SUGGESTIONS_DISPLAY_SHOW_OPTION,
                              DEFAULT_SUGGESTIONS_DISPLAY_SHOW)

    def get_space_placement(self):
        return self._get(OUTPUT_CONFIG_SECTION,
                         OUTPUT_CONFIG_SPACE_PLACEMENT_OPTION,
                         DEFAULT_OUTPUT_CONFIG_SPACE_PLACEMENT)

    def set_space_placement(self, s):
        self._set(OUTPUT_CONFIG_SECTION, OUTPUT_CONFIG_SPACE_PLACEMENT_OPTION,
                  s)

    def set_stroke_display_on_top(self, b):
        self._set(STROKE_DISPLAY_SECTION, STROKE_DISPLAY_ON_TOP_OPTION, b)

    def get_stroke_display_on_top(self):
        return self._get_bool(STROKE_DISPLAY_SECTION,
                              STROKE_DISPLAY_ON_TOP_OPTION,
                              DEFAULT_STROKE_DISPLAY_ON_TOP)

    def set_suggestions_display_on_top(self, b):
        self._set(SUGGESTIONS_DISPLAY_SECTION,
                  SUGGESTIONS_DISPLAY_ON_TOP_OPTION, b)

    def get_suggestions_display_on_top(self):
        return self._get_bool(SUGGESTIONS_DISPLAY_SECTION,
                              SUGGESTIONS_DISPLAY_ON_TOP_OPTION,
                              DEFAULT_SUGGESTIONS_DISPLAY_ON_TOP)

    def set_stroke_display_style(self, s):
        self._set(STROKE_DISPLAY_SECTION, STROKE_DISPLAY_STYLE_OPTION, s)

    def get_stroke_display_style(self):
        return self._get(STROKE_DISPLAY_SECTION, STROKE_DISPLAY_STYLE_OPTION,
                         DEFAULT_STROKE_DISPLAY_STYLE)

    # Window-geometry settings: each frame persists its position (and for
    # the config frame, its size) as integer options.

    def set_stroke_display_x(self, x):
        self._set(STROKE_DISPLAY_SECTION, STROKE_DISPLAY_X_OPTION, x)

    def get_stroke_display_x(self):
        return self._get_int(STROKE_DISPLAY_SECTION, STROKE_DISPLAY_X_OPTION,
                             DEFAULT_STROKE_DISPLAY_X)

    def set_stroke_display_y(self, y):
        self._set(STROKE_DISPLAY_SECTION, STROKE_DISPLAY_Y_OPTION, y)

    def get_stroke_display_y(self):
        return self._get_int(STROKE_DISPLAY_SECTION, STROKE_DISPLAY_Y_OPTION,
                             DEFAULT_STROKE_DISPLAY_Y)

    def set_suggestions_display_x(self, x):
        self._set(SUGGESTIONS_DISPLAY_SECTION, SUGGESTIONS_DISPLAY_X_OPTION, x)

    def get_suggestions_display_x(self):
        return self._get_int(SUGGESTIONS_DISPLAY_SECTION,
                             SUGGESTIONS_DISPLAY_X_OPTION,
                             DEFAULT_SUGGESTIONS_DISPLAY_X)

    def set_suggestions_display_y(self, y):
        self._set(SUGGESTIONS_DISPLAY_SECTION, SUGGESTIONS_DISPLAY_Y_OPTION, y)

    def get_suggestions_display_y(self):
        return self._get_int(SUGGESTIONS_DISPLAY_SECTION,
                             SUGGESTIONS_DISPLAY_Y_OPTION,
                             DEFAULT_SUGGESTIONS_DISPLAY_Y)

    def set_config_frame_x(self, x):
        self._set(CONFIG_FRAME_SECTION, CONFIG_FRAME_X_OPTION, x)

    def get_config_frame_x(self):
        return self._get_int(CONFIG_FRAME_SECTION, CONFIG_FRAME_X_OPTION,
                             DEFAULT_CONFIG_FRAME_X)

    def set_config_frame_y(self, y):
        self._set(CONFIG_FRAME_SECTION, CONFIG_FRAME_Y_OPTION, y)

    def get_config_frame_y(self):
        return self._get_int(CONFIG_FRAME_SECTION, CONFIG_FRAME_Y_OPTION,
                             DEFAULT_CONFIG_FRAME_Y)

    def set_config_frame_width(self, width):
        self._set(CONFIG_FRAME_SECTION, CONFIG_FRAME_WIDTH_OPTION, width)

    def get_config_frame_width(self):
        return self._get_int(CONFIG_FRAME_SECTION, CONFIG_FRAME_WIDTH_OPTION,
                             DEFAULT_CONFIG_FRAME_WIDTH)

    def set_config_frame_height(self, height):
        self._set(CONFIG_FRAME_SECTION, CONFIG_FRAME_HEIGHT_OPTION, height)

    def get_config_frame_height(self):
        return self._get_int(CONFIG_FRAME_SECTION, CONFIG_FRAME_HEIGHT_OPTION,
                             DEFAULT_CONFIG_FRAME_HEIGHT)

    def set_main_frame_x(self, x):
        self._set(MAIN_FRAME_SECTION, MAIN_FRAME_X_OPTION, x)

    def get_main_frame_x(self):
        return self._get_int(MAIN_FRAME_SECTION, MAIN_FRAME_X_OPTION,
                             DEFAULT_MAIN_FRAME_X)

    def set_main_frame_y(self, y):
        self._set(MAIN_FRAME_SECTION, MAIN_FRAME_Y_OPTION, y)

    def get_main_frame_y(self):
        return self._get_int(MAIN_FRAME_SECTION, MAIN_FRAME_Y_OPTION,
                             DEFAULT_MAIN_FRAME_Y)

    def set_translation_frame_x(self, x):
        self._set(TRANSLATION_FRAME_SECTION, TRANSLATION_FRAME_X_OPTION, x)

    def get_translation_frame_x(self):
        return self._get_int(TRANSLATION_FRAME_SECTION,
                             TRANSLATION_FRAME_X_OPTION,
                             DEFAULT_TRANSLATION_FRAME_X)

    def set_translation_frame_y(self, y):
        self._set(TRANSLATION_FRAME_SECTION, TRANSLATION_FRAME_Y_OPTION, y)

    def get_translation_frame_y(self):
        return self._get_int(TRANSLATION_FRAME_SECTION,
                             TRANSLATION_FRAME_Y_OPTION,
                             DEFAULT_TRANSLATION_FRAME_Y)

    def set_lookup_frame_x(self, x):
        self._set(LOOKUP_FRAME_SECTION, LOOKUP_FRAME_X_OPTION, x)

    def get_lookup_frame_x(self):
        return self._get_int(LOOKUP_FRAME_SECTION, LOOKUP_FRAME_X_OPTION,
                             DEFAULT_LOOKUP_FRAME_X)

    def set_lookup_frame_y(self, y):
        self._set(LOOKUP_FRAME_SECTION, LOOKUP_FRAME_Y_OPTION, y)

    def get_lookup_frame_y(self):
        return self._get_int(LOOKUP_FRAME_SECTION, LOOKUP_FRAME_Y_OPTION,
                             DEFAULT_LOOKUP_FRAME_Y)

    def set_dictionary_editor_frame_x(self, x):
        self._set(DICTIONARY_EDITOR_FRAME_SECTION,
                  DICTIONARY_EDITOR_FRAME_X_OPTION, x)

    def get_dictionary_editor_frame_x(self):
        return self._get_int(DICTIONARY_EDITOR_FRAME_SECTION,
                             DICTIONARY_EDITOR_FRAME_X_OPTION,
                             DEFAULT_DICTIONARY_EDITOR_FRAME_X)

    def set_dictionary_editor_frame_y(self, y):
        self._set(DICTIONARY_EDITOR_FRAME_SECTION,
                  DICTIONARY_EDITOR_FRAME_Y_OPTION, y)

    def get_dictionary_editor_frame_y(self):
        return self._get_int(DICTIONARY_EDITOR_FRAME_SECTION,
                             DICTIONARY_EDITOR_FRAME_Y_OPTION,
                             DEFAULT_DICTIONARY_EDITOR_FRAME_Y)

    def set_serial_config_frame_x(self, x):
        self._set(SERIAL_CONFIG_FRAME_SECTION, SERIAL_CONFIG_FRAME_X_OPTION,
                  x)

    def get_serial_config_frame_x(self):
        return self._get_int(SERIAL_CONFIG_FRAME_SECTION,
                             SERIAL_CONFIG_FRAME_X_OPTION,
                             DEFAULT_SERIAL_CONFIG_FRAME_X)

    def set_serial_config_frame_y(self, y):
        self._set(SERIAL_CONFIG_FRAME_SECTION, SERIAL_CONFIG_FRAME_Y_OPTION,
                  y)

    def get_serial_config_frame_y(self):
        return self._get_int(SERIAL_CONFIG_FRAME_SECTION,
                             SERIAL_CONFIG_FRAME_Y_OPTION,
                             DEFAULT_SERIAL_CONFIG_FRAME_Y)

    def set_keyboard_config_frame_x(self, x):
        self._set(KEYBOARD_CONFIG_FRAME_SECTION,
                  KEYBOARD_CONFIG_FRAME_X_OPTION, x)

    def get_keyboard_config_frame_x(self):
        return self._get_int(KEYBOARD_CONFIG_FRAME_SECTION,
                             KEYBOARD_CONFIG_FRAME_X_OPTION,
                             DEFAULT_KEYBOARD_CONFIG_FRAME_X)

    def set_keyboard_config_frame_y(self, y):
        self._set(KEYBOARD_CONFIG_FRAME_SECTION,
                  KEYBOARD_CONFIG_FRAME_Y_OPTION, y)

    def get_keyboard_config_frame_y(self):
        return self._get_int(KEYBOARD_CONFIG_FRAME_SECTION,
                             KEYBOARD_CONFIG_FRAME_Y_OPTION,
                             DEFAULT_KEYBOARD_CONFIG_FRAME_Y)

    def _set(self, section, option, value):
        # Store str(value), creating the section on demand.
        if not self._config.has_section(section):
            self._config.add_section(section)
        self._config.set(section, option, str(value))

    def _get(self, section, option, default):
        # Return the raw string value, or default when absent.
        if self._config.has_option(section, option):
            return self._config.get(section, option)
        return default

    def _get_bool(self, section, option, default):
        # Malformed booleans fall back to default instead of raising.
        try:
            if self._config.has_option(section, option):
                return self._config.getboolean(section, option)
        except ValueError:
            pass
        return default

    def _get_int(self, section, option, default):
        # Malformed integers fall back to default instead of raising.
        try:
            if self._config.has_option(section, option):
                return self._config.getint(section, option)
        except ValueError:
            pass
        return default
class Config(object):
    """A wrapper around RawConfigParser.

    Provides a ``defaults`` attribute of the same type which can be used
    to set default values.
    """

    def __init__(self, version=None, _defaults=True):
        """Use read() to read in an existing config file.

        version should be an int starting with 0 that gets incremented if
        you want to register a new upgrade function. If None, upgrade
        is disabled.
        """

        # _sorted_dict keeps sections/options in a stable order on write.
        self._config = ConfigParser(dict_type=_sorted_dict)
        self.defaults = None
        if _defaults:
            # _defaults=False prevents infinite recursion here.
            self.defaults = Config(_defaults=False)
        self._version = version
        self._loaded_version = None
        self._upgrade_funcs = []

    def _do_upgrade(self, func):
        # Run one upgrade function iff the on-disk version lags behind.
        assert self._loaded_version is not None
        assert self._version is not None

        old_version = self._loaded_version
        new_version = self._version
        if old_version != new_version:
            print_d("Config upgrade: %d->%d (%r)" % (
                old_version, new_version, func))
            func(self, old_version, new_version)

    def get_version(self):
        """Get the version of the loaded config file
        (for testing only)

        Raises Error if no file was loaded or versioning is disabled.
        """

        if self._version is None:
            raise Error("Versioning disabled")

        if self._loaded_version is None:
            raise Error("No file loaded")

        return self._loaded_version

    def register_upgrade_function(self, function):
        """Register an upgrade function that gets called at each read()
        if the current config version and the loaded version don't match.

        Can also be registered after read was called.

        function(config, old_version: int, new_version: int) -> None
        """

        if self._version is None:
            raise Error("Versioning disabled")

        self._upgrade_funcs.append(function)
        # after read(), so upgrade now
        if self._loaded_version is not None:
            self._do_upgrade(function)
        return function

    def reset(self, section, option):
        """Reset the value to the default state"""

        assert self.defaults is not None
        # Removing the explicit value makes lookups fall through to
        # self.defaults.
        self._config.remove_option(section, option)

    def options(self, section):
        """Returns a list of options available in the specified section."""

        try:
            options = self._config.options(section)
        except NoSectionError:
            if self.defaults:
                return self.defaults.options(section)
            raise
        else:
            # Merge in default-only options, de-duplicated.
            if self.defaults:
                try:
                    options.extend(self.defaults.options(section))
                    options = list_unique(options)
                except NoSectionError:
                    pass
            return options

    def get(self, section, option, default=_DEFAULT):
        """get(section, option[, default]) -> str

        If default is not given or set, raises Error in case of an error
        """

        try:
            return self._config.get(section, option)
        except Error:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.get(section, option)
                    except Error:
                        pass
                raise
            return default

    def gettext(self, *args, **kwargs):
        # Like get(), but guarantees the result is valid unicode text.
        value = self.get(*args, **kwargs)
        if PY2:
            value = value.decode("utf-8")
        else:
            # make sure there are no surrogates
            value.encode("utf-8")
        return value

    def getbytes(self, section, option, default=_DEFAULT):
        # Like get(), but returns bytes; surrogateescape reverses the
        # decoding performed in read() on Python 3.
        try:
            value = self._config.get(section, option)
            if PY3:
                value = value.encode("utf-8", "surrogateescape")
            return value
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getbytes(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getboolean(self, section, option, default=_DEFAULT):
        """getboolean(section, option[, default]) -> bool

        If default is not given or set, raises Error in case of an error
        """

        try:
            return self._config.getboolean(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getboolean(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getint(self, section, option, default=_DEFAULT):
        """getint(section, option[, default]) -> int

        If default is not give or set, raises Error in case of an error
        """

        try:
            # getfloat() then int() so stored float strings ("1.5") are
            # accepted and truncated rather than raising.
            return int(self._config.getfloat(section, option))
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getint(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getfloat(self, section, option, default=_DEFAULT):
        """getfloat(section, option[, default]) -> float

        If default is not give or set, raises Error in case of an error
        """

        try:
            return self._config.getfloat(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getfloat(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getstringlist(self, section, option, default=_DEFAULT):
        """getstringlist(section, option[, default]) -> list

        If default is not given or set, raises Error in case of an error.

        Gets a list of strings, using CSV to parse and delimit.
        """

        try:
            value = self._config.get(section, option)

            parser = csv.reader(
                [value], lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
            try:
                if PY2:
                    vals = [v.decode('utf-8') for v in next(parser)]
                else:
                    vals = next(parser)
            except (csv.Error, ValueError) as e:
                raise Error(e)
            return vals
        except Error as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getstringlist(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def setstringlist(self, section, option, values):
        """Saves a list of unicode strings using the csv module"""

        # csv.writer needs a bytes sink on PY2 and a text sink on PY3.
        if PY2:
            sw = cBytesIO()
            values = [text_type(v).encode('utf-8') for v in values]
        else:
            sw = StringIO()
            values = [text_type(v) for v in values]
        writer = csv.writer(sw, lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
        writer.writerow(values)
        self.set(section, option, sw.getvalue())

    def setlist(self, section, option, values, sep=","):
        """Saves a list of str using ',' as a separator and \\ for escaping"""

        values = [str(v) for v in values]
        joined = join_escape(values, sep)
        self.set(section, option, joined)

    def getlist(self, section, option, default=_DEFAULT, sep=","):
        """Returns a str list saved with setlist()"""

        try:
            value = self._config.get(section, option)
            return split_escape(value, sep)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getlist(section, option, sep=sep)
                    except Error:
                        pass
                raise Error(e)
            return default

    def set(self, section, option, value):
        """Saves the string representation for the passed value

        Don't pass unicode, encode first.
        """

        if PY3 and isinstance(value, bytes):
            raise TypeError("use setbytes")

        # RawConfigParser only allows string values but doesn't
        # scream if they are not (and it only fails before the
        # first config save..)
        if not isinstance(value, str):
            value = str(value)

        try:
            self._config.set(section, option, value)
        except NoSectionError:
            # Auto-create the section only if the defaults know about it.
            if self.defaults and self.defaults.has_section(section):
                self._config.add_section(section)
                self._config.set(section, option, value)
            else:
                raise

    def settext(self, section, option, value):
        # Unicode-safe counterpart of set(); see gettext().
        value = text_type(value)

        if PY2:
            value = value.encode("utf-8")
        else:
            # make sure there are no surrogates
            value.encode("utf-8")

        self.set(section, option, value)

    def setbytes(self, section, option, value):
        # Bytes counterpart of set(); see getbytes().
        assert isinstance(value, bytes)

        if PY3:
            value = value.decode("utf-8", "surrogateescape")
        self.set(section, option, value)

    def write(self, filename):
        """Write config to filename.

        Can raise EnvironmentError
        """

        assert isinstance(filename, fsnative)

        mkdir(os.path.dirname(filename))

        # temporary set the new version for saving
        if self._version is not None:
            self.add_section("__config__")
            self.set("__config__", "version", self._version)
        try:
            with atomic_save(filename, "wb") as fileobj:
                if PY2:
                    self._config.write(fileobj)
                else:
                    temp = StringIO()
                    self._config.write(temp)
                    data = temp.getvalue().encode("utf-8", "surrogateescape")
                    fileobj.write(data)
        finally:
            # NOTE(review): the temporary version is only reverted when a
            # file was previously loaded; on a fresh config the new
            # version value simply stays set.
            if self._loaded_version is not None:
                self.set("__config__", "version", self._loaded_version)

    def clear(self):
        """Remove all sections."""

        for section in self._config.sections():
            self._config.remove_section(section)

    def is_empty(self):
        """Whether the config has any sections"""

        return not self._config.sections()

    def read(self, filename):
        """Reads the config from `filename` if the file exists,
        otherwise does nothing

        Can raise EnvironmentError, Error.
        """

        try:
            with open(filename, "rb") as fileobj:
                if PY3:
                    # Decode with surrogateescape so arbitrary bytes
                    # round-trip through str (see getbytes/setbytes).
                    fileobj = StringIO(fileobj.read().decode(
                        "utf-8", "surrogateescape"))
                self._config.readfp(fileobj, filename)
        except (IOError, OSError):
            # Missing/unreadable file: keep the config empty.
            return

        # don't upgrade if we just created a new config
        if self._version is not None:
            self._loaded_version = self.getint("__config__", "version", -1)
            for func in self._upgrade_funcs:
                self._do_upgrade(func)

    def has_option(self, section, option):
        """If the given section exists, and contains the given option"""

        return self._config.has_option(section, option) or (
            self.defaults and self.defaults.has_option(section, option))

    def has_section(self, section):
        """If the given section exists"""

        return self._config.has_section(section) or (
            self.defaults and self.defaults.has_section(section))

    def remove_option(self, section, option):
        """Remove the specified option from the specified section

        Can raise Error.
        """

        return self._config.remove_option(section, option)

    def add_section(self, section):
        """Add a section named section to the instance if it not
        already exists."""

        if not self._config.has_section(section):
            self._config.add_section(section)
import pytz
import requests

# Module-level application setup: read the INI file once at import time and
# establish external service handles.
CONFIG_FILE="moneypenny.ini"

app = Flask(__name__)

config = RawConfigParser()
config.read(CONFIG_FILE)

# Envoy
api_key = config.get("envoy", "api_key")

# AWS
s3_key_id = None
s3_secret_key = None
if config.has_option("aws", "s3_key_id"):
    s3_key_id = config.get("aws", "s3_key_id")
    s3_secret_key = config.get("aws", "s3_secret_key")
    # NOTE(review): s3_bucket is only bound when the "aws" options are
    # present — any later unconditional use would raise NameError.
    s3_bucket = config.get("aws", "s3_bucket")

# reddit
username = config.get("reddit", "username")
password = config.get("reddit", "password")
subreddit = config.get("reddit", "subreddit")
link_format = config.get("reddit", "link_format")
# Logs in at import time; network access happens here.
r = praw.Reddit(user_agent="Mrs. Moneypenny by /u/rram")
r.login(username, password)
sr = r.get_subreddit(subreddit)

# locations
location_db = {}
def read_auto_rx_config(filename, no_sdr_test=False):
    """ Read an Auto-RX v2 Station Configuration File.

    This function will attempt to parse a configuration file.
    It will also confirm the accessibility of any SDRs specified in the config file.

    Args:
        filename (str): Filename of the configuration file to read.
        no_sdr_test (bool): Skip testing the SDRs (used for some unit tests)

    Returns:
        auto_rx_config (dict): The configuration dictionary.
        sdr_config (dict): A dictionary with SDR parameters.
    """
    global global_config
    # Configuration Defaults:
    # These values are used both as fall-backs and as the defaults dict
    # passed into RawConfigParser below.
    auto_rx_config = {
        # Log Settings
        'per_sonde_log': True,
        # Email Settings
        'email_enabled': False,
        'email_smtp_server': 'localhost',
        'email_smtp_port': 25,
        'email_smtp_authentication': 'None',
        'email_smtp_login': '******',
        'email_smtp_password': '******',
        'email_from': 'sonde@localhost',
        'email_to': None,
        'email_subject': "<type> Sonde launch detected on <freq>: <id>",
        # SDR Settings
        'sdr_fm': 'rtl_fm',
        'sdr_power': 'rtl_power',
        'sdr_quantity': 1,
        # Search Parameters
        'min_freq': 400.4,
        'max_freq': 404.0,
        'rx_timeout': 120,
        'whitelist': [],
        'blacklist': [],
        'greylist': [],
        # Location Settings
        'station_lat': 0.0,
        'station_lon': 0.0,
        'station_alt': 0.0,
        'station_code': 'SONDE',  # NOTE: This will not be read from the config file, but will be left in place for now
        # as a default setting.
        'gpsd_enabled': False,
        'gpsd_host': 'localhost',
        'gpsd_port': 2947,
        # Position Filter Settings
        'max_altitude': 50000,
        'max_radius_km': 1000,
        # Habitat Settings
        'habitat_enabled': False,
        'habitat_upload_rate': 30,
        'habitat_uploader_callsign': 'SONDE_AUTO_RX',
        'habitat_uploader_antenna': '1/4-wave',
        'habitat_upload_listener_position': False,
        'habitat_payload_callsign': '<id>',
        # APRS Settings
        'aprs_enabled': False,
        'aprs_upload_rate': 30,
        'aprs_user': 'N0CALL',
        'aprs_pass': '00000',
        'aprs_server': 'rotate.aprs2.net',
        'aprs_object_id': '<id>',
        'aprs_custom_comment': 'Radiosonde Auto-RX <freq>',
        'aprs_position_report': False,
        'station_beacon_enabled': False,
        'station_beacon_rate': 30,
        'station_beacon_comment': "radiosonde_auto_rx SondeGate v<version>",
        'station_beacon_icon': '/r',
        # Web Settings,
        'web_host': '0.0.0.0',
        'web_port': 5000,
        'web_archive_age': 120,
        # NOTE(review): default is True here, but the fallback path below sets
        # web_control = False and logs "using defaults (disabled)" -- confirm
        # which default is intended.
        'web_control': True,
        # Advanced Parameters
        'search_step': 800,
        'snr_threshold': 10,
        'min_distance': 1000,
        'dwell_time': 10,
        'max_peaks': 10,
        'quantization': 10000,
        'decoder_spacing_limit': 15000,
        'synchronous_upload': False,
        'scan_dwell_time': 20,
        'detect_dwell_time': 5,
        'scan_delay': 10,
        'payload_id_valid': 5,
        'temporary_block_time': 60,
        'rs41_drift_tweak': False,
        'decoder_stats': False,
        'ngp_tweak': False,
        # Rotator Settings
        # NOTE(review): the key here is 'enable_rotator', but the parsing code
        # below writes 'rotator_enabled' -- this default entry appears never to
        # be consulted; verify before relying on it.
        'enable_rotator': False,
        'rotator_update_rate': 30,
        'rotator_hostname': '127.0.0.1',
        'rotator_port': 4533,
        'rotation_threshold': 5.0,
        'rotator_homing_enabled': False,
        'rotator_homing_delay': 10,
        'rotator_home_azimuth': 0,
        'rotator_home_elevation': 0,
        # OziExplorer Settings
        'ozi_enabled': False,
        'ozi_update_rate': 5,
        'ozi_port': 55681,
        'payload_summary_enabled': False,
        'payload_summary_port': 55672,
        # Debugging settings
        'save_detection_audio': False,
        'save_decode_audio': False,
        'save_decode_iq': False,
        # URL for the Habitat DB Server.
        # As of July 2018 we send via sondehub.org, which will allow us to eventually transition away
        # from using the habhub.org tracker, and leave it for use by High-Altitude Balloon Hobbyists.
        # For now, sondehub.org just acts as a proxy to habhub.org.
        # This setting is not exposed to users as it's only used for unit/int testing
        'habitat_url': "https://habitat.sondehub.org/"
    }

    try:
        # Check the file exists.
        if not os.path.isfile(filename):
            logging.critical("Config file %s does not exist!" % filename)
            return None

        config = RawConfigParser(auto_rx_config)
        config.read(filename)

        # Log Settings
        auto_rx_config['per_sonde_log'] = config.getboolean('logging', 'per_sonde_log')

        # Email Settings
        # Optional section: any parse failure just disables email notifications.
        if config.has_option('email', 'email_enabled'):
            try:
                auto_rx_config['email_enabled'] = config.getboolean('email', 'email_enabled')
                auto_rx_config['email_smtp_server'] = config.get('email', 'smtp_server')
                auto_rx_config['email_smtp_port'] = config.get('email', 'smtp_port')
                auto_rx_config['email_smtp_authentication'] = config.get('email', 'smtp_authentication')
                auto_rx_config['email_smtp_login'] = config.get('email', 'smtp_login')
                auto_rx_config['email_smtp_password'] = config.get('email', 'smtp_password')
                auto_rx_config['email_from'] = config.get('email', 'from')
                auto_rx_config['email_to'] = config.get('email', 'to')
                auto_rx_config['email_subject'] = config.get('email', 'subject')

                if auto_rx_config['email_smtp_authentication'] not in ['None', 'TLS', 'SSL']:
                    logging.error("Config - Invalid email authentication setting. Must be None, TLS or SSL.")
                    return None

            except:
                logging.error("Config - Invalid or missing email settings. Disabling.")
                auto_rx_config['email_enabled'] = False

        # SDR Settings
        auto_rx_config['sdr_fm'] = config.get('advanced', 'sdr_fm_path')
        auto_rx_config['sdr_power'] = config.get('advanced', 'sdr_power_path')
        auto_rx_config['sdr_quantity'] = config.getint('sdr', 'sdr_quantity')

        # Search Parameters
        auto_rx_config['min_freq'] = config.getfloat('search_params', 'min_freq')
        auto_rx_config['max_freq'] = config.getfloat('search_params', 'max_freq')
        auto_rx_config['rx_timeout'] = config.getint('search_params', 'rx_timeout')
        # Frequency lists are stored in the config as JSON arrays.
        auto_rx_config['whitelist'] = json.loads(config.get('search_params', 'whitelist'))
        auto_rx_config['blacklist'] = json.loads(config.get('search_params', 'blacklist'))
        auto_rx_config['greylist'] = json.loads(config.get('search_params', 'greylist'))

        # Location Settings
        auto_rx_config['station_lat'] = config.getfloat('location', 'station_lat')
        auto_rx_config['station_lon'] = config.getfloat('location', 'station_lon')
        auto_rx_config['station_alt'] = config.getfloat('location', 'station_alt')

        # Position Filtering
        auto_rx_config['max_altitude'] = config.getint('filtering', 'max_altitude')
        auto_rx_config['max_radius_km'] = config.getint('filtering', 'max_radius_km')

        # Habitat Settings
        auto_rx_config['habitat_enabled'] = config.getboolean('habitat', 'habitat_enabled')
        auto_rx_config['habitat_upload_rate'] = config.getint('habitat', 'upload_rate')
        auto_rx_config['habitat_payload_callsign'] = config.get('habitat', 'payload_callsign')
        auto_rx_config['habitat_uploader_callsign'] = config.get('habitat', 'uploader_callsign')
        auto_rx_config['habitat_upload_listener_position'] = config.getboolean('habitat', 'upload_listener_position')
        auto_rx_config['habitat_uploader_antenna'] = config.get('habitat', 'uploader_antenna').strip()

        try:  # Use the default configuration if not found
            auto_rx_config['habitat_url'] = config.get('habitat', 'url')
        except:
            pass

        # APRS Settings
        auto_rx_config['aprs_enabled'] = config.getboolean('aprs', 'aprs_enabled')
        auto_rx_config['aprs_upload_rate'] = config.getint('aprs', 'upload_rate')
        auto_rx_config['aprs_user'] = config.get('aprs', 'aprs_user')
        auto_rx_config['aprs_pass'] = config.get('aprs', 'aprs_pass')
        auto_rx_config['aprs_server'] = config.get('aprs', 'aprs_server')
        auto_rx_config['aprs_object_id'] = config.get('aprs', 'aprs_object_id')
        auto_rx_config['aprs_custom_comment'] = config.get('aprs', 'aprs_custom_comment')
        auto_rx_config['aprs_position_report'] = config.getboolean('aprs', 'aprs_position_report')
        auto_rx_config['station_beacon_enabled'] = config.getboolean('aprs', 'station_beacon_enabled')
        auto_rx_config['station_beacon_rate'] = config.getint('aprs', 'station_beacon_rate')
        auto_rx_config['station_beacon_comment'] = config.get('aprs', 'station_beacon_comment')
        auto_rx_config['station_beacon_icon'] = config.get('aprs', 'station_beacon_icon')

        # OziPlotter Settings
        auto_rx_config['ozi_enabled'] = config.getboolean('oziplotter', 'ozi_enabled')
        auto_rx_config['ozi_update_rate'] = config.getint('oziplotter', 'ozi_update_rate')
        auto_rx_config['ozi_port'] = config.getint('oziplotter', 'ozi_port')
        auto_rx_config['payload_summary_enabled'] = config.getboolean('oziplotter', 'payload_summary_enabled')
        auto_rx_config['payload_summary_port'] = config.getint('oziplotter', 'payload_summary_port')

        # Advanced Settings
        auto_rx_config['search_step'] = config.getfloat('advanced', 'search_step')
        auto_rx_config['snr_threshold'] = config.getfloat('advanced', 'snr_threshold')
        auto_rx_config['min_distance'] = config.getfloat('advanced', 'min_distance')
        auto_rx_config['dwell_time'] = config.getint('advanced', 'dwell_time')
        auto_rx_config['quantization'] = config.getint('advanced', 'quantization')
        auto_rx_config['max_peaks'] = config.getint('advanced', 'max_peaks')
        auto_rx_config['scan_dwell_time'] = config.getint('advanced', 'scan_dwell_time')
        auto_rx_config['detect_dwell_time'] = config.getint('advanced', 'detect_dwell_time')
        auto_rx_config['scan_delay'] = config.getint('advanced', 'scan_delay')
        auto_rx_config['payload_id_valid'] = config.getint('advanced', 'payload_id_valid')
        auto_rx_config['synchronous_upload'] = config.getboolean('advanced', 'synchronous_upload')

        # Rotator Settings
        auto_rx_config['rotator_enabled'] = config.getboolean('rotator', 'rotator_enabled')
        auto_rx_config['rotator_update_rate'] = config.getint('rotator', 'update_rate')
        auto_rx_config['rotator_hostname'] = config.get('rotator', 'rotator_hostname')
        auto_rx_config['rotator_port'] = config.getint('rotator', 'rotator_port')
        auto_rx_config['rotator_homing_enabled'] = config.getboolean('rotator', 'rotator_homing_enabled')
        auto_rx_config['rotator_home_azimuth'] = config.getfloat('rotator', 'rotator_home_azimuth')
        auto_rx_config['rotator_home_elevation'] = config.getfloat('rotator', 'rotator_home_elevation')
        auto_rx_config['rotator_homing_delay'] = config.getint('rotator', 'rotator_homing_delay')
        auto_rx_config['rotation_threshold'] = config.getfloat('rotator', 'rotation_threshold')

        # Web interface settings.
        auto_rx_config['web_host'] = config.get('web', 'web_host')
        auto_rx_config['web_port'] = config.getint('web', 'web_port')
        auto_rx_config['web_archive_age'] = config.getint('web', 'archive_age')

        auto_rx_config['save_detection_audio'] = config.getboolean('debugging', 'save_detection_audio')
        auto_rx_config['save_decode_audio'] = config.getboolean('debugging', 'save_decode_audio')
        auto_rx_config['save_decode_iq'] = config.getboolean('debugging', 'save_decode_iq')

        # NOTE 2019-09-21: The station code will now be fixed at the default to avoid multiple iMet callsign issues.
        # auto_rx_config['station_code'] = config.get('location', 'station_code')
        # if len(auto_rx_config['station_code']) > 5:
        #     auto_rx_config['station_code'] = auto_rx_config['station_code'][:5]
        #     logging.warning("Config - Clipped station code to 5 digits: %s" % auto_rx_config['station_code'])

        auto_rx_config['temporary_block_time'] = config.getint('advanced', 'temporary_block_time')

        # New demod tweaks - Added 2019-04-23
        # Default to all experimental decoders off.
        # NOTE(review): LMS6 defaults to True here despite the comment above --
        # confirm whether that is intentional.
        auto_rx_config['experimental_decoders'] = {
            'RS41': False,
            'RS92': False,
            'DFM': False,
            'M10': False,
            'iMet': False,
            'LMS6': True,
            'MK2LMS': False,
            'MEISEI': False,
            'UDP': False}

        auto_rx_config['rs41_drift_tweak'] = config.getboolean('advanced', 'drift_tweak')
        auto_rx_config['decoder_spacing_limit'] = config.getint('advanced', 'decoder_spacing_limit')
        auto_rx_config['experimental_decoders']['RS41'] = config.getboolean('advanced', 'rs41_experimental')
        auto_rx_config['experimental_decoders']['RS92'] = config.getboolean('advanced', 'rs92_experimental')
        auto_rx_config['experimental_decoders']['M10'] = config.getboolean('advanced', 'm10_experimental')
        auto_rx_config['experimental_decoders']['DFM'] = config.getboolean('advanced', 'dfm_experimental')
        auto_rx_config['experimental_decoders']['LMS6'] = config.getboolean('advanced', 'lms6-400_experimental')

        # Newer options; missing entries fall back to disabled defaults.
        try:
            auto_rx_config['web_control'] = config.getboolean('web', 'web_control')
            auto_rx_config['ngp_tweak'] = config.getboolean('advanced', 'ngp_tweak')
            auto_rx_config['gpsd_enabled'] = config.getboolean('location', 'gpsd_enabled')
            auto_rx_config['gpsd_host'] = config.get('location', 'gpsd_host')
            auto_rx_config['gpsd_port'] = config.getint('location', 'gpsd_port')
        except:
            logging.warning("Config - Did not find web control / ngp_tweak / gpsd options, using defaults (disabled)")
            auto_rx_config['web_control'] = False
            auto_rx_config['ngp_tweak'] = False
            auto_rx_config['gpsd_enabled'] = False

        # If we are being called as part of a unit test, just return the config now.
        if no_sdr_test:
            return auto_rx_config

        # Now we attempt to read in the individual SDR parameters.
        auto_rx_config['sdr_settings'] = {}

        for _n in range(1, auto_rx_config['sdr_quantity'] + 1):
            _section = "sdr_%d" % _n
            try:
                _device_idx = config.get(_section, 'device_idx')
                _ppm = config.getint(_section, 'ppm')
                _gain = config.getfloat(_section, 'gain')
                _bias = config.getboolean(_section, 'bias')

                # Device index 0 means "any SDR", which is ambiguous with
                # more than one receiver attached.
                if (auto_rx_config['sdr_quantity'] > 1) and (_device_idx == '0'):
                    logging.critical("Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!")
                    return None

                # See if the SDR exists.
                _sdr_valid = rtlsdr_test(_device_idx)
                if _sdr_valid:
                    auto_rx_config['sdr_settings'][_device_idx] = {
                        'ppm': _ppm,
                        'gain': _gain,
                        'bias': _bias,
                        'in_use': False,
                        'task': None}
                    logging.info('Config - Tested SDR #%s OK' % _device_idx)
                else:
                    logging.warning("Config - SDR #%s invalid." % _device_idx)
            except Exception as e:
                logging.error("Config - Error parsing SDR %d config - %s" % (_n, str(e)))
                continue

        # Sanity checks when using more than one SDR
        if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['habitat_payload_callsign'] != "<id>"):
            logging.critical("Fixed Habitat Payload callsign used in a multi-SDR configuration. Go read the warnings in the config file!")
            return None

        if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['aprs_object_id'] != "<id>"):
            logging.critical("Fixed APRS object ID used in a multi-SDR configuration. Go read the warnings in the config file!")
            return None

        if (len(auto_rx_config['sdr_settings'].keys()) > 1) and (auto_rx_config['rotator_enabled']):
            logging.critical("Rotator enabled in a multi-SDR configuration. Go read the warnings in the config file!")
            return None

        # TODO: Revisit this limitation once the OziPlotter output sub-module is complete.
        if (len(auto_rx_config['sdr_settings'].keys()) > 1) and auto_rx_config['ozi_enabled']:
            logging.critical("Oziplotter output enabled in a multi-SDR configuration.")
            return None

        if len(auto_rx_config['sdr_settings'].keys()) == 0:
            # We have no SDRs to use!!
            logging.error("Config - No working SDRs! Cannot run...")
            return None
        else:
            # Create a global copy of the configuration file at this point
            global_config = copy.deepcopy(auto_rx_config)
            # Excise some sensitive parameters from the global config.
            global_config.pop('email_smtp_login')
            global_config.pop('email_smtp_password')
            global_config.pop('email_smtp_server')

            return auto_rx_config

    except:
        # NOTE(review): bare except intentionally catches any parse failure so
        # a malformed config file is reported rather than crashing the caller.
        traceback.print_exc()
        logging.error("Could not parse config file.")
        return None
def read_auto_rx_config(filename, no_sdr_test=False):
    """Read an Auto-RX v2 Station Configuration File.

    This function will attempt to parse a configuration file.
    It will also confirm the accessibility of any SDRs specified in the config file.

    Args:
        filename (str): Filename of the configuration file to read.
        no_sdr_test (bool): Skip testing the SDRs (used for some unit tests)

    Returns:
        auto_rx_config (dict): The configuration dictionary, or None on any
        parse/sanity-check failure (including a missing file).
    """
    global global_config, web_password

    # Configuration Defaults:
    # Used as fall-back values and as the defaults dict for RawConfigParser.
    auto_rx_config = {
        # Log Settings
        "per_sonde_log": True,
        # Email Settings
        "email_enabled": False,
        #'email_error_notifications': False,
        "email_smtp_server": "localhost",
        "email_smtp_port": 25,
        "email_smtp_authentication": "None",
        "email_smtp_login": "******",
        "email_smtp_password": "******",
        "email_from": "sonde@localhost",
        "email_to": None,
        "email_subject": "<type> Sonde launch detected on <freq>: <id>",
        # SDR Settings
        "sdr_fm": "rtl_fm",
        "sdr_power": "rtl_power",
        "sdr_quantity": 1,
        # Search Parameters
        "min_freq": 400.4,
        "max_freq": 404.0,
        "rx_timeout": 120,
        "only_scan": [],
        "never_scan": [],
        "always_scan": [],
        # Location Settings
        "station_lat": 0.0,
        "station_lon": 0.0,
        "station_alt": 0.0,
        # NOTE: This will not be read from the config file, but will be left
        # in place for now as a default setting.
        "station_code": "SONDE",
        "gpsd_enabled": False,
        "gpsd_host": "localhost",
        "gpsd_port": 2947,
        # Position Filter Settings
        "max_altitude": 50000,
        "max_radius_km": 1000,
        "min_radius_km": 0,
        "radius_temporary_block": False,
        # "sonde_time_threshold": 3, # Commented out to ensure warning message is shown.
        # Habitat Settings
        "habitat_enabled": False,
        "habitat_upload_rate": 30,
        "habitat_uploader_callsign": "SONDE_AUTO_RX",
        "habitat_uploader_antenna": "1/4-wave",
        "habitat_upload_listener_position": False,
        "habitat_payload_callsign": "<id>",
        # APRS Settings
        "aprs_enabled": False,
        "aprs_upload_rate": 30,
        "aprs_user": "******",
        "aprs_pass": "******",
        "aprs_server": "rotate.aprs2.net",
        "aprs_object_id": "<id>",
        #'aprs_use_custom_object_id': False,
        "aprs_custom_comment": "Radiosonde Auto-RX <freq>",
        "aprs_position_report": False,
        "station_beacon_enabled": False,
        "station_beacon_rate": 30,
        "station_beacon_comment": "radiosonde_auto_rx SondeGate v<version>",
        "station_beacon_icon": "/r",
        # Web Settings,
        "web_host": "0.0.0.0",
        "web_port": 5000,
        "web_archive_age": 120,
        "web_control": False,
        # "web_password": "******",  # Commented out to ensure warning message is shown
        #'kml_refresh_rate': 10,
        # Advanced Parameters
        "search_step": 800,
        "snr_threshold": 10,
        "min_distance": 1000,
        "dwell_time": 10,
        "max_peaks": 10,
        "quantization": 10000,
        "decoder_spacing_limit": 15000,
        "synchronous_upload": False,
        "scan_dwell_time": 20,
        "detect_dwell_time": 5,
        "scan_delay": 10,
        "payload_id_valid": 5,
        "temporary_block_time": 60,
        "rs41_drift_tweak": False,
        "decoder_stats": False,
        "ngp_tweak": False,
        # Rotator Settings
        "enable_rotator": False,
        "rotator_update_rate": 30,
        "rotator_hostname": "127.0.0.1",
        "rotator_port": 4533,
        "rotation_threshold": 5.0,
        "rotator_homing_enabled": False,
        "rotator_homing_delay": 10,
        "rotator_home_azimuth": 0,
        "rotator_home_elevation": 0,
        # OziExplorer Settings
        "ozi_enabled": False,
        "ozi_update_rate": 5,
        "ozi_port": 55681,
        "payload_summary_enabled": False,
        "payload_summary_port": 55672,
        # Debugging settings
        "save_detection_audio": False,
        "save_decode_audio": False,
        "save_decode_iq": False,
        # URL for the Habitat DB Server.
        # As of July 2018 we send via sondehub.org, which will allow us to eventually transition away
        # from using the habhub.org tracker, and leave it for use by High-Altitude Balloon Hobbyists.
        # For now, sondehub.org just acts as a proxy to habhub.org.
        # This setting is not exposed to users as it's only used for unit/int testing
        "habitat_url": "https://habitat.sondehub.org/",
        # New Sondehub DB Settings
        "sondehub_enabled": True,
        "sondehub_upload_rate": 30,
        # "sondehub_contact_email": "*****@*****.**" # Commented out to ensure a warning message is shown on startup
    }

    try:
        # Check the file exists.
        if not os.path.isfile(filename):
            logging.critical("Config file %s does not exist!" % filename)
            return None

        config = RawConfigParser(auto_rx_config)
        config.read(filename)

        # Log Settings
        auto_rx_config["per_sonde_log"] = config.getboolean(
            "logging", "per_sonde_log")

        # Email Settings
        # Optional section: any parse failure just disables email notifications.
        if config.has_option("email", "email_enabled"):
            try:
                auto_rx_config["email_enabled"] = config.getboolean(
                    "email", "email_enabled")
                auto_rx_config["email_smtp_server"] = config.get(
                    "email", "smtp_server")
                auto_rx_config["email_smtp_port"] = config.get(
                    "email", "smtp_port")
                auto_rx_config["email_smtp_authentication"] = config.get(
                    "email", "smtp_authentication")
                auto_rx_config["email_smtp_login"] = config.get(
                    "email", "smtp_login")
                auto_rx_config["email_smtp_password"] = config.get(
                    "email", "smtp_password")
                auto_rx_config["email_from"] = config.get("email", "from")
                auto_rx_config["email_to"] = config.get("email", "to")
                auto_rx_config["email_subject"] = config.get(
                    "email", "subject")

                if auto_rx_config["email_smtp_authentication"] not in [
                    "None",
                    "TLS",
                    "SSL",
                ]:
                    logging.error(
                        "Config - Invalid email authentication setting. Must be None, TLS or SSL."
                    )
                    return None

            except:
                logging.error(
                    "Config - Invalid or missing email settings. Disabling.")
                auto_rx_config["email_enabled"] = False

        # SDR Settings
        auto_rx_config["sdr_fm"] = config.get("advanced", "sdr_fm_path")
        auto_rx_config["sdr_power"] = config.get("advanced", "sdr_power_path")
        auto_rx_config["sdr_quantity"] = config.getint("sdr", "sdr_quantity")

        # Search Parameters
        auto_rx_config["min_freq"] = config.getfloat("search_params", "min_freq")
        auto_rx_config["max_freq"] = config.getfloat("search_params", "max_freq")
        auto_rx_config["rx_timeout"] = config.getint("search_params", "rx_timeout")

        # Scan lists are JSON arrays; the old whitelist/blacklist/greylist
        # names are still accepted as a deprecated fallback.
        if (config.has_option("search_params", "only_scan")
                and config.get("search_params", "only_scan") != ""):
            # check if user has new name for scan lists
            auto_rx_config["only_scan"] = json.loads(
                config.get("search_params", "only_scan"))
        else:
            logging.warning(
                "Config - whitelist configuration has been deprecated and replaced with only_scan list"
            )
            auto_rx_config["only_scan"] = json.loads(
                config.get("search_params", "whitelist"))

        if (config.has_option("search_params", "never_scan")
                and config.get("search_params", "never_scan") != ""):
            # check if user has new name for scan lists
            auto_rx_config["never_scan"] = json.loads(
                config.get("search_params", "never_scan"))
        else:
            logging.warning(
                "Config - blacklist configuration has been deprecated and replaced with never_scan list"
            )
            auto_rx_config["never_scan"] = json.loads(
                config.get("search_params", "blacklist"))

        if (config.has_option("search_params", "always_scan")
                and config.get("search_params", "always_scan") != ""):
            # check if user has new name for scan lists
            auto_rx_config["always_scan"] = json.loads(
                config.get("search_params", "always_scan"))
        else:
            logging.warning(
                "Config - greylist configuration has been deprecated and replaced with always_scan list"
            )
            auto_rx_config["always_scan"] = json.loads(
                config.get("search_params", "greylist"))

        # Location Settings
        auto_rx_config["station_lat"] = config.getfloat(
            "location", "station_lat")
        auto_rx_config["station_lon"] = config.getfloat(
            "location", "station_lon")
        auto_rx_config["station_alt"] = config.getfloat(
            "location", "station_alt")

        # Position Filtering
        auto_rx_config["max_altitude"] = config.getint("filtering", "max_altitude")
        auto_rx_config["max_radius_km"] = config.getint(
            "filtering", "max_radius_km")

        # Habitat Settings
        # Deprecated from v1.5.0
        # auto_rx_config["habitat_enabled"] = config.getboolean(
        #     "habitat", "habitat_enabled"
        # )
        # auto_rx_config["habitat_upload_rate"] = config.getint("habitat", "upload_rate")
        auto_rx_config["habitat_uploader_callsign"] = config.get(
            "habitat", "uploader_callsign")
        auto_rx_config["habitat_upload_listener_position"] = config.getboolean(
            "habitat", "upload_listener_position")
        auto_rx_config["habitat_uploader_antenna"] = config.get(
            "habitat", "uploader_antenna").strip()

        # try:  # Use the default configuration if not found
        #     auto_rx_config["habitat_url"] = config.get("habitat", "url")
        # except:
        #     pass

        # Deprecated from v1.5.0
        # if auto_rx_config["habitat_upload_rate"] < MINIMUM_HABITAT_UPDATE_RATE:
        #     logging.warning(
        #         "Config - Habitat Update Rate clipped to minimum of %d seconds. Please be respectful of other users of Habitat."
        #         % MINIMUM_HABITAT_UPDATE_RATE
        #     )
        #     auto_rx_config["habitat_upload_rate"] = MINIMUM_HABITAT_UPDATE_RATE

        # APRS Settings
        auto_rx_config["aprs_enabled"] = config.getboolean(
            "aprs", "aprs_enabled")
        auto_rx_config["aprs_upload_rate"] = config.getint(
            "aprs", "upload_rate")
        auto_rx_config["aprs_user"] = config.get("aprs", "aprs_user")
        auto_rx_config["aprs_pass"] = config.get("aprs", "aprs_pass")
        auto_rx_config["aprs_server"] = config.get("aprs", "aprs_server")
        auto_rx_config["aprs_object_id"] = config.get("aprs", "aprs_object_id")
        auto_rx_config["aprs_custom_comment"] = config.get(
            "aprs", "aprs_custom_comment")
        auto_rx_config["aprs_position_report"] = config.getboolean(
            "aprs", "aprs_position_report")
        auto_rx_config["station_beacon_enabled"] = config.getboolean(
            "aprs", "station_beacon_enabled")
        auto_rx_config["station_beacon_rate"] = config.getint(
            "aprs", "station_beacon_rate")
        auto_rx_config["station_beacon_comment"] = config.get(
            "aprs", "station_beacon_comment")
        auto_rx_config["station_beacon_icon"] = config.get(
            "aprs", "station_beacon_icon")

        # Enforce a floor on the APRS upload rate to avoid flooding APRS-IS.
        if auto_rx_config["aprs_upload_rate"] < MINIMUM_APRS_UPDATE_RATE:
            logging.warning(
                "Config - APRS Update Rate clipped to minimum of %d seconds. Please be respectful of other users of APRS-IS."
                % MINIMUM_APRS_UPDATE_RATE)
            auto_rx_config["aprs_upload_rate"] = MINIMUM_APRS_UPDATE_RATE

        # OziPlotter Settings
        auto_rx_config["ozi_enabled"] = config.getboolean(
            "oziplotter", "ozi_enabled")
        auto_rx_config["ozi_update_rate"] = config.getint(
            "oziplotter", "ozi_update_rate")
        auto_rx_config["ozi_port"] = config.getint("oziplotter", "ozi_port")
        auto_rx_config["payload_summary_enabled"] = config.getboolean(
            "oziplotter", "payload_summary_enabled")
        auto_rx_config["payload_summary_port"] = config.getint(
            "oziplotter", "payload_summary_port")

        # Advanced Settings
        auto_rx_config["search_step"] = config.getfloat(
            "advanced", "search_step")
        auto_rx_config["snr_threshold"] = config.getfloat(
            "advanced", "snr_threshold")
        auto_rx_config["min_distance"] = config.getfloat(
            "advanced", "min_distance")
        auto_rx_config["dwell_time"] = config.getint("advanced", "dwell_time")
        auto_rx_config["quantization"] = config.getint(
            "advanced", "quantization")
        auto_rx_config["max_peaks"] = config.getint("advanced", "max_peaks")
        auto_rx_config["scan_dwell_time"] = config.getint(
            "advanced", "scan_dwell_time")
        auto_rx_config["detect_dwell_time"] = config.getint(
            "advanced", "detect_dwell_time")
        auto_rx_config["scan_delay"] = config.getint("advanced", "scan_delay")
        auto_rx_config["payload_id_valid"] = config.getint(
            "advanced", "payload_id_valid")
        auto_rx_config["synchronous_upload"] = config.getboolean(
            "advanced", "synchronous_upload")

        # Rotator Settings
        auto_rx_config["rotator_enabled"] = config.getboolean(
            "rotator", "rotator_enabled")
        auto_rx_config["rotator_update_rate"] = config.getint(
            "rotator", "update_rate")
        auto_rx_config["rotator_hostname"] = config.get(
            "rotator", "rotator_hostname")
        auto_rx_config["rotator_port"] = config.getint(
            "rotator", "rotator_port")
        auto_rx_config["rotator_homing_enabled"] = config.getboolean(
            "rotator", "rotator_homing_enabled")
        auto_rx_config["rotator_home_azimuth"] = config.getfloat(
            "rotator", "rotator_home_azimuth")
        auto_rx_config["rotator_home_elevation"] = config.getfloat(
            "rotator", "rotator_home_elevation")
        auto_rx_config["rotator_homing_delay"] = config.getint(
            "rotator", "rotator_homing_delay")
        auto_rx_config["rotation_threshold"] = config.getfloat(
            "rotator", "rotation_threshold")

        # Web interface settings.
        auto_rx_config["web_host"] = config.get("web", "web_host")
        auto_rx_config["web_port"] = config.getint("web", "web_port")
        auto_rx_config["web_archive_age"] = config.getint("web", "archive_age")

        auto_rx_config["save_detection_audio"] = config.getboolean(
            "debugging", "save_detection_audio")
        auto_rx_config["save_decode_audio"] = config.getboolean(
            "debugging", "save_decode_audio")
        auto_rx_config["save_decode_iq"] = config.getboolean(
            "debugging", "save_decode_iq")

        # NOTE 2019-09-21: The station code will now be fixed at the default to avoid multiple iMet callsign issues.
        # auto_rx_config['station_code'] = config.get('location', 'station_code')
        # if len(auto_rx_config['station_code']) > 5:
        #     auto_rx_config['station_code'] = auto_rx_config['station_code'][:5]
        #     logging.warning("Config - Clipped station code to 5 digits: %s" % auto_rx_config['station_code'])

        auto_rx_config["temporary_block_time"] = config.getint(
            "advanced", "temporary_block_time")

        # New demod tweaks - Added 2019-04-23
        # Default to experimental decoders on for FSK/GFSK sondes...
        auto_rx_config["experimental_decoders"] = {
            "RS41": True,
            "RS92": True,
            "DFM": True,
            "M10": True,
            "M20": True,
            "IMET": False,
            "IMET5": True,
            "LMS6": True,
            "MK2LMS": False,
            "MEISEI": False,
            "MRZ": False,  # .... except for the MRZ, until we know it works.
            "UDP": False,
        }

        auto_rx_config["decoder_spacing_limit"] = config.getint(
            "advanced", "decoder_spacing_limit")
        auto_rx_config["experimental_decoders"]["RS41"] = config.getboolean(
            "advanced", "rs41_experimental")
        auto_rx_config["experimental_decoders"]["RS92"] = config.getboolean(
            "advanced", "rs92_experimental")
        auto_rx_config["experimental_decoders"]["M10"] = config.getboolean(
            "advanced", "m10_experimental")
        auto_rx_config["experimental_decoders"]["DFM"] = config.getboolean(
            "advanced", "dfm_experimental")
        auto_rx_config["experimental_decoders"]["LMS6"] = config.getboolean(
            "advanced", "lms6-400_experimental")

        # Options added after the sections above; each try/except falls back
        # to a safe default so older config files keep working.
        try:
            auto_rx_config["web_control"] = config.getboolean(
                "web", "web_control")
            auto_rx_config["ngp_tweak"] = config.getboolean(
                "advanced", "ngp_tweak")
            auto_rx_config["gpsd_enabled"] = config.getboolean(
                "location", "gpsd_enabled")
            auto_rx_config["gpsd_host"] = config.get("location", "gpsd_host")
            auto_rx_config["gpsd_port"] = config.getint(
                "location", "gpsd_port")
        except:
            logging.warning(
                "Config - Did not find web control / ngp_tweak / gpsd options, using defaults (disabled)"
            )
            auto_rx_config["web_control"] = False
            auto_rx_config["ngp_tweak"] = False
            auto_rx_config["gpsd_enabled"] = False

        try:
            auto_rx_config["min_radius_km"] = config.getint(
                "filtering", "min_radius_km")
            auto_rx_config["radius_temporary_block"] = config.getboolean(
                "filtering", "radius_temporary_block")
        except:
            logging.warning(
                "Config - Did not find minimum radius filter setting, using default (0km)."
            )
            auto_rx_config["min_radius_km"] = 0
            auto_rx_config["radius_temporary_block"] = False

        try:
            auto_rx_config["aprs_use_custom_object_id"] = config.getboolean(
                "aprs", "aprs_use_custom_object_id")
        except:
            logging.warning(
                "Config - Did not find aprs_use_custom_object_id setting, using default (False)"
            )
            auto_rx_config["aprs_use_custom_object_id"] = False

        try:
            auto_rx_config["aprs_port"] = config.getint("aprs", "aprs_port")
        except:
            logging.warning(
                "Config - Did not find aprs_port setting - using default of 14590. APRS packets might not be forwarded out to the wider APRS-IS network!"
            )
            auto_rx_config["aprs_port"] = 14590

        try:
            auto_rx_config["email_error_notifications"] = config.getboolean(
                "email", "error_notifications")
            auto_rx_config["email_launch_notifications"] = config.getboolean(
                "email", "launch_notifications")
            auto_rx_config["email_landing_notifications"] = config.getboolean(
                "email", "landing_notifications")
            auto_rx_config["email_landing_range_threshold"] = config.getfloat(
                "email", "landing_range_threshold")
            auto_rx_config[
                "email_landing_altitude_threshold"] = config.getfloat(
                    "email", "landing_altitude_threshold")
        except:
            logging.warning(
                "Config - Did not find new email settings (v1.3.3), using defaults"
            )
            auto_rx_config["email_error_notifications"] = False
            auto_rx_config["email_launch_notifications"] = True
            auto_rx_config["email_landing_notifications"] = True
            auto_rx_config["email_landing_range_threshold"] = 30
            auto_rx_config["email_landing_altitude_threshold"] = 1000

        try:
            auto_rx_config["kml_refresh_rate"] = config.getint(
                "web", "kml_refresh_rate")
        except:
            logging.warning(
                "Config - Did not find kml_refresh_rate setting, using default (10 seconds)."
            )
            # FIX: the fallback previously assigned 11, contradicting the
            # documented default of 10 seconds in the warning above.
            auto_rx_config["kml_refresh_rate"] = 10

        # New Sondehub db Settings
        try:
            auto_rx_config["sondehub_enabled"] = config.getboolean(
                "sondehub", "sondehub_enabled")
            auto_rx_config["sondehub_upload_rate"] = config.getint(
                "sondehub", "sondehub_upload_rate")
        except:
            logging.warning(
                "Config - Did not find sondehub_enabled setting, using default (enabled / 15 seconds)."
            )
            auto_rx_config["sondehub_enabled"] = True
            auto_rx_config["sondehub_upload_rate"] = 15

        try:
            auto_rx_config["experimental_decoders"]["MRZ"] = config.getboolean(
                "advanced", "mrz_experimental")
        except:
            logging.warning(
                "Config - Did not find MRZ decoder experimental decoder setting, using default (disabled)."
            )
            auto_rx_config["experimental_decoders"]["MRZ"] = False

        try:
            auto_rx_config["experimental_decoders"][
                "IMET5"] = config.getboolean("advanced", "imet54_experimental")
        except:
            logging.warning(
                "Config - Did not find iMet-54 decoder experimental decoder setting, using default (enabled)."
            )
            auto_rx_config["experimental_decoders"]["IMET5"] = True

        # Sondehub Contact email (1.5.1)
        try:
            auto_rx_config["sondehub_contact_email"] = config.get(
                "sondehub", "sondehub_contact_email")
        except:
            logging.warning(
                "Config - Did not find Sondehub contact e-mail setting, using default (none)."
            )
            auto_rx_config["sondehub_contact_email"] = "*****@*****.**"

        # Sonde time threshold (1.5.1)
        try:
            auto_rx_config["sonde_time_threshold"] = config.getfloat(
                "filtering", "sonde_time_threshold")
        except:
            logging.warning(
                "Config - Did not find Sonde Time Threshold, using default (3 hrs)."
            )
            auto_rx_config["sonde_time_threshold"] = 3

        # Web control password
        try:
            auto_rx_config["web_password"] = config.get("web", "web_password")
            if auto_rx_config["web_password"] == "none":
                logging.warning(
                    "Config - Web Password not set, disabling web control")
                # FIX: previously set web_control = True here, which
                # contradicted the warning above and enabled web control
                # without a password; both the except branch below and the
                # default dict disable it in this situation.
                auto_rx_config["web_control"] = False
        except:
            logging.warning(
                "Config - Did not find Web Password setting, using default (web control disabled)"
            )
            auto_rx_config["web_control"] = False
            auto_rx_config["web_password"] = "******"

        # If we are being called as part of a unit test, just return the config now.
        if no_sdr_test:
            return auto_rx_config

        # Now we attempt to read in the individual SDR parameters.
        auto_rx_config["sdr_settings"] = {}

        for _n in range(1, auto_rx_config["sdr_quantity"] + 1):
            _section = "sdr_%d" % _n
            try:
                _device_idx = config.get(_section, "device_idx")
                _ppm = round(config.getfloat(_section, "ppm"))
                _gain = config.getfloat(_section, "gain")
                _bias = config.getboolean(_section, "bias")

                # Device index 0 means "any SDR", which is ambiguous with
                # more than one receiver attached.
                if (auto_rx_config["sdr_quantity"] > 1) and (_device_idx == "0"):
                    logging.critical(
                        "Config - SDR Device ID of 0 used with a multi-SDR configuration. Go read the warning in the config file!"
                    )
                    return None

                # See if the SDR exists.
                _sdr_valid = rtlsdr_test(_device_idx)
                if _sdr_valid:
                    auto_rx_config["sdr_settings"][_device_idx] = {
                        "ppm": _ppm,
                        "gain": _gain,
                        "bias": _bias,
                        "in_use": False,
                        "task": None,
                    }
                    logging.info("Config - Tested SDR #%s OK" % _device_idx)
                else:
                    logging.warning("Config - SDR #%s invalid." % _device_idx)
            except Exception as e:
                logging.error(
                    "Config - Error parsing SDR %d config - %s" % (_n, str(e)))
                continue

        # Sanity checks when using more than one SDR
        if (len(auto_rx_config["sdr_settings"].keys()) > 1) and (
                auto_rx_config["aprs_object_id"] != "<id>"):
            logging.critical(
                "Fixed APRS object ID used in a multi-SDR configuration. Go read the warnings in the config file!"
            )
            return None

        if (len(auto_rx_config["sdr_settings"].keys()) > 1) and (
                auto_rx_config["rotator_enabled"]):
            logging.critical(
                "Rotator enabled in a multi-SDR configuration. Go read the warnings in the config file!"
            )
            return None

        # TODO: Revisit this limitation once the OziPlotter output sub-module is complete.
        if (len(auto_rx_config["sdr_settings"].keys()) > 1) and auto_rx_config["ozi_enabled"]:
            logging.critical("Oziplotter output enabled in a multi-SDR configuration.")
            return None

        if len(auto_rx_config["sdr_settings"].keys()) == 0:
            # We have no SDRs to use!!
            logging.error("Config - No working SDRs! Cannot run...")
            return None
        else:
            # Create a global copy of the configuration file at this point
            global_config = copy.deepcopy(auto_rx_config)

            # Excise some sensitive parameters from the global config.
            global_config.pop("email_smtp_login")
            global_config.pop("email_smtp_password")
            global_config.pop("email_smtp_server")
            global_config.pop("email_smtp_port")
            global_config.pop("email_from")
            global_config.pop("email_to")
            global_config.pop("email_smtp_authentication")
            global_config.pop("sondehub_contact_email")
            global_config.pop("web_password")

            web_password = auto_rx_config["web_password"]

            return auto_rx_config

    except:
        # Bare except: report any parse failure rather than crashing the caller.
        traceback.print_exc()
        logging.error("Could not parse config file.")
        return None
# [ldap]
LDAP_ENABLED = 'ldap' in config.sections()
if LDAP_ENABLED:
    LOGGING['loggers']['django_auth_ldap']['level'] = confget(
        'ldap', 'loglevel', 'WARNING')

    # Needed if anonymous queries are not allowed
    AUTH_LDAP_BIND_DN = confget('ldap', 'binddn', '')
    AUTH_LDAP_BIND_PASSWORD = confget('ldap', 'bindpw', '')

    # User attributes
    AUTH_LDAP_USER_ATTR_MAP = {"email": "mail"}
    if config.has_option('ldap', 'userfirstname'):
        AUTH_LDAP_USER_ATTR_MAP["first_name"] = config.get(
            'ldap', 'userfirstname')
    # BUG FIX: this guard previously re-tested 'userfirstname', so a config
    # that set only 'userlastname' skipped the mapping, and one that set
    # only 'userfirstname' crashed on the config.get('ldap', 'userlastname')
    # call below.
    if config.has_option('ldap', 'userlastname'):
        AUTH_LDAP_USER_ATTR_MAP["last_name"] = config.get(
            'ldap', 'userlastname')

    # Are we using LDAP groups or local groups? Default to using LDAP groups
    USE_LDAP_GROUPS = confgetbool('ldap', 'useldapgroups', True)

    # If we are not using LDAP groups, then do not update the user model's
    # group membership
    AUTH_LDAP_MIRROR_GROUPS = USE_LDAP_GROUPS

    AUTH_LDAP_SERVER_URI = config.get('ldap', 'uri')
    AUTH_LDAP_USER_BASE = config.get('ldap', 'userbase')
def load_config(environ):
    """Load hub configuration options and return them as a dict.

    Options are read from the hub config file(s).  The config file
    location is controlled by the PythonOption ConfigFile in the httpd
    config.

    Backwards compatibility:
    - if ConfigFile is not set, opts are loaded from http config
    - if ConfigFile is set, then the http config must not provide Koji options
    - In a future version we will load the default hub config regardless
    - all PythonOptions (except ConfigFile) are now deprecated and support
      for them will disappear in a future version of Koji
    """
    logger = logging.getLogger("koji")
    # Gather the config file(s): everything in ConfigDir, then ConfigFile.
    main_conf = environ.get('koji.hub.ConfigFile', '/etc/koji-hub/hub.conf')
    conf_dir = environ.get('koji.hub.ConfigDir', '/etc/koji-hub/hub.conf.d')
    if conf_dir:
        config_files = koji.config_directory_contents(conf_dir)
    else:
        config_files = []
    if main_conf and os.path.isfile(main_conf):
        config_files.append(main_conf)
    if config_files:
        config = RawConfigParser()
        config.read(config_files)
    else:
        config = None

    # (option name, value type, default)
    cfgmap = [
        ('DBName', 'string', None),
        ('DBUser', 'string', None),
        ('DBHost', 'string', None),
        ('DBhost', 'string', None),   # alias for backwards compatibility
        ('DBPass', 'string', None),
        ('KojiDir', 'string', None),
        ('AuthPrincipal', 'string', None),
        ('AuthKeytab', 'string', None),
        ('ProxyPrincipals', 'string', ''),
        ('HostPrincipalFormat', 'string', None),
        ('DNUsernameComponent', 'string', 'CN'),
        ('ProxyDNs', 'string', ''),
        ('CheckClientIP', 'boolean', True),
        ('LoginCreatesUser', 'boolean', True),
        ('KojiWebURL', 'string', 'http://localhost.localdomain/koji'),
        ('EmailDomain', 'string', None),
        ('NotifyOnSuccess', 'boolean', True),
        ('DisableNotifications', 'boolean', False),
        ('Plugins', 'string', ''),
        ('PluginPath', 'string', '/usr/lib/koji-hub-plugins'),
        ('KojiDebug', 'boolean', False),
        ('KojiTraceback', 'string', None),
        ('VerbosePolicy', 'boolean', False),
        ('EnableFunctionDebug', 'boolean', False),
        ('LogLevel', 'string', 'WARNING'),
        ('LogFormat', 'string',
         '%(asctime)s [%(levelname)s] m=%(method)s u=%(user_name)s p=%(process)s r=%(remoteaddr)s %(name)s: %(message)s'),
        ('MissingPolicyOk', 'boolean', True),
        ('EnableMaven', 'boolean', False),
        ('EnableWin', 'boolean', False),
        ('EnableImageMigration', 'boolean', False),
        ('RLIMIT_AS', 'string', None),
        ('RLIMIT_CORE', 'string', None),
        ('RLIMIT_CPU', 'string', None),
        ('RLIMIT_DATA', 'string', None),
        ('RLIMIT_FSIZE', 'string', None),
        ('RLIMIT_MEMLOCK', 'string', None),
        ('RLIMIT_NOFILE', 'string', None),
        ('RLIMIT_NPROC', 'string', None),
        ('RLIMIT_OFILE', 'string', None),
        ('RLIMIT_RSS', 'string', None),
        ('RLIMIT_STACK', 'string', None),
        ('MemoryWarnThreshold', 'integer', 5000),
        ('MaxRequestLength', 'integer', 4194304),
        ('LockOut', 'boolean', False),
        ('ServerOffline', 'boolean', False),
        ('OfflineMessage', 'string', None),
    ]

    opts = {}
    for name, dtype, default in cfgmap:
        if config is not None and config.has_option('hub', name):
            # Use the typed getter matching the declared option type.
            if dtype == 'integer':
                opts[name] = config.getint('hub', name)
            elif dtype == 'boolean':
                opts[name] = config.getboolean('hub', name)
            else:
                opts[name] = config.get('hub', name)
        else:
            opts[name] = default

    # Honor the legacy 'DBhost' spelling when 'DBHost' is unset.
    if opts['DBHost'] is None:
        opts['DBHost'] = opts['DBhost']

    # Load policies (only from the config file); defaults fill any gaps.
    if config is not None and config.has_section('policy'):
        opts['policy'] = dict(config.items('policy'))
    else:
        opts['policy'] = {}
    for pname, text in _default_policies.iteritems():
        opts['policy'].setdefault(pname, text)

    # Repoint the koji path helpers at a configured KojiDir.
    if opts.get('KojiDir') is not None:
        koji.BASEDIR = opts['KojiDir']
        koji.pathinfo.topdir = opts['KojiDir']
    return opts
def get_merged_config(**options):
    """Return the final merged supervisord configuration as a string.

    This is the top-level function exported by this module.  It combines
    the project's config file with built-in defaults and command-line
    overrides, expands the special __defaults__/__overrides__ program
    sections, wires up the control server, and validates the result.
    """
    # Locate the containing project.  --project-dir wins; otherwise we
    # guess by searching for the manage.py file.
    project_dir = options.get("project_dir")
    if project_dir is None:
        project_dir = guess_project_dir()

    # The config file defaults to <project-dir>/supervisord.conf.
    config_file = options.get("config_file")
    if config_file is None:
        config_file = os.path.join(project_dir, CONFIG_FILE)

    # Template context variables available to the config files: mostly
    # useful information about the project and environment.
    ctx = {
        "PROJECT_DIR": project_dir,
        "PYTHON": os.path.realpath(os.path.abspath(sys.executable)),
        "SUPERVISOR_OPTIONS": rerender_options(options),
        "settings": settings,
        "environ": os.environ,
    }

    # ConfigParser merges multiple reads for us: later reads override
    # earlier ones, so the order is defaults -> project file -> CLI.
    cfg = RawConfigParser()
    cfg.readfp(StringIO(render_config(DEFAULT_CONFIG, ctx).decode()))
    with open(config_file, "r") as f:
        cfg.readfp(StringIO(render_config(f.read(), ctx).decode()))
    cfg.readfp(StringIO(get_config_from_options(**options)))

    # [program:__defaults__] supplies fallback options for every program
    # section that doesn't already set them.
    PROG_DEFAULTS = "program:__defaults__"
    if cfg.has_section(PROG_DEFAULTS):
        for option in cfg.options(PROG_DEFAULTS):
            default = cfg.get(PROG_DEFAULTS, option)
            for section in cfg.sections():
                if section.startswith("program:"):
                    if not cfg.has_option(section, option):
                        cfg.set(section, option, default)
        cfg.remove_section(PROG_DEFAULTS)

    # [program:__overrides__] forces its options onto every program
    # section, whether or not they set them already.
    PROG_OVERRIDES = "program:__overrides__"
    if cfg.has_section(PROG_OVERRIDES):
        for option in cfg.options(PROG_OVERRIDES):
            override = cfg.get(PROG_OVERRIDES, option)
            for section in cfg.sections():
                if section.startswith("program:"):
                    cfg.set(section, option, override)
        cfg.remove_section(PROG_OVERRIDES)

    # Make sure supervisorctl has a channel to supervisord.  Credentials
    # are derived from the secret key, so they are stable per-project
    # without being stored anywhere.  Prefer a configured unix socket;
    # otherwise listen on localhost at a fixed-but-randomish inet port.
    username = hashlib.md5(settings.SECRET_KEY.encode()).hexdigest()[:7]
    password = hashlib.md5(username.encode()).hexdigest()
    if cfg.has_section("unix_http_server"):
        set_if_missing(cfg, "unix_http_server", "username", username)
        set_if_missing(cfg, "unix_http_server", "password", password)
        serverurl = "unix://" + cfg.get("unix_http_server", "file")
    else:
        # A "random" port in the 9000 range, derived from the secret key:
        # stable for a given project, unlikely to collide across projects.
        port = int(hashlib.md5(password.encode()).hexdigest()[:3], 16) % 1000
        addr = "127.0.0.1:9%03d" % (port, )
        set_if_missing(cfg, "inet_http_server", "port", addr)
        set_if_missing(cfg, "inet_http_server", "username", username)
        set_if_missing(cfg, "inet_http_server", "password", password)
        serverurl = "http://" + cfg.get("inet_http_server", "port")
    set_if_missing(cfg, "supervisorctl", "serverurl", serverurl)
    set_if_missing(cfg, "supervisorctl", "username", username)
    set_if_missing(cfg, "supervisorctl", "password", password)
    set_if_missing(cfg, "rpcinterface:supervisor",
                   "supervisor.rpcinterface_factory",
                   "supervisor.rpcinterface:make_main_rpcinterface")

    # Drop any [program:] section explicitly marked exclude=true.
    for section in cfg.sections():
        try:
            if cfg.getboolean(section, "exclude"):
                cfg.remove_section(section)
        except NoOptionError:
            pass

    # Sanity-check to give better error messages: every program needs a
    # command.
    for section in cfg.sections():
        if section.startswith("program:"):
            if not cfg.has_option(section, "command"):
                msg = "Process name '%s' has no command configured"
                raise ValueError(msg % (section.split(":", 1)[-1]))

    # Serialise the merged config back out to a string.
    out = StringIO()
    cfg.write(out)
    return out.getvalue()
class ReportState(object):
    """Schedules periodic report e-mails based on a per-report config file.

    On construction it parses the report's schedule (start date, interval,
    optional data window) and arms a threading.Timer that fires
    run_report() at each scheduled time.
    """

    config_file = None
    config = None
    first_run = None
    previous_run = None
    next_run = None
    interval = None
    timer = None
    logger = None
    date_format = '%d-%m-%Y %H:%M:%S'

    # "[[[days:]hours:]minutes:]seconds" — leading components are optional.
    # Raw string: the original used a plain string with \d escapes.
    INTERVAL_REGEX = r'((((?P<days>\d+):)?(?P<hours>\d+):)?(?P<minutes>\d+):)?(?P<seconds>\d+)'

    @staticmethod
    def _parse_interval(text):
        """Parse a "days:hours:minutes:seconds" string into a timedelta.

        Returns None when the string does not match the expected format.
        (Extracted helper: this logic was previously duplicated verbatim
        for 'time interval' and 'report data interval'.)
        """
        match = re.match(ReportState.INTERVAL_REGEX, text)
        if match is None:
            return None
        # Unmatched optional groups come back as None -> treat as 0.
        parts = dict((name, int(value) if value else 0)
                     for name, value in match.groupdict().items())
        return timedelta(days=parts['days'], hours=parts['hours'],
                         minutes=parts['minutes'], seconds=parts['seconds'])

    def run_report(self):
        """Send the report, then schedule the next run one interval later."""
        # TODO: DO STUFF TO SEND EMAIL
        mailer = Mailer(self.config_file, self)
        mailer.report()
        self.previous_run = self.next_run
        self.next_run = self.next_run + self.interval
        time_to_report = self.next_run - datetime.utcnow()
        self.timer = Timer(time_to_report.total_seconds(), self.run_report)
        self.timer.start()

    def stop_report(self):
        """Cancel the pending report timer."""
        self.timer.cancel()

    def __init__(self, system_config_file, config_file, content_hash,
                 logs_path):
        self.content_hash = content_hash

        # Load system-wide defaults first, then overlay the report-specific
        # config file.  (FIX: the system config file handle was previously
        # opened without ever being closed.)
        self.config_file = config_file
        self.config = RawConfigParser(allow_no_value=True)
        with open(system_config_file, 'r') as system_cf:
            self.config.readfp(system_cf)
        self.config.read(config_file)

        # Prepare logging instance
        self.logger = create_logger_report(logs_path, config_file)

        # If defined load date format, if not use the default one
        if self.config.has_option('general', 'date format'):
            self.date_format = self.config.get('general', 'date format')

        # Parse date of first run
        self.first_run = datetime.strptime(
            self.config.get('general', 'start date'), self.date_format)
        # DEBUG
        #self.first_run = datetime.utcnow() + timedelta(seconds=5)

        # Parse interval between reports (mandatory).
        self.interval = self._parse_interval(
            self.config.get('general', 'time interval'))
        if self.interval is None:
            self.logger.critical(
                'Time interval could not be parsed. '
                'Format is: days:hours:minutes:seconds')
            raise InvalidReportException()

        # Parse included data interval (optional; None disables it).
        self.data_interval = None
        if self.config.has_option('general', 'report data interval'):
            self.data_interval = self._parse_interval(
                self.config.get('general', 'report data interval'))
            if self.data_interval is None:
                self.logger.critical(
                    'Report data interval could not be parsed. '
                    'Format is: days:hours:minutes:seconds')

        # Calculate next run
        self.next_run = self.first_run
        # Calculate previous_run
        self.previous_run = self.first_run - self.interval

        # Create trigger for reporting
        time_to_report = self.first_run - datetime.utcnow()
        self.timer = Timer(time_to_report.total_seconds(), self.run_report)
        self.timer.start()