def __init__(self, client_id, client_secret, access_token=None, refresh_token=None):
    """Initialize the API client, loading endpoint settings from config.ini.

    :param client_id: OAuth client id issued for this application.
    :param client_secret: OAuth client secret paired with ``client_id``.
    :param access_token: optional pre-obtained access token.
    :param refresh_token: optional token used to refresh ``access_token``.
    """
    self.client_id = client_id
    self.client_secret = client_secret
    self.access_token = access_token
    self.refresh_token = refresh_token
    self.expires_in = None
    parser = SafeConfigParser()
    # config.ini lives next to this module, not in the CWD.
    parser.read(os.path.dirname(os.path.abspath(__file__)) + '/config.ini')
    self._requests = requests.Session()
    try:
        self.SSL_VERSION = parser.get('config', 'ssl_version')
        self._requests.mount(
            'https://',
            SSLAdapter(ssl_version=getattr(ssl, self.SSL_VERSION)))
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are not swallowed.  Any failure to configure the SSL adapter
        # (missing option, unknown protocol name) falls back to the plain
        # module-level requests API, as before.
        self._requests = requests
    self.API_ROOT_URL = parser.get('config', 'api_root_url')
    self.SDK_VERSION = parser.get('config', 'sdk_version')
    self.AUTH_URL = parser.get('config', 'auth_url')
    self.OAUTH_URL = parser.get('config', 'oauth_url')
def __init__(self):
    """Read the app config file and populate this mapping with App objects keyed by name."""
    here = os.path.dirname(__file__)
    self.filename = os.path.join(here, self._filename)
    parser = SafeConfigParser()
    # Convenience getters: whitespace-separated tokens, and non-empty stripped lines.
    parser.getlist = lambda s, o: parser.get(s, o).split()
    parser.getlines = lambda s, o: [l.strip() for l in parser.get(s, o).splitlines() if l.strip()]
    found = parser.read(self.filename)
    if not found:
        raise RuntimeError('failed to read app config %r' % self.filename)
    # Map each option name to the parser getter declared for it in
    # self._getters; options without an entry fall back to plain parser.get.
    getters = {}
    for attr, options in self._getters.items():
        getters.update(dict.fromkeys(options, getattr(parser, attr)))
    def items(section):
        # Yield (option, converted value) pairs for one section.
        for o in parser.options(section):
            yield o, getters.get(o, parser.get)(section, o)
    # Each section becomes one App, with the section name as its 'name'.
    kwargs = [dict([('name', section)] + list(items(section)))
              for section in parser.sections()]
    apps = [App(**kw) for kw in kwargs]
    # some consistency checks: names and ports must be unique to make it
    # possible to deploy each app on each server.
    names = [app.name for app in apps]
    ports = [app.port for app in apps]
    assert len(names) == len(set(names))
    assert len(ports) == len(set(ports))
    super(Config, self).__init__((app.name, app) for app in apps)
def parse_config(variable=None):
    """
    Parse the Trident local configuration file.

    This function is called whenever Trident is imported, and it assures that
    Trident knows where the default ion table datafiles exist.  If a
    ``config.tri`` file doesn't exist in ``$HOME/.trident`` or in the current
    working directory, then Trident will launch the
    :class:`~trident.create_config` function to try to automatically generate
    one for the user.  For more information on this process, see the
    installation documentation.

    **Parameters**

    :variable: string, optional

        If you wish to get the value a variable is set to in the config
        file, specify that variable name here.  Will return the result
        value of that variable. Default: None
    """
    # Assure the ~/.trident directory exists, and read in the config file.
    home = os.path.expanduser("~")
    directory = os.path.join(home, '.trident')
    config_filename = os.path.join(directory, 'config.tri')

    # If config file exists in current directory, use it instead of file in
    # $HOME/.trident.  Stopgap for situations where user cannot access $HOME
    local_filename = os.path.join(os.getcwd(), 'config.tri')
    if os.path.exists(local_filename):
        config_filename = local_filename
    try:
        parser = SafeConfigParser()
        parser.read(config_filename)
        ion_table_dir = parser.get('Trident', 'ion_table_dir')
        ion_table_file = parser.get('Trident', 'ion_table_file')
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # propagate.  Any failure to read the config (missing file, section,
        # or option) triggers regeneration via create_config().
        config_filename = create_config()
        parser = SafeConfigParser()
        parser.read(config_filename)
        ion_table_dir = parser.get('Trident', 'ion_table_dir')
        ion_table_file = parser.get('Trident', 'ion_table_file')
    ion_table_dir = os.path.abspath(os.path.expanduser(ion_table_dir))
    if not os.path.exists(os.path.join(ion_table_dir, ion_table_file)):
        print("")
        print("No ion table data file found in %s" % ion_table_dir)
        # Download the datafile and persist its name back to the config.
        ion_table_file = get_datafiles(ion_table_dir)
        parser.set('Trident', 'ion_table_file', ion_table_file)
        with open(config_filename, 'w') as configfile:
            parser.write(configfile)
    # value to return depends on what was set for "variable"
    if variable is None:
        return ion_table_dir, ion_table_file
    else:
        return parser.get('Trident', variable)
def read_config_file(config_file_path):
    # type: (str) -> None
    """Populate bots_config with the email/key/site credentials from the given INI file."""
    parser = SafeConfigParser()
    parser.read(config_file_path)
    required_options = ('email', 'key', 'site')
    for bot_section in parser.sections():
        bots_config[bot_section] = {
            option: parser.get(bot_section, option) for option in required_options
        }
def read_config_file(config_file_path):
    # type: (str) -> None
    """Read each bot's email, key, and site from an INI file into bots_config."""
    parser = SafeConfigParser()
    parser.read(config_file_path)
    for name in parser.sections():
        entry = {}
        entry["email"] = parser.get(name, 'email')
        entry["key"] = parser.get(name, 'key')
        entry["site"] = parser.get(name, 'site')
        bots_config[name] = entry
def read_conf(conf_path, section_name=None, defaults=None, use_yaml=False):
    """Read a config file into a dict.

    With ``use_yaml`` the file is delegated to parse_config().  Otherwise the
    INI file is parsed; with ``section_name`` a flat dict of that section's
    options is returned, without it a dict of {section: {option: value}}.
    ``defaults`` seeds the parser's DEFAULT values.  Exits the process when
    the file or the requested section cannot be read.
    """
    if use_yaml:
        return parse_config(conf_path)
    if defaults is None:
        defaults = {}
    parser = SafeConfigParser(defaults)
    success = parser.read(conf_path)
    if not success:
        print("Unable to read config from %s" % conf_path)
        sys.exit(1)
    if section_name:
        if parser.has_section(section_name):
            # if log_format is set, extract it from the parser
            # to prevent to expand variables which can, in case of
            # log_format throw a ConfigParser.InterpolationMissingOptionError
            log_format = None
            if parser.has_option(section_name, 'log_format'):
                log_format = parser.get(section_name, 'log_format', raw=True)
                # don't use remove_options because it can fail without reason
                parser.set(section_name, 'log_format', '')
            conf = dict(parser.items(section_name))
            # Add log_format again, after parsing
            if log_format:
                conf['log_format'] = log_format
        else:
            print('Unable to find section %s in config %s' % (section_name, conf_path))
            exit(1)
    else:
        conf = {}
        for section in parser.sections():
            # if log_format is set, extract it from the parser
            # to prevent to expand variables which can, in case of
            # log_format throw a ConfigParser.InterpolationMissingOptionError
            log_format = None
            if parser.has_option(section, 'log_format'):
                log_format = parser.get(section, 'log_format', raw=True)
                # don't use remove_options because it can fail without reason
                parser.set(section, 'log_format', '')
            conf.update({section: dict(parser.items(section))})
            # Add log_format again, after parsing
            if log_format:
                conf[section]['log_format'] = log_format
    return conf
def read_config_file(config_file_path):
    # type: (str) -> None
    """Load per-bot email/key/site settings from an INI file into bots_config.

    Raises IOError when the (expanded, absolute) path is not an existing file.
    """
    config_file_path = os.path.abspath(os.path.expanduser(config_file_path))
    if not os.path.isfile(config_file_path):
        raise IOError("Could not read config file {}: File not found.".format(config_file_path))
    parser = SafeConfigParser()
    parser.read(config_file_path)
    for section in parser.sections():
        section_config = {key: parser.get(section, key) for key in ("email", "key", "site")}
        bots_config[section] = section_config
def read_header(self):
    """Parse the INI-format backup header and record its metadata on self.

    Populates backup_type/time/host/ipa_version/version/services from the
    [ipa] section of the header file.
    """
    config = SafeConfigParser()
    with open(self.header) as fd:
        config.readfp(fd)
    section = 'ipa'
    self.backup_type = config.get(section, 'type')
    self.backup_time = config.get(section, 'time')
    self.backup_host = config.get(section, 'host')
    self.backup_ipa_version = config.get(section, 'ipa_version')
    self.backup_version = config.get(section, 'version')
    # 'services' is stored as a comma-separated list.
    self.backup_services = config.get(section, 'services').split(',')
def getManifest(fp, format, defaults=None):
    """Read the manifest from the given open file pointer according to the
    given ManifestFormat. Pass a dict as ``defaults`` to override the
    defaults from the manifest format.
    """
    if defaults is None:
        defaults = format.defaults
    parser = SafeConfigParser()
    parser.readfp(fp)
    resource_type = format.resourceType
    results = {}
    for key in format.keys:
        if parser.has_option(resource_type, key):
            value = parser.get(resource_type, key)
        else:
            value = defaults.get(key, None)
        results[key] = value
    for key in format.parameterSections:
        section_name = "%s:%s" % (resource_type, key,)
        if parser.has_section(section_name):
            results[key] = dict(parser.items(section_name))
        else:
            results[key] = {}
    return results
class Config(object):
    """A ConfigParser wrapper to support defaults when calling instance
    methods, and also tied to a single section"""

    # The single INI section all lookups go through.
    SECTION = 'silkyy'

    def __init__(self, values=None, extra_sources=()):
        # With no explicit values, layer the packaged default.conf under any
        # config files found on disk; with values, seed a parser from that
        # mapping instead of reading files.
        # NOTE(review): extra_sources is currently unused — confirm intent.
        if values is None:
            sources = self._getsources()
            self.cp = ConfigParser()
            if __package__:
                default_config = ensure_str(get_data(__package__, 'default.conf'))
                self._load_config_file(StringIO(default_config))
            for source in sources:
                if os.path.exists(source):
                    self._load_config_file(open(source))
        else:
            self.cp = SafeConfigParser(values)
            self.cp.add_section(self.SECTION)

    def _load_config_file(self, fp):
        # Prepend the section header so plain key=value files parse under
        # the single SECTION.
        config = StringIO()
        config.write('[' + self.SECTION + ']' + os.linesep)
        config.write(fp.read())
        config.seek(0, os.SEEK_SET)
        self.cp.readfp(config)

    def _getsources(self):
        # Candidate on-disk config locations, in load order.
        sources = ['conf/silkyy.conf']
        return sources

    def get(self, option, default=None):
        # Environment variables (SILKYY_<OPTION>) take precedence over the file.
        env_key = 'SILKYY_' + option.replace('.', '_').upper()
        try:
            return os.getenv(env_key) or self.cp.get(self.SECTION, option)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise

    def _get(self, option, conv, default=None):
        # Shared typed getter: fetch, then convert with ``conv``.
        return conv(self.get(option, default))

    def getint(self, option, default=None):
        return self._get(option, int, default)

    def getboolean(self, option, default=None):
        return self._get(option, str2bool, default)

    def getfloat(self, option, default=None):
        return self._get(option, float, default)

    def items(self, section, default=None):
        # Unlike get(), items() takes an explicit section name.
        try:
            return self.cp.items(section)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise
def getManifest(fp, format, defaults=None):
    """Read the manifest from the given open file pointer according to the
    given ManifestFormat. Pass a dict as ``defaults`` to override the
    defaults from the manifest format.
    """
    if defaults is None:
        defaults = format.defaults
    parser = SafeConfigParser()
    if six.PY2:
        parser.readfp(fp)
    else:
        # Python 3: decode bytes if needed, then feed the text to the parser.
        raw = fp.read()
        if isinstance(raw, six.binary_type):
            raw = raw.decode()
        parser.read_string(raw)
    resource_type = format.resourceType
    results = {}
    for key in format.keys:
        results[key] = (parser.get(resource_type, key)
                        if parser.has_option(resource_type, key)
                        else defaults.get(key, None))
    for key in format.parameterSections:
        section = "%s:%s" % (resource_type, key,)
        if parser.has_section(section):
            results[key] = dict(parser.items(section))
        else:
            results[key] = {}
    return results
def setUp(self):
    """Read test.ini and prepare an authenticated Client for the write tests."""
    super(FunctionalTestBase, self).setUp()
    if not os.path.exists(TEST_CFG):
        raise Exception("Unable to run the write tests without a test.ini in that defines an access_token with write privs.")
    parser = SafeConfigParser()
    with open(TEST_CFG) as config_fp:
        parser.readfp(config_fp, 'test.ini')
    token = parser.get('write_tests', 'access_token')
    try:
        configured_activity = parser.get('activity_tests', 'activity_id')
    except NoOptionError:
        configured_activity = None
    self.client = Client(access_token=token)
    self.activity_id = configured_activity
def load_from_config(config_file, args, values): """ Load config from user's home folder, and load into config object. If values are given during runtime that conflict with values in config file, the config file values are overwritten. :param config_file: Name of an existing config file :param args: Array of values containing argument names from main :param values: Array of values containing values from arguments from main :return: key/value pairs of args/values """ # TODO: Handle args not existing in user config file and passed args config = dict() if config_file: load_config = os.path.join(BASE_CONFIG_DIR, config_file) if os.path.exists(load_config): parser = SafeConfigParser() parser.read(load_config) for _var in CONFIG_VARS: config[_var['var']] = parser.get('settings', _var['var']) items = values.items() for k, v in items: if k in args and v is not None: config[k] = v for _var in CONFIG_VARS: if _var['var'] in args and values[_var['var']] is not None: config[_var['var']] = values[_var['var']] if _var["var"] not in config: click.echo(_var['error']) return namedtuple('GenericDict', config.keys())(**config)
class ConfigHelper(object):
    """Read demo/server settings from CONFIG_FILE, falling back to the
    class-level DEFAULT_* constants when the file or an option is missing."""

    # Sentinel string meaning "explicitly no value" in the config file.
    NONE_VALUE = 'None'
    DEFAULT_HOST = "127.0.0.1"
    DEFAULT_PORT = 5696
    DEFAULT_CERTFILE = os.path.normpath(os.path.join(
        FILE_PATH, '../demos/certs/server.crt'))
    DEFAULT_KEYFILE = os.path.normpath(os.path.join(
        FILE_PATH, '../demos/certs/server.key'))
    DEFAULT_CA_CERTS = os.path.normpath(os.path.join(
        FILE_PATH, '../demos/certs/server.crt'))
    DEFAULT_SSL_VERSION = 'PROTOCOL_SSLv23'
    DEFAULT_USERNAME = None
    DEFAULT_PASSWORD = None

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        self.conf = SafeConfigParser()
        # read() returns the list of files parsed; empty list means not found.
        if self.conf.read(CONFIG_FILE):
            self.logger.debug("Using config file at {0}".format(CONFIG_FILE))
        else:
            self.logger.warning(
                "Config file {0} not found".format(CONFIG_FILE))

    def get_valid_value(self, direct_value, config_section,
                        config_option_name, default_value):
        """Returns a value that can be used as a parameter in client or server.

        If a direct_value is given, that value will be returned instead of the
        value from the config file. If the appropriate config file option is
        not found, the default_value is returned.

        :param direct_value: represents a direct value that should be used.
                             supercedes values from config files
        :param config_section: which section of the config file to use
        :param config_option_name: name of config option value
        :param default_value: default value to be used if other options not found
        :returns: a value that can be used as a parameter
        """
        ARG_MSG = "Using given value '{0}' for {1}"
        CONF_MSG = "Using value '{0}' from configuration file {1} for {2}"
        DEFAULT_MSG = "Using default value '{0}' for {1}"
        if direct_value:
            return_value = direct_value
            self.logger.debug(ARG_MSG.format(direct_value,
                                             config_option_name))
        else:
            try:
                return_value = self.conf.get(config_section,
                                             config_option_name)
                self.logger.debug(CONF_MSG.format(return_value, CONFIG_FILE,
                                                  config_option_name))
            except Exception:
                # Narrowed from a bare ``except:`` so SystemExit and
                # KeyboardInterrupt propagate; missing section/option (or any
                # parser error) still falls back to the default.
                return_value = default_value
                self.logger.debug(DEFAULT_MSG.format(default_value,
                                                     config_option_name))
        # The literal string 'None' in the config means "no value".
        if return_value == self.NONE_VALUE:
            return None
        else:
            return return_value
def handle(self, *args, **options):
    # type: (*Any, **Any) -> None
    """Sync the API key from ~/.zuliprc into the matching UserProfile row."""
    config_file = os.path.join(os.environ["HOME"], ".zuliprc")
    if not os.path.exists(config_file):
        raise RuntimeError("No ~/.zuliprc found")
    config = SafeConfigParser()
    with open(config_file, 'r') as f:
        config.readfp(f, config_file)
    api_key = config.get("api", "key")
    email = config.get("api", "email")
    try:
        user_profile = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        print("User %s does not exist; not syncing API key" % (email,))
    else:
        user_profile.api_key = api_key
        user_profile.save(update_fields=["api_key"])
def setUp(self):
    """Build the functional-test Client from the access token stored in test.ini."""
    super(FunctionalTestBase, self).setUp()
    if not os.path.exists(TEST_CFG):
        raise Exception(
            "Unable to run the write tests without a test.ini in that defines an access_token with write privs."
        )
    cfg = SafeConfigParser()
    with open(TEST_CFG) as fp:
        cfg.readfp(fp, 'test.ini')
    token = cfg.get('write_tests', 'access_token')
    # activity_id is optional; leave it as None when the option is absent.
    self.activity_id = None
    try:
        self.activity_id = cfg.get('activity_tests', 'activity_id')
    except NoOptionError:
        pass
    self.client = Client(access_token=token)
def get_host_domain_and_realm(self):
    """Return (hostname, domain, realm) read from /etc/ipa/default.conf.

    IPA 4.4 introduced the requirement that the schema be fetched when
    calling finalize(), which needs a TGT first — and that in turn needs the
    hostname and realm.  Rather than finalizing the API, read the IPA config
    file (INI format) directly and pull the values out.
    """
    parser = SafeConfigParser()
    parser.read('/etc/ipa/default.conf')
    def read_global(option):
        return parser.get('global', option)
    hostname = read_global('host')
    realm = read_global('realm')
    domain = read_global('domain')
    return hostname, domain, realm
def _read_config(self, config_name):
    """Populate self.ssl, self.trust_store_path and self.management_port
    from a vdsm-style config file, using the historical defaults when the
    file, section, or option is missing."""
    parser = SafeConfigParser()
    parser.read(config_name)

    def option(section, name, fallback):
        # Fetch one option, returning ``fallback`` when absent.
        try:
            return parser.get(section, name)
        except (NoSectionError, NoOptionError):
            return fallback

    ssl_value = option("vars", "ssl", None)
    if ssl_value is None:
        self.ssl = True
    else:
        self.ssl = ssl_value.lower() in ["1", "true"]
    if self.ssl:
        self.trust_store_path = option("vars", "trust_store_path",
                                       '/etc/pki/vdsm')
    else:
        self.trust_store_path = None
    self.management_port = option("addresses", "management_port", '54321')
def _read_config(self, config_name):
    """Read vdsm connection settings (ssl flag, trust store path, management
    port) from ``config_name``, applying defaults for anything missing."""
    cfg = SafeConfigParser()
    cfg.read(config_name)
    try:
        use_ssl = cfg.get("vars", "ssl").lower() in ["1", "true"]
    except (NoSectionError, NoOptionError):
        use_ssl = True
    self.ssl = use_ssl
    if not use_ssl:
        self.trust_store_path = None
    else:
        try:
            self.trust_store_path = cfg.get("vars", "trust_store_path")
        except (NoSectionError, NoOptionError):
            self.trust_store_path = '/etc/pki/vdsm'
    try:
        self.management_port = cfg.get("addresses", "management_port")
    except (NoSectionError, NoOptionError):
        self.management_port = '54321'
def _update_gridinit_meta(self, name, port):
    """Rewrite this service's gridinit section so the 4th Group field (and
    any occurrence of the old address in the command line) uses ``port``."""
    parser = SafeConfigParser()
    parser.read(GRID_CONF)
    section = "service.%s-%s" % (self.ns, name)
    group_parts = parser.get(section, "Group").split(",")
    old_addr = group_parts[3]
    new_addr = old_addr.split(':')[0] + ':' + str(port)
    group_parts[3] = new_addr
    parser.set(section, "Group", ",".join(group_parts))
    command = parser.get(section, 'command')
    if command and old_addr in command:
        parser.set(section, 'command', command.replace(old_addr, new_addr))
    with open(GRID_CONF, "w") as fp:
        parser.write(fp)
def create_database(config_file):
    """Build a SQLAlchemy session and model from the connection settings in
    the [app:main] section of ``config_file``."""
    parser = SafeConfigParser()
    parser.read(config_file)
    # Determine which database connection to use.
    database_connection = parser.get('app:main', 'install_database_connection')
    # NOTE(review): SafeConfigParser.get raises NoOptionError for a missing
    # option rather than returning None, so these None fallbacks only fire if
    # defaults resolve the option to None — confirm the intended behavior.
    if database_connection is None:
        database_connection = parser.get('app:main', 'database_connection')
    if database_connection is None:
        database_connection = 'sqlite:///%s' % parser.get('app:main', 'database_file')
    if database_connection is None:
        print('Unable to determine correct database connection.')
        exit(1)
    '''Initialize the database file.'''
    # Initialize the database connection.
    engine = create_engine(database_connection)
    MetaData(bind=engine)
    install_session = scoped_session(sessionmaker(bind=engine, autoflush=False, autocommit=True))
    model = mapping.init(database_connection)
    return install_session, model
def _update_gridinit_rawx(self, port):
    """Point the rawx gridinit Group entry at ``port``, keeping its host part."""
    parser = SafeConfigParser()
    parser.read(GRID_CONF)
    section = "Service.%s-%s" % (self.ns, self.name)
    fields = parser.get(section, "Group").split(",")
    host = fields[3].split(':')[0]
    fields[3] = host + ':' + str(port)
    parser.set(section, "Group", ",".join(fields))
    with open(GRID_CONF, "w") as fp:
        parser.write(fp)
def create_database(config_file):
    """Create a SQLAlchemy session and ORM model using the database
    connection configured in the [app:main] section of ``config_file``."""
    parser = SafeConfigParser()
    parser.read(config_file)
    # Determine which database connection to use.
    database_connection = parser.get('app:main', 'install_database_connection')
    # NOTE(review): parser.get raises NoOptionError on a missing option, so
    # these None checks only trigger when defaults yield None — verify.
    if database_connection is None:
        database_connection = parser.get('app:main', 'database_connection')
    if database_connection is None:
        database_connection = 'sqlite:///%s' % parser.get(
            'app:main', 'database_file')
    if database_connection is None:
        print('Unable to determine correct database connection.')
        exit(1)
    '''Initialize the database file.'''
    # Initialize the database connection.
    engine = create_engine(database_connection)
    MetaData(bind=engine)
    install_session = scoped_session(
        sessionmaker(bind=engine, autoflush=False, autocommit=True))
    model = mapping.init(database_connection)
    return install_session, model
def __parse_config(discover_server=True):
    """Best-effort fill of IPA client defaults (realm, domain, optionally the
    server list) from IPA_DEFAULT_CONF; every failure is ignored."""
    parser = SafeConfigParser()
    parser.read(IPA_DEFAULT_CONF)
    try:
        if not config.default_realm:
            config.default_realm = parser.get("global", "realm")
    except Exception:
        pass
    if discover_server:
        try:
            xmlrpc_uri = parser.get("global", "xmlrpc_uri")
            config.default_server.append(urlsplit(xmlrpc_uri).netloc)
        except Exception:
            pass
    try:
        if not config.default_domain:
            config.default_domain = parser.get("global", "domain")
    except Exception:
        pass
def __parse_config(discover_server=True):
    """Populate unset IPA client defaults from IPA_DEFAULT_CONF, swallowing
    any read/parse failure (the file is optional on clients)."""
    cfg = SafeConfigParser()
    cfg.read(IPA_DEFAULT_CONF)

    def fill(attr, option):
        # Copy [global] option into config.<attr> unless already set;
        # ignore missing file/section/option.
        try:
            if not getattr(config, attr):
                setattr(config, attr, cfg.get("global", option))
        except Exception:
            pass

    fill("default_realm", "realm")
    if discover_server:
        try:
            uri = cfg.get("global", "xmlrpc_uri")
            config.default_server.append(urlsplit(uri).netloc)
        except Exception:
            pass
    fill("default_domain", "domain")
def get_vrouter_tor_agent_name(self, conf_file):
    """Return the agent_name option from a tor-agent config file, or None
    when no file is given or it cannot be read."""
    if not conf_file:
        return None
    try:
        # Strip leading whitespace so indented INI lines still parse.
        stripped = '\n'.join(line.strip() for line in open(conf_file))
        parser = SafeConfigParser()
        parser.readfp(StringIO(stripped))
    except Exception as err:
        self.msg_log("Error reading file : " + conf_file + " Error : " + str(err),
                     SandeshLevel.SYS_ERR)
        return None
    return parser.get("DEFAULT", "agent_name")
def find_scrapy_project(project):
    """Locate the closest scrapy.cfg and return the settings module path for
    ``project``.

    Side effect: appends the project directory to sys.path so the settings
    module can be imported.

    :raises RuntimeError: when no scrapy.cfg is found, the project has no
        [settings] entry, or the entry is empty.
    """
    project_config_path = closest_scrapy_cfg()
    if not project_config_path:
        raise RuntimeError('Cannot find scrapy.cfg file')
    project_config = SafeConfigParser()
    project_config.read(project_config_path)
    try:
        project_settings = project_config.get('settings', project)
    except (NoSectionError, NoOptionError) as e:
        # BUG FIX: exceptions have no .message attribute on Python 3;
        # str(e) works on both Python 2 and 3.
        raise RuntimeError(str(e))
    if not project_settings:
        raise RuntimeError('Cannot find scrapy project settings')
    project_location = os.path.dirname(project_config_path)
    sys.path.append(project_location)
    return project_settings
def nodemgr_sighup_handler(self):
    """SIGHUP handler: re-read the collector list from the config file and
    reconnect sandesh with a reshuffled list when the set changed."""
    collector_list = list()
    config = SafeConfigParser()
    config.read([self.config.config_file_path])
    if 'COLLECTOR' in config.sections():
        try:
            collector = config.get('COLLECTOR', 'server_list')
            collector_list = collector.split()
        except NoOptionError:
            pass
    # Checksum the sorted list so reordering in the file alone does not
    # trigger a spurious reconnect.
    collector_list.sort()
    new_chksum = hashlib.md5(("".join(collector_list)).encode()).hexdigest()
    if new_chksum != self.collector_chksum:
        self.collector_chksum = new_chksum
        # Randomize the order so connection load spreads across collectors.
        self.random_collectors = random.sample(collector_list, len(collector_list))
        self.sandesh_instance.reconfig_collectors(self.random_collectors)
def read(config_path):
    """Parse an INI file into an OrderedDict of section name -> ConfigDict.

    Values are interpolated with ``pwd`` bound to the current working
    directory.  Asserts that the file was actually read.
    """
    config_path = os.path.abspath(config_path)
    config_root = os.path.dirname(config_path)
    parser = SafeConfigParser()
    loaded = parser.read(config_path)
    assert config_path in loaded, loaded
    interpolation_vars = {"pwd": os.path.abspath(os.path.curdir)}
    result = OrderedDict()
    for section in parser.sections():
        section_dict = ConfigDict(config_root)
        for key in parser.options(section):
            section_dict[key] = parser.get(section, key, raw=False,
                                           vars=interpolation_vars)
        result[section] = section_dict
    return result
def read(config_path):
    """Parse an INI file into an OrderedDict of section name -> ConfigDict.

    Values are interpolated with ``pwd`` bound to the current working
    directory.  Asserts that the file was actually read.
    """
    config_path = os.path.abspath(config_path)
    config_root = os.path.split(config_path)[0]
    parser = SafeConfigParser()
    success = parser.read(config_path)
    assert config_path in success, success
    subns = {"pwd": os.path.abspath(os.path.curdir)}
    rv = OrderedDict()
    for section in parser.sections():
        rv[section] = ConfigDict(config_root)
        for key in parser.options(section):
            # BUG FIX: raw/vars are keyword-only parameters of
            # configparser.get() on Python 3; passing them positionally
            # raises TypeError.
            rv[section][key] = parser.get(section, key, raw=False, vars=subns)
    return rv
def get(self, section, option, default=None, **kwargs):
    """Wrapper around SafeConfigParser.get() with a custom default value.

    This method simply wraps the base class method, but adds a `default`
    keyword argument. The value of `default` is returned whenever the config
    parser does not have the requested option and/or section.
    """
    if self.has_option(section, option):
        try:
            return SafeConfigParser.get(self, section, option, **kwargs)
        except ValueError as exc:
            # provide somewhat descriptive error
            message = ("Failed to obtain value from configuration for %s.%s. "
                       "Original exception was: %s" % (section, option, exc))
            raise ValueError(message)
    return default
def get_vrouter_process_info(self, proc_name):
    """Scan the supervisord vrouter .ini files for a [program:<proc_name>]
    section; for a tor-agent command, resolve and return (proc_name,
    agent_name).  Falls through to the default vrouter group tuple."""
    vrouter_file = "/etc/contrail/supervisord_vrouter_files"
    for root, dirs, files in os.walk(vrouter_file):
        for file in files:
            if file.endswith(".ini"):
                filename = \
                    '/etc/contrail/supervisord_vrouter_files/' + file
                try:
                    # Strip leading whitespace so indented INI lines parse.
                    data = StringIO('\n'.join(line.strip()
                                              for line in open(filename)))
                except IOError:
                    msg = "This file does not exist anymore so continuing: "
                    self.msg_log(msg + filename, SandeshLevel.SYS_ERR)
                    continue
                Config = SafeConfigParser()
                Config.readfp(data)
                sections = Config.sections()
                if not sections[0]:
                    msg = "Section not present in the ini file : "
                    self.msg_log(msg + filename, SandeshLevel.SYS_ERR)
                    continue
                # Section names look like "program:<name>".
                name = sections[0].split(':')
                if len(name) < 2:
                    msg = "Incorrect section name in the ini file : "
                    self.msg_log(msg + filename, SandeshLevel.SYS_ERR)
                    continue
                if name[1] == proc_name:
                    command = Config.get(sections[0], "command")
                    if not command:
                        msg = "Command not present in the ini file : "
                        self.msg_log(msg + filename, SandeshLevel.SYS_ERR)
                        continue
                    args = command.split()
                    if (args[0] == '/usr/bin/contrail-tor-agent'):
                        # Tor agents carry their identity in the config file
                        # passed via --config_file.
                        try:
                            index = args.index('--config_file')
                            args_val = args[index + 1]
                            agent_name = \
                                self.get_vrouter_tor_agent_name(args_val)
                            return (proc_name, agent_name)
                        except Exception as err:
                            msg = "Tor Agent command does " + \
                                "not have config file : "
                            self.msg_log(msg + command, SandeshLevel.SYS_ERR)
    # Default: group name plus this host's FQDN (or configured hostname).
    return ('vrouter_group', socket.getfqdn(self.host_ip)
            if self.hostname is None else self.hostname)
def get(self, section, option, default=None, **kwargs):
    """Fetch ``section.option`` via SafeConfigParser.get, or ``default``.

    Adds a `default` keyword argument on top of the base-class getter: it is
    returned whenever the parser lacks the requested section or option.
    """
    if not self.has_option(section, option):
        return default
    try:
        value = SafeConfigParser.get(self, section, option, **kwargs)
    except ValueError as err:
        # Re-raise with context about which setting failed to parse.
        raise ValueError(
            "Failed to obtain value from configuration for %s.%s. "
            "Original exception was: %s" % (section, option, err))
    return value
def sighup_handler(self):
    """SIGHUP handler: re-read the collector list from the conf file and
    reconnect with a reshuffled list when the set of collectors changed."""
    if self._args.conf_file:
        config = SafeConfigParser()
        config.read(self._args.conf_file)
        if 'DEFAULTS' in config.sections():
            try:
                collectors = config.get('DEFAULTS', 'collectors')
                if type(collectors) is str:
                    collectors = collectors.split()
                # BUG FIX: hashlib.md5 requires bytes on Python 3; encode the
                # joined list first (matches the nodemgr sighup handler).
                new_chksum = hashlib.md5(
                    ("".join(collectors)).encode()).hexdigest()
                if new_chksum != self._chksum:
                    self._chksum = new_chksum
                    # Randomize order so load spreads across collectors.
                    config.random_collectors = random.sample(
                        collectors, len(collectors))
                    # Reconnect to achieve loadbalance irrespective of list
                    self.logger.sandesh_reconfig_collectors(config)
            except NoOptionError:
                pass
def parse(self):
    """
    Parse the "deps" option of each INI section in self.obj.content into
    dependency objects appended to ``self.obj.dependencies``.

    :return:
    """
    parser = SafeConfigParser()
    parser.readfp(StringIO(self.obj.content))
    for section in parser.sections():
        try:
            content = parser.get(section=section, option="deps")
            for n, line in enumerate(content.splitlines()):
                # Skip lines explicitly marked to be ignored.
                if self.is_marked_line(line):
                    continue
                if line:
                    # Each non-empty line is a requirements.txt-style spec.
                    req = RequirementsTXTLineParser.parse(line)
                    if req:
                        req.dependency_type = self.obj.file_type
                        self.obj.dependencies.append(req)
        except NoOptionError:
            # Sections without a "deps" option are simply skipped.
            pass
def check_dlm_cfgfile():
    """Parse DLM config file"""
    fname = "/etc/dlm/dlm.conf"
    try:
        with open(fname, 'r') as fp:
            # Prepend a section header so the bare key=value file parses.
            contents = '[dlm]\n' + fp.read()
    except (OSError, IOError):
        return False
    cfg = SafeConfigParser()
    try:
        cfg.read_string(contents)
    except AttributeError:
        # Python2 ConfigParser doesn't have cfg.read_string
        from cStringIO import StringIO
        cfg.readfp(StringIO(contents))
    if not cfg.has_option('dlm', 'protocol'):
        return False
    return cfg.get('dlm', 'protocol').lower() in ['sctp', 'detect', '1', '2']
def parse_config_file(self, config_file):
    """Parse the blessclient INI config into the nested settings dict used
    by the client.

    Sections consumed: CLIENT, MAIN, LAMBDA, optionally VAULT, plus one
    KMSAUTH_CONFIG_<REGION> entry per region alias listed in MAIN.

    :param config_file: open file-like object containing the INI text.
    :returns: dict with CLIENT_CONFIG / BLESS_CONFIG / AWS_CONFIG /
        REGION_ALIAS keys (plus VAULT_CONFIG when ca_backend is
        hashicorp-vault, and one KMSAUTH_CONFIG_* key per region).
    """
    config = SafeConfigParser(self.DEFAULT_CONFIG)
    config.readfp(config_file)
    blessconfig = {
        'CLIENT_CONFIG': {
            'domain_regex': config.get('CLIENT', 'domain_regex'),
            'cache_dir': config.get('CLIENT', 'cache_dir'),
            'cache_file': config.get('CLIENT', 'cache_file'),
            'mfa_cache_dir': config.get('CLIENT', 'mfa_cache_dir'),
            'mfa_cache_file': config.get('CLIENT', 'mfa_cache_file'),
            # Comma-separated list of endpoints used to discover our IP.
            'ip_urls': [
                s.strip() for s in config.get('CLIENT', 'ip_urls').split(",")
            ],
            'update_script': config.get('CLIENT', 'update_script'),
            'user_session_length':
                int(config.get('CLIENT', 'user_session_length')),
            'usebless_role_session_length':
                int(config.get('CLIENT', 'usebless_role_session_length')),
            'update_sshagent': config.getboolean('CLIENT', 'update_sshagent'),
        },
        'BLESS_CONFIG': {
            'ca_backend': config.get('MAIN', 'ca_backend'),
            'userrole': config.get('LAMBDA', 'user_role'),
            'accountid': config.get('LAMBDA', 'account_id'),
            'functionname': config.get('LAMBDA', 'functionname'),
            'functionversion': config.get('LAMBDA', 'functionversion'),
            'certlifetime': config.getint('LAMBDA', 'certlifetime'),
            'ipcachelifetime': config.getint('LAMBDA', 'ipcachelifetime'),
            'timeoutconfig': {
                'connect': config.getint('LAMBDA', 'timeout_connect'),
                'read': config.getint('LAMBDA', 'timeout_read')
            }
        },
        'AWS_CONFIG': {
            'bastion_ips': config.get('MAIN', 'bastion_ips'),
            'remote_user': config.get('MAIN', 'remote_user')
        },
        'REGION_ALIAS': {}
    }
    # The VAULT section is only required for the hashicorp-vault backend.
    if blessconfig['BLESS_CONFIG']['ca_backend'].lower(
    ) == 'hashicorp-vault':
        blessconfig['VAULT_CONFIG'] = {
            'vault_addr': config.get('VAULT', 'vault_addr'),
            'auth_mount': config.get('VAULT', 'auth_mount'),
            'ssh_backend_mount': config.get('VAULT', 'ssh_backend_mount'),
            'ssh_backend_role': config.get('VAULT', 'ssh_backend_role'),
        }
    regions = config.get('MAIN', 'region_aliases').split(",")
    regions = [region.strip() for region in regions]
    for region in regions:
        region = region.upper()
        # One KMS auth config per region alias, with the alias -> awsregion
        # mapping recorded in REGION_ALIAS.
        kms_region_key = 'KMSAUTH_CONFIG_{}'.format(region)
        blessconfig.update(
            {kms_region_key: self._get_region_kms_config(region, config)})
        blessconfig['REGION_ALIAS'].update(
            {region: blessconfig[kms_region_key]['awsregion']})
    return blessconfig
dest='insecure', default=True, help='Allow insecure connection when using SSL') (options, args) = parser.parse_args() LOG.debug('Running with parameter insecure = %s', options.insecure) if os.path.isfile(nova_cfg): config = SafeConfigParser() config.read(nova_cfg) else: LOG.error('Nova configuration file %s does not exist', nova_cfg) sys.exit(1) my_host = config.get('DEFAULT', 'host') if not my_host: # If host isn't set nova defaults to this my_host = socket.gethostname() loader = loading.get_plugin_loader('password') auth = loader.load_from_options( auth_url=config.get('neutron', 'auth_url'), username=config.get('neutron', 'username'), password=config.get('neutron', 'password'), project_name=config.get('neutron', 'project_name'), project_domain_name=config.get('neutron',
def __init__(self, email=None, api_key=None, config_file=None,
             verbose=False, retry_on_errors=True, site=None, client=None,
             cert_bundle=None, insecure=None, client_cert=None,
             client_cert_key=None):
    # type: (Optional[str], Optional[str], Optional[str], bool, bool, Optional[str], Optional[str], Optional[str], Optional[bool], Optional[str], Optional[str]) -> None
    """Build a Zulip API client.

    Credentials and connection settings are resolved in priority order:
    constructor arguments, then ZULIP_* environment variables, then the
    config file (~/.zuliprc by default).  Raises RuntimeError when required
    settings (api_key/email/site) cannot be resolved or TLS files are
    missing/invalid.
    """
    if client is None:
        client = _default_client()

    # Fill values from Environment Variables if not available in Constructor
    if config_file is None:
        config_file = os.environ.get("ZULIP_CONFIG")
    if api_key is None:
        api_key = os.environ.get("ZULIP_API_KEY")
    if email is None:
        email = os.environ.get("ZULIP_EMAIL")
    if site is None:
        site = os.environ.get("ZULIP_SITE")
    if client_cert is None:
        client_cert = os.environ.get("ZULIP_CERT")
    if client_cert_key is None:
        client_cert_key = os.environ.get("ZULIP_CERT_KEY")
    if cert_bundle is None:
        cert_bundle = os.environ.get("ZULIP_CERT_BUNDLE")

    if config_file is None:
        config_file = get_default_config_filename()
    if os.path.exists(config_file):
        config = SafeConfigParser()
        with open(config_file, 'r') as f:
            config.readfp(f, config_file)
        if api_key is None:
            api_key = config.get("api", "key")
        if email is None:
            email = config.get("api", "email")
        if site is None and config.has_option("api", "site"):
            site = config.get("api", "site")
        if client_cert is None and config.has_option("api", "client_cert"):
            client_cert = config.get("api", "client_cert")
        if client_cert_key is None and config.has_option("api", "client_cert_key"):
            client_cert_key = config.get("api", "client_cert_key")
        if cert_bundle is None and config.has_option("api", "cert_bundle"):
            cert_bundle = config.get("api", "cert_bundle")
        if insecure is None and config.has_option("api", "insecure"):
            # Be quite strict about what is accepted so that users don't
            # disable security unintentionally.
            insecure_setting = config.get("api", "insecure").lower()
            if insecure_setting == "true":
                insecure = True
            elif insecure_setting == "false":
                insecure = False
            else:
                raise RuntimeError("insecure is set to '%s', it must be 'true' or 'false' if it is used in %s"
                                   % (insecure_setting, config_file))
    elif None in (api_key, email):
        raise RuntimeError("api_key or email not specified and %s does not exist"
                           % (config_file,))

    self.api_key = api_key
    self.email = email
    self.verbose = verbose
    if site is not None:
        # Normalize the site URL: add a scheme if missing (plain http only
        # for localhost) and strip trailing slashes.
        if site.startswith("localhost"):
            site = "http://" + site
        elif not site.startswith("http"):
            site = "https://" + site
        # Remove trailing "/"s from site to simplify the below logic for adding "/api"
        site = site.rstrip("/")
        self.base_url = site
    else:
        raise RuntimeError("Missing Zulip server URL; specify via --site or ~/.zuliprc.")
    if not self.base_url.endswith("/api"):
        self.base_url += "/api"
    self.base_url += "/"
    self.retry_on_errors = retry_on_errors
    self.client_name = client

    # TLS verification: disabled entirely when insecure, otherwise verify
    # against a custom bundle or the system CA store.
    if insecure:
        self.tls_verification = False  # type: Union[bool, str]
    elif cert_bundle is not None:
        if not os.path.isfile(cert_bundle):
            raise RuntimeError("tls bundle '%s' does not exist"
                               % (cert_bundle,))
        self.tls_verification = cert_bundle
    else:
        # Default behavior: verify against system CA certificates
        self.tls_verification = True

    if client_cert is None:
        if client_cert_key is not None:
            raise RuntimeError("client cert key '%s' specified, but no client cert public part provided"
                               % (client_cert_key,))
    else:  # we have a client cert
        if not os.path.isfile(client_cert):
            raise RuntimeError("client cert '%s' does not exist"
                               % (client_cert,))
        if client_cert_key is not None:
            if not os.path.isfile(client_cert_key):
                raise RuntimeError("client cert key '%s' does not exist"
                                   % (client_cert_key,))
    self.client_cert = client_cert
    self.client_cert_key = client_cert_key
def __init__(self, email=None, api_key=None, config_file=None, verbose=False, retry_on_errors=True, site=None, client=None, cert_bundle=None, insecure=None, client_cert=None, client_cert_key=None):
    # type: (Optional[str], Optional[str], Optional[str], bool, bool, Optional[str], Optional[str], Optional[str], Optional[bool], Optional[str], Optional[str]) -> None
    """Initialise the API client from explicit arguments, environment
    variables and an optional zuliprc-style config file, in that order
    of precedence.

    Raises ZulipError (or the ConfigNotFoundError / MissingURLError
    subclasses) when credentials, the server URL, the `insecure` flag or
    referenced certificate files cannot be resolved.
    """
    if client is None:
        client = _default_client()

    # Normalize user-specified path
    if config_file is not None:
        config_file = os.path.abspath(os.path.expanduser(config_file))

    # Fill values from Environment Variables if not available in Constructor
    if config_file is None:
        config_file = os.environ.get("ZULIP_CONFIG")
    if api_key is None:
        api_key = os.environ.get("ZULIP_API_KEY")
    if email is None:
        email = os.environ.get("ZULIP_EMAIL")
    if site is None:
        site = os.environ.get("ZULIP_SITE")
    if client_cert is None:
        client_cert = os.environ.get("ZULIP_CERT")
    if client_cert_key is None:
        client_cert_key = os.environ.get("ZULIP_CERT_KEY")
    if cert_bundle is None:
        cert_bundle = os.environ.get("ZULIP_CERT_BUNDLE")
    if insecure is None:
        # Be quite strict about what is accepted so that users don't
        # disable security unintentionally.
        insecure_setting = os.environ.get('ZULIP_ALLOW_INSECURE')
        if insecure_setting is not None:
            insecure = validate_boolean_field(insecure_setting)
            if insecure is None:
                # validate_boolean_field returned None, i.e. the value was
                # not a recognised true/false spelling.
                raise ZulipError("The ZULIP_ALLOW_INSECURE environment "
                                 "variable is set to '{}', it must be "
                                 "'true' or 'false'".format(insecure_setting))

    if config_file is None:
        config_file = get_default_config_filename()

    # config_file may still be None here (no default config located), so
    # guard before touching the filesystem.
    if config_file is not None and os.path.exists(config_file):
        config = SafeConfigParser()
        with open(config_file, 'r') as f:
            config.readfp(f, config_file)
        if api_key is None:
            api_key = config.get("api", "key")
        if email is None:
            email = config.get("api", "email")
        if site is None and config.has_option("api", "site"):
            site = config.get("api", "site")
        if client_cert is None and config.has_option("api", "client_cert"):
            client_cert = config.get("api", "client_cert")
        if client_cert_key is None and config.has_option("api", "client_cert_key"):
            client_cert_key = config.get("api", "client_cert_key")
        if cert_bundle is None and config.has_option("api", "cert_bundle"):
            cert_bundle = config.get("api", "cert_bundle")
        if insecure is None and config.has_option("api", "insecure"):
            # Be quite strict about what is accepted so that users don't
            # disable security unintentionally.
            insecure_setting = config.get('api', 'insecure')
            insecure = validate_boolean_field(insecure_setting)
            if insecure is None:
                raise ZulipError("insecure is set to '{}', it must be "
                                 "'true' or 'false' if it is used in {}".format(
                                     insecure_setting, config_file))
    elif None in (api_key, email):
        raise ConfigNotFoundError("api_key or email not specified and file %s does not exist" % (config_file, ))

    assert (api_key is not None and email is not None)
    self.api_key = api_key
    self.email = email
    self.verbose = verbose

    if site is not None:
        # Normalise the server URL: localhost defaults to http://,
        # everything else to https://.
        if site.startswith("localhost"):
            site = "http://" + site
        elif not site.startswith("http"):
            site = "https://" + site
        # Remove trailing "/"s from site to simplify the below logic for adding "/api"
        site = site.rstrip("/")
        self.base_url = site
    else:
        raise MissingURLError("Missing Zulip server URL; specify via --site or ~/.zuliprc.")

    if not self.base_url.endswith("/api"):
        self.base_url += "/api"
    self.base_url += "/"

    self.retry_on_errors = retry_on_errors
    self.client_name = client

    # TLS server verification: `insecure` disables it entirely, an
    # explicit CA bundle overrides the system store, otherwise verify
    # against system CA certificates.
    if insecure:
        logger.warning('Insecure mode enabled. The server\'s SSL/TLS '
                       'certificate will not be validated, making the '
                       'HTTPS connection potentially insecure')
        self.tls_verification = False  # type: Union[bool, str]
    elif cert_bundle is not None:
        if not os.path.isfile(cert_bundle):
            raise ConfigNotFoundError("tls bundle '%s' does not exist" % (cert_bundle, ))
        self.tls_verification = cert_bundle
    else:
        # Default behavior: verify against system CA certificates
        self.tls_verification = True

    # Optional TLS client certificate: the key may only be given together
    # with the certificate's public part, and both must exist on disk.
    if client_cert is None:
        if client_cert_key is not None:
            raise ConfigNotFoundError("client cert key '%s' specified, but no client cert public part provided" % (client_cert_key, ))
    else:  # we have a client cert
        if not os.path.isfile(client_cert):
            raise ConfigNotFoundError("client cert '%s' does not exist" % (client_cert, ))
        if client_cert_key is not None:
            if not os.path.isfile(client_cert_key):
                raise ConfigNotFoundError("client cert key '%s' does not exist" % (client_cert_key, ))
    self.client_cert = client_cert
    self.client_cert_key = client_cert_key

    # requests session is not created in the constructor; it starts out
    # unset along with the connection flag.
    self.session = None  # type: Optional[requests.Session]
    self.has_connected = False
class GlobusConfigParser(object):
    """Thin wrapper around a SafeConfigParser that layers environment
    variable lookups and general-section failover on top of the usual
    file-based configuration.
    """

    _GENERAL_CONF_SECTION = 'general'

    def __init__(self):
        self._parser = SafeConfigParser()
        self._load_config()

    def _load_config(self):
        # TODO: /etc is not windows friendly, not sure about expanduser
        sources = [_get_lib_config_path(), "/etc/globus.cfg",
                   os.path.expanduser("~/.globus.cfg")]
        try:
            self._parser.read(sources)
        except MissingSectionHeaderError:
            raise GlobusError(
                "Failed to parse your ~/.globus.cfg Your config file may be "
                "in an old format. Please visit https://tokens.globus.org/ to "
                "get the latest format of this file.")

    def get(self, option, section=None, environment=None,
            failover_to_general=False, check_env=False,
            type_cast=str):
        """Look up *option* and return it passed through *type_cast*,
        or None when it cannot be found.

        *environment* is shorthand for a section named
        ``environment <name>`` and takes precedence over *section*;
        with neither given, the general section is consulted.  When
        *check_env* is true, a ``GLOBUS_SDK_<OPTION>`` shell variable
        wins over the config file.  A missing option yields None
        (after optionally retrying in the general section) rather than
        raising NoOptionError.
        """
        # 'environment' is just a fancy name for sections that start
        # with 'environment '
        if environment:
            section = 'environment ' + environment
        # no section and no environment means the general conf section
        if section is None:
            section = self._GENERAL_CONF_SECTION

        env_key = 'GLOBUS_SDK_{}'.format(option.upper())
        result = None
        # Shell environment has higher precedence than config files so
        # that behavior can be overridden locally per (sub)shell.
        if check_env and env_key in os.environ:
            result = os.environ[env_key]
        else:
            try:
                result = self._parser.get(section, option)
            except (NoOptionError, NoSectionError):
                if failover_to_general:
                    result = self.get(
                        option, section=self._GENERAL_CONF_SECTION)

        return type_cast(result) if result is not None else result
def __init__(self, email=None, api_key=None, config_file=None, verbose=False, retry_on_errors=True, site=None, client=None, cert_bundle=None, insecure=None, client_cert=None, client_cert_key=None):
    # type: (Optional[str], Optional[str], Optional[str], bool, bool, Optional[str], Optional[str], Optional[str], Optional[bool], Optional[str], Optional[str]) -> None
    """Initialise the API client from explicit arguments, environment
    variables and an optional zuliprc-style config file.

    Precedence for every setting is: constructor argument, then
    environment variable, then config file.  Raises RuntimeError when
    credentials or the server URL cannot be determined, when `insecure`
    is not a recognised boolean, or when a referenced certificate file
    does not exist.
    """
    if client is None:
        client = _default_client()

    # Fill values from Environment Variables if not available in Constructor
    if config_file is None:
        config_file = os.environ.get("ZULIP_CONFIG")
    if api_key is None:
        api_key = os.environ.get("ZULIP_API_KEY")
    if email is None:
        email = os.environ.get("ZULIP_EMAIL")
    if site is None:
        site = os.environ.get("ZULIP_SITE")
    if client_cert is None:
        client_cert = os.environ.get("ZULIP_CERT")
    if client_cert_key is None:
        client_cert_key = os.environ.get("ZULIP_CERT_KEY")
    if cert_bundle is None:
        cert_bundle = os.environ.get("ZULIP_CERT_BUNDLE")

    if config_file is None:
        config_file = get_default_config_filename()

    # config_file may still be None here (no default config found), so
    # guard before touching the filesystem.
    if config_file is not None and os.path.exists(config_file):
        config = SafeConfigParser()
        with open(config_file, 'r') as f:
            config.readfp(f, config_file)
        if api_key is None:
            api_key = config.get("api", "key")
        if email is None:
            email = config.get("api", "email")
        if site is None and config.has_option("api", "site"):
            site = config.get("api", "site")
        if client_cert is None and config.has_option("api", "client_cert"):
            client_cert = config.get("api", "client_cert")
        if client_cert_key is None and config.has_option("api", "client_cert_key"):
            client_cert_key = config.get("api", "client_cert_key")
        if cert_bundle is None and config.has_option("api", "cert_bundle"):
            cert_bundle = config.get("api", "cert_bundle")
        if insecure is None and config.has_option("api", "insecure"):
            # Be quite strict about what is accepted so that users don't
            # disable security unintentionally.
            insecure_setting = config.get("api", "insecure").lower()
            if insecure_setting == "true":
                insecure = True
            elif insecure_setting == "false":
                insecure = False
            else:
                raise RuntimeError("insecure is set to '%s', it must be 'true' or 'false' if it is used in %s" % (insecure_setting, config_file))
    elif None in (api_key, email):
        raise RuntimeError("api_key or email not specified and %s does not exist" % (config_file,))

    self.api_key = api_key
    self.email = email
    self.verbose = verbose

    if site is not None:
        # Normalise the server URL: localhost defaults to http://,
        # everything else to https://.
        if site.startswith("localhost"):
            site = "http://" + site
        elif not site.startswith("http"):
            site = "https://" + site
        # Remove trailing "/"s from site to simplify the below logic for adding "/api"
        site = site.rstrip("/")
        self.base_url = site
    else:
        raise RuntimeError("Missing Zulip server URL; specify via --site or ~/.zuliprc.")

    if not self.base_url.endswith("/api"):
        self.base_url += "/api"
    self.base_url += "/"

    self.retry_on_errors = retry_on_errors
    self.client_name = client

    # TLS server verification: `insecure` disables it entirely, an
    # explicit CA bundle overrides the system store, otherwise verify
    # against system CA certificates.
    if insecure:
        self.tls_verification = False  # type: Union[bool, str]
    elif cert_bundle is not None:
        if not os.path.isfile(cert_bundle):
            raise RuntimeError("tls bundle '%s' does not exist" % (cert_bundle,))
        self.tls_verification = cert_bundle
    else:
        # Default behavior: verify against system CA certificates
        self.tls_verification = True

    # Optional TLS client certificate: the key may only be given together
    # with the certificate's public part, and both must exist on disk.
    if client_cert is None:
        if client_cert_key is not None:
            raise RuntimeError("client cert key '%s' specified, but no client cert public part provided" % (client_cert_key,))
    else:  # we have a client cert
        if not os.path.isfile(client_cert):
            raise RuntimeError("client cert '%s' does not exist" % (client_cert,))
        if client_cert_key is not None:
            if not os.path.isfile(client_cert_key):
                raise RuntimeError("client cert key '%s' does not exist" % (client_cert_key,))
    self.client_cert = client_cert
    self.client_cert_key = client_cert_key
# NOTE(review): this `raise` is the tail of a database-wait helper whose
# definition lies outside this chunk; it fires after `max_tries` failed
# attempts to reach `database_url`.
raise RuntimeError(
    "Database not responding at {} after {} tries. "
    "Giving up".format(database_url, max_tries)
)


if __name__ == "__main__":
    # Command-line entry point: parse gunicorn-related options, load the
    # pycsw configuration named by $PYCSW_CONFIG, configure logging from
    # its [server] section, and launch the server.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--workers",
        default=2,
        help="Number of workers to use by the gunicorn server. Defaults to 2."
    )
    parser.add_argument(
        "-r",
        "--reload",
        action="store_true",
        help="Should the gunicorn server automatically restart workers when "
             "code changes? This option is only useful for development. "
             "Defaults to False."
    )
    args = parser.parse_args()
    config = SafeConfigParser()
    # assumes PYCSW_CONFIG is set in the environment — TODO confirm how a
    # missing variable is meant to be reported to the user.
    config.read(os.getenv("PYCSW_CONFIG"))
    try:
        level = config.get("server", "loglevel").upper()
    except NoOptionError:
        # No loglevel configured; fall back to WARNING.
        level = "WARNING"
    logging.basicConfig(level=getattr(logging, level))
    launch_pycsw(config, workers=args.workers, reload=args.reload)
def onDiscovery(self, theme, settings, dependenciesSettings):
    """Register a browser view for every template found in the theme's
    views directory (``views/`` unless overridden by ``settings``),
    applying per-view overrides from a views.cfg file when present.

    Registered view names are recorded in ``self.registered[theme]``.
    """
    res = queryResourceDirectory(THEME_RESOURCE_NAME, theme)
    if res is None:
        # Theme has no resource directory; nothing to register.
        return

    # Directory holding the view templates; 'views' unless overridden.
    directoryName = 'views'
    if 'directory' in settings:
        directoryName = settings['directory']

    if res.isDirectory(directoryName):
        viewsDir = res[directoryName]

        # Browser layer: default to the theme's schemata entry, optionally
        # overridden by a dotted name from the settings.
        layer = getattr(schemata, theme, None)
        if 'layer' in settings:
            layerName = settings['layer']
            try:
                layer = resolve(layerName)
            except (ImportError, AttributeError):
                logger.warn("Could not import %s" % layerName)
                return

        # Optional per-view configuration (views.cfg), one section per
        # template file.
        viewConfig = SafeConfigParser()
        if viewsDir.isFile(VIEW_CONFIG_FILENAME):
            fp = viewsDir.openFile(VIEW_CONFIG_FILENAME)
            try:
                viewConfig.readfp(fp)
            finally:
                try:
                    fp.close()
                except AttributeError:
                    # Some resource implementations return file-like objects
                    # without close(); ignore those.
                    pass

        views = []
        configurationMachine = ConfigurationMachine()
        path = viewsDir.directory

        for filename in os.listdir(path):
            if not filename.lower().endswith(EXTENSION):
                continue

            # Defaults for a view backed by this template.  The name drops
            # the extension (assumes a 3-character extension such as
            # '.pt' — see EXTENSION).
            name = viewName = filename[:-3]
            permission = 'zope2.View'
            for_ = Interface
            class_ = None
            template = os.path.join(path, filename)
            menu = {}

            # Read override options from views.cfg if applicable
            if viewConfig.has_section(name):
                if viewConfig.has_option(name, 'name'):
                    viewName = viewConfig.get(name, 'name')
                if viewConfig.has_option(name, 'permission'):
                    permission = viewConfig.get(name, 'permission')
                if viewConfig.has_option(name, 'for'):
                    forStr = viewConfig.get(name, 'for')
                    # "*" keeps the default (register for Interface).
                    if forStr != "*":
                        for_ = resolve(forStr)
                if viewConfig.has_option(name, 'class'):
                    class_ = resolve(viewConfig.get(name, 'class'))
                if viewConfig.has_option(name, 'menu'):
                    # Attach the view to the plone_displayviews menu with
                    # the configured title.
                    menu = dict(
                        title=viewConfig.get(name, 'menu'),
                        menu=getattr(
                            zope.browsermenu.metaconfigure.menus,
                            "plone_displayviews",
                        ),
                    )

            # Register the page with the Zope configuration machinery.
            Products.Five.browser.metaconfigure.page(
                configurationMachine,
                name=viewName,
                permission=permission,
                for_=for_,
                layer=layer,
                template=template,
                class_=class_,
                **menu
            )

            views.append(name)

        # Actions are only executed when at least one view was found.
        if len(views) > 0:
            configurationMachine.execute_actions()

        self.registered[theme] = views
# Resolve the zulip config file: an explicit --zulip-config-file wins,
# otherwise fall back to the standard default location.
if options.zulip_config_file is None:
    config_file = zulip.get_default_config_filename()
else:
    config_file = options.zulip_config_file

config = SafeConfigParser()
try:
    with open(config_file, 'r') as f:
        config.readfp(f, config_file)
except IOError:
    # A missing config file is acceptable; the command-line options may
    # already be complete.
    pass

# For each string option not given on the command line, fall back to the
# [jabber_mirror] section of the config file.
for option in ("jid", "jabber_password", "conference_domain", "mode",
               "zulip_email_suffix", "jabber_server_address",
               "jabber_server_port"):
    if (getattr(options, option) is None
            and config.has_option("jabber_mirror", option)):
        setattr(options, option, config.get("jabber_mirror", option))

# Boolean options default to False when absent from both sources.
for option in ("no_use_tls",):
    if getattr(options, option) is None:
        if config.has_option("jabber_mirror", option):
            setattr(options, option,
                    config.getboolean("jabber_mirror", option))
        else:
            setattr(options, option, False)

# Apply final defaults for mode and the email suffix.
if options.mode is None:
    options.mode = "personal"
if options.zulip_email_suffix is None:
    options.zulip_email_suffix = ''
# Mode validation — the body of this check continues past this chunk.
if options.mode not in ('public', 'personal'):
class PackageConfigHandler(object):
    """
    Manager class for packages files for tracking installation of modules
    """

    def __init__(self):
        # noinspection PyUnresolvedReferences
        from six.moves.configparser import SafeConfigParser
        self.package_cfg = os.path.expanduser(
            '~/Documents/site-packages/.pypi_packages')
        if not os.path.isfile(self.package_cfg):
            print('Creating package file')
            # Create an empty registry file on first run.
            open(self.package_cfg, 'w').close()
        self.parser = SafeConfigParser()
        self.parser.read(self.package_cfg)

    def save(self):
        """Persist the in-memory parser state back to the package file."""
        with open(self.package_cfg, 'w') as outs:
            self.parser.write(outs)

    def add_module(self, pkg_info):
        """
        :param pkg_info: A dict that has name, url, version, summary
        :return:
        """
        section = pkg_info['name']
        if not self.parser.has_section(section):
            self.parser.add_section(section)
        for field in ('url', 'version', 'summary', 'files', 'dependency'):
            self.parser.set(section, field, pkg_info[field])
        self.save()

    def list_modules(self):
        """Return the names of all recorded modules."""
        return list(self.parser.sections())

    def module_exists(self, name):
        """True when *name* has an entry in the registry."""
        return self.parser.has_section(name)

    def get_info(self, name):
        """Return a dict of the recorded options for *name*, or None."""
        if self.parser.has_section(name):
            return {opt: val for opt, val in self.parser.items(name)}

    def remove_module(self, name):
        self.parser.remove_section(name)
        self.save()

    def get_files_installed(self, section_name):
        """Return the comma-separated file list for a module, or None."""
        if not self.parser.has_option(section_name, 'files'):
            return None
        return self.parser.get(section_name, 'files').strip().split(',')

    def get_dependencies(self, section_name):
        """Return the module's dependency set, or None when unrecorded."""
        if not self.parser.has_option(section_name, 'dependency'):
            return None
        deps = self.parser.get(section_name, 'dependency').strip()
        return set(deps.split(',')) if deps != '' else set()

    def get_all_dependencies(self, exclude_module=()):
        """Union of every module's dependencies, skipping *exclude_module*."""
        collected = set()
        for section in self.parser.sections():
            if section in exclude_module:
                continue
            if not self.parser.has_option(section, 'dependency'):
                continue
            deps = self.parser.get(section, 'dependency').strip()
            if deps != '':
                collected.update(deps.split(','))
        return collected
class Config(object):
    """
    Manages the configuration file
    """
    def __init__(self):
        """
        DEFAULT VALUES
        """
        self._basescript = None
        self.recentvaults = []
        self.pwlength = 10
        self.search_notes = False
        self.search_passwd = False
        self.alphabet = "abcdefghikmnopqrstuvwxyz23456789ABCDEFGHJKLMNPQRSTUVWXYZ_"
        self.avoid_bigrams = "cl mn nm nn rn vv VV"

        self._fname = self.get_config_filename()
        self._parser = SafeConfigParser()

        if os.path.exists(self._fname):
            self._parser.read(self._fname)

        if not self._parser.has_section("base"):
            self._parser.add_section("base")

        # Recent vaults are stored as recentvaults0..recentvaults9;
        # stop at the first missing index.
        for num in range(10):
            if (not self._parser.has_option("base", "recentvaults" + str(num))):
                break
            self.recentvaults.append(self._parser.get("base", "recentvaults" + str(num)))

        if self._parser.has_option("base", "pwlength"):
            self.pwlength = int(self._parser.get("base", "pwlength"))

        # Booleans are stored as the literal strings "True"/"False"
        # (see save()); only an exact "True" enables the flag.
        if self._parser.has_option("base", "search_notes"):
            if self._parser.get("base", "search_notes") == "True":
                self.search_notes = True

        if self._parser.has_option("base", "search_passwd"):
            if self._parser.get("base", "search_passwd") == "True":
                self.search_passwd = True

        if self._parser.has_option("base", "alphabet"):
            self.alphabet = self._parser.get("base", "alphabet")

        if self._parser.has_option("base", "avoid_bigrams"):
            self.avoid_bigrams = self._parser.get("base", "avoid_bigrams")

        # First run: write the defaults out so the file exists next time.
        if not os.path.exists(self._fname):
            self.save()

    def set_basescript(self, basescript):
        self._basescript = basescript

    def get_basescript(self):
        return self._basescript

    def save(self):
        """Write the current settings back to the config file."""
        # BUG FIX: os.mkdir fails when intermediate directories are
        # missing (e.g. a fresh ~/.config); os.makedirs creates the
        # whole chain.
        if (not os.path.exists(os.path.dirname(self._fname))):
            os.makedirs(os.path.dirname(self._fname))

        # remove duplicates and trim to 10 items
        _saved_recentvaults = []
        for item in self.recentvaults:
            if item in _saved_recentvaults:
                continue
            self._parser.set("base", "recentvaults" + str(len(_saved_recentvaults)), item)
            _saved_recentvaults.append(item)
            if (len(_saved_recentvaults) >= 10):
                break

        self._parser.set("base", "pwlength", str(self.pwlength))
        self._parser.set("base", "search_notes", str(self.search_notes))
        self._parser.set("base", "search_passwd", str(self.search_passwd))
        self._parser.set("base", "alphabet", str(self.alphabet))
        self._parser.set("base", "avoid_bigrams", str(self.avoid_bigrams))

        # BUG FIX: use a context manager so the file handle is closed
        # even when writing fails part-way through.
        with open(self._fname, 'w') as filehandle:
            self._parser.write(filehandle)

    @staticmethod
    def get_config_filename():
        """
        Returns the full filename of the config file
        """
        base_fname = "loxodo"
        # On Mac OS X, config files go to ~/Library/Application Support/foo/
        if platform.system() == "Darwin":
            base_path = os.path.join(os.path.expanduser("~"), "Library", "Application Support")
            if os.path.isdir(base_path):
                return os.path.join(base_path, base_fname, base_fname + ".ini")
        # On Microsoft Windows, config files go to $APPDATA/foo/
        if platform.system() in ("Windows", "Microsoft"):
            if ("APPDATA" in os.environ):
                base_path = os.environ["APPDATA"]
                if os.path.isdir(base_path):
                    return os.path.join(base_path, base_fname, base_fname + ".ini")
        # Allow config directory override as per freedesktop.org XDG Base Directory Specification
        if ("XDG_CONFIG_HOME" in os.environ):
            base_path = os.environ["XDG_CONFIG_HOME"]
            if os.path.isdir(base_path):
                return os.path.join(base_path, base_fname, base_fname + ".ini")
        # Default configuration path is ~/.config/foo/
        base_path = os.path.join(os.path.expanduser("~"), ".config")
        if os.path.isdir(base_path):
            return os.path.join(base_path, base_fname, base_fname + ".ini")
        else:
            return os.path.join(os.path.expanduser("~"), "." + base_fname + ".ini")
class Csw(object): """ Base CSW server """ def __init__(self, rtconfig=None, env=None, version='3.0.0'): """ Initialize CSW """ if not env: self.environ = os.environ else: self.environ = env self.context = config.StaticContext() # Lazy load this when needed # (it will permanently update global cfg namespaces) self.sruobj = None self.opensearchobj = None self.oaipmhobj = None # init kvp self.kvp = {} self.mode = 'csw' self.asynchronous = False self.soap = False self.request = None self.exception = False self.status = 'OK' self.profiles = None self.manager = False self.outputschemas = {} self.mimetype = 'application/xml; charset=UTF-8' self.encoding = 'UTF-8' self.pretty_print = 0 self.domainquerytype = 'list' self.orm = 'django' self.language = {'639_code': 'en', 'text': 'english'} self.process_time_start = time() # define CSW implementation object (default CSW3) self.iface = csw3.Csw3(server_csw=self) self.request_version = version if self.request_version == '2.0.2': self.iface = csw2.Csw2(server_csw=self) self.context.set_model('csw') # load user configuration try: LOGGER.info('Loading user configuration') if isinstance(rtconfig, SafeConfigParser): # serialized already self.config = rtconfig else: self.config = SafeConfigParser() if isinstance(rtconfig, dict): # dictionary for section, options in rtconfig.items(): self.config.add_section(section) for k, v in options.items(): self.config.set(section, k, v) else: # configuration file import codecs with codecs.open(rtconfig, encoding='utf-8') as scp: self.config.readfp(scp) except Exception as err: msg = 'Could not load configuration' LOGGER.exception('%s %s: %s', msg, rtconfig, err) self.response = self.iface.exceptionreport( 'NoApplicableCode', 'service', msg) return # set server.home safely # TODO: make this more abstract self.config.set( 'server', 'home', os.path.dirname(os.path.join(os.path.dirname(__file__), '..')) ) self.context.pycsw_home = self.config.get('server', 'home') self.context.url = 
self.config.get('server', 'url') log.setup_logger(self.config) LOGGER.info('running configuration %s', rtconfig) LOGGER.debug('QUERY_STRING: %s', self.environ['QUERY_STRING']) # set OGC schemas location if not self.config.has_option('server', 'ogc_schemas_base'): self.config.set('server', 'ogc_schemas_base', self.context.ogc_schemas_base) # set mimetype if self.config.has_option('server', 'mimetype'): self.mimetype = self.config.get('server', 'mimetype').encode() # set encoding if self.config.has_option('server', 'encoding'): self.encoding = self.config.get('server', 'encoding') # set domainquerytype if self.config.has_option('server', 'domainquerytype'): self.domainquerytype = self.config.get('server', 'domainquerytype') # set XML pretty print if (self.config.has_option('server', 'pretty_print') and self.config.get('server', 'pretty_print') == 'true'): self.pretty_print = 1 # set Spatial Ranking option if (self.config.has_option('server', 'spatial_ranking') and self.config.get('server', 'spatial_ranking') == 'true'): util.ranking_enabled = True # set language default if self.config.has_option('server', 'language'): try: LOGGER.info('Setting language') lang_code = self.config.get('server', 'language').split('-')[0] self.language['639_code'] = lang_code self.language['text'] = self.context.languages[lang_code] except Exception as err: LOGGER.exception('Could not set language: %s', err) pass LOGGER.debug('Configuration: %s.', self.config) LOGGER.debug('Model: %s.', self.context.model) # load user-defined mappings if they exist if self.config.has_option('repository', 'mappings'): # override default repository mappings try: import imp module = self.config.get('repository', 'mappings') if os.sep in module: # filepath modulename = '%s' % os.path.splitext(module)[0].replace( os.sep, '.') mappings = imp.load_source(modulename, module) else: # dotted name mappings = __import__(module, fromlist=['']) LOGGER.info('Loading custom repository mappings ' 'from %s', module) 
self.context.md_core_model = mappings.MD_CORE_MODEL self.context.refresh_dc(mappings.MD_CORE_MODEL) except Exception as err: LOGGER.exception('Could not load custom mappings: %s', err) self.response = self.iface.exceptionreport( 'NoApplicableCode', 'service', 'Could not load repository.mappings') # load outputschemas LOGGER.info('Loading outputschemas') for osch in pycsw.plugins.outputschemas.__all__: output_schema_module = __import__( 'pycsw.plugins.outputschemas.%s' % osch) mod = getattr(output_schema_module.plugins.outputschemas, osch) self.outputschemas[mod.NAMESPACE] = mod LOGGER.debug('Outputschemas loaded: %s.', self.outputschemas) LOGGER.debug('Namespaces: %s', self.context.namespaces) def expand_path(self, path): """ return safe path for WSGI environments """ if 'local.app_root' in self.environ and not os.path.isabs(path): return os.path.join(self.environ['local.app_root'], path) else: return path def dispatch_wsgi(self): """ WSGI handler """ if hasattr(self, 'response'): return self._write_response() LOGGER.debug('WSGI mode detected') if self.environ['REQUEST_METHOD'] == 'POST': try: request_body_size = int(self.environ.get('CONTENT_LENGTH', 0)) except (ValueError): request_body_size = 0 self.requesttype = 'POST' self.request = self.environ['wsgi.input'].read(request_body_size) LOGGER.debug('Request type: POST. Request:\n%s\n', self.request) else: # it's a GET request self.requesttype = 'GET' self.request = wsgiref.util.request_uri(self.environ) try: query_part = splitquery(self.request)[-1] self.kvp = dict(parse_qsl(query_part, keep_blank_values=True)) except AttributeError as err: LOGGER.exception('Could not parse query string') self.kvp = {} LOGGER.debug('Request type: GET. 
Request:\n%s\n', self.request) return self.dispatch() def opensearch(self): """ enable OpenSearch """ if not self.opensearchobj: self.opensearchobj = opensearch.OpenSearch(self.context) return self.opensearchobj def sru(self): """ enable SRU """ if not self.sruobj: self.sruobj = sru.Sru(self.context) return self.sruobj def oaipmh(self): """ enable OAI-PMH """ if not self.oaipmhobj: self.oaipmhobj = oaipmh.OAIPMH(self.context, self.config) return self.oaipmhobj def dispatch(self, writer=sys.stdout, write_headers=True): """ Handle incoming HTTP request """ error = 0 if self.requesttype == 'GET': self.kvp = self.normalize_kvp(self.kvp) version_202 = ('version' in self.kvp and self.kvp['version'] == '2.0.2') accept_version_202 = ('acceptversions' in self.kvp and '2.0.2' in self.kvp['acceptversions']) if version_202 or accept_version_202: self.request_version = '2.0.2' elif self.requesttype == 'POST': if self.request.find(b'cat/csw/2.0.2') != -1: self.request_version = '2.0.2' elif self.request.find(b'cat/csw/3.0') != -1: self.request_version = '3.0.0' if (not isinstance(self.kvp, str) and 'mode' in self.kvp and self.kvp['mode'] == 'sru'): self.mode = 'sru' self.request_version = '2.0.2' LOGGER.info('SRU mode detected; processing request') self.kvp = self.sru().request_sru2csw(self.kvp) if (not isinstance(self.kvp, str) and 'mode' in self.kvp and self.kvp['mode'] == 'oaipmh'): self.mode = 'oaipmh' self.request_version = '2.0.2' LOGGER.info('OAI-PMH mode detected; processing request.') self.oaiargs = dict((k, v) for k, v in self.kvp.items() if k) self.kvp = self.oaipmh().request(self.kvp) if self.request_version == '2.0.2': self.iface = csw2.Csw2(server_csw=self) self.context.set_model('csw') # configure transaction support, if specified in config self._gen_manager() namespaces = self.context.namespaces ops = self.context.model['operations'] constraints = self.context.model['constraints'] # generate domain model # NOTE: We should probably avoid this sort of mutable state 
for WSGI if 'GetDomain' not in ops: ops['GetDomain'] = self.context.gen_domains() # generate distributed search model, if specified in config if self.config.has_option('server', 'federatedcatalogues'): LOGGER.info('Configuring distributed search') constraints['FederatedCatalogues'] = {'values': []} for fedcat in self.config.get('server', 'federatedcatalogues').split(','): LOGGER.debug('federated catalogue: %s', fedcat) constraints['FederatedCatalogues']['values'].append(fedcat) for key, value in self.outputschemas.items(): get_records_params = ops['GetRecords']['parameters'] get_records_params['outputSchema']['values'].append( value.NAMESPACE) get_records_by_id_params = ops['GetRecordById']['parameters'] get_records_by_id_params['outputSchema']['values'].append( value.NAMESPACE) if 'Harvest' in ops: harvest_params = ops['Harvest']['parameters'] harvest_params['ResourceType']['values'].append( value.NAMESPACE) LOGGER.info('Setting MaxRecordDefault') if self.config.has_option('server', 'maxrecords'): constraints['MaxRecordDefault']['values'] = [ self.config.get('server', 'maxrecords')] # load profiles if self.config.has_option('server', 'profiles'): self.profiles = pprofile.load_profiles( os.path.join('pycsw', 'plugins', 'profiles'), pprofile.Profile, self.config.get('server', 'profiles') ) for prof in self.profiles['plugins'].keys(): tmp = self.profiles['plugins'][prof](self.context.model, namespaces, self.context) key = tmp.outputschema # to ref by outputschema self.profiles['loaded'][key] = tmp self.profiles['loaded'][key].extend_core(self.context.model, namespaces, self.config) LOGGER.debug('Profiles loaded: %s' % list(self.profiles['loaded'].keys())) # init repository # look for tablename, set 'records' as default if not self.config.has_option('repository', 'table'): self.config.set('repository', 'table', 'records') repo_filter = None if self.config.has_option('repository', 'filter'): repo_filter = self.config.get('repository', 'filter') if 
self.config.has_option('repository', 'source'): # load custom repository rs = self.config.get('repository', 'source') rs_modname, rs_clsname = rs.rsplit('.', 1) rs_mod = __import__(rs_modname, globals(), locals(), [rs_clsname]) rs_cls = getattr(rs_mod, rs_clsname) try: self.repository = rs_cls(self.context, repo_filter) LOGGER.debug('Custom repository %s loaded (%s)', rs, self.repository.dbtype) except Exception as err: msg = 'Could not load custom repository %s: %s' % (rs, err) LOGGER.exception(msg) error = 1 code = 'NoApplicableCode' locator = 'service' text = 'Could not initialize repository. Check server logs' else: # load default repository self.orm = 'sqlalchemy' from pycsw.core import repository try: LOGGER.info('Loading default repository') self.repository = repository.Repository( self.config.get('repository', 'database'), self.context, self.environ.get('local.app_root', None), self.config.get('repository', 'table'), repo_filter ) LOGGER.debug( 'Repository loaded (local): %s.' % self.repository.dbtype) except Exception as err: msg = 'Could not load repository (local): %s' % err LOGGER.exception(msg) error = 1 code = 'NoApplicableCode' locator = 'service' text = 'Could not initialize repository. 
Check server logs' if self.requesttype == 'POST': LOGGER.debug('HTTP POST request') LOGGER.debug('CSW version: %s', self.iface.version) self.kvp = self.iface.parse_postdata(self.request) if isinstance(self.kvp, str): # it's an exception error = 1 locator = 'service' text = self.kvp if (self.kvp.find('the document is not valid') != -1 or self.kvp.find('document not well-formed') != -1): code = 'NoApplicableCode' else: code = 'InvalidParameterValue' LOGGER.debug('HTTP Headers:\n%s.', self.environ) LOGGER.debug('Parsed request parameters: %s', self.kvp) if (not isinstance(self.kvp, str) and 'mode' in self.kvp and self.kvp['mode'] == 'opensearch'): self.mode = 'opensearch' LOGGER.info('OpenSearch mode detected; processing request.') self.kvp['outputschema'] = 'http://www.w3.org/2005/Atom' if ((len(self.kvp) == 0 and self.request_version == '3.0.0') or (len(self.kvp) == 1 and 'config' in self.kvp)): LOGGER.info('Turning on default csw30:Capabilities for base URL') self.kvp = { 'service': 'CSW', 'acceptversions': '3.0.0', 'request': 'GetCapabilities' } http_accept = self.environ.get('HTTP_ACCEPT', '') if 'application/opensearchdescription+xml' in http_accept: self.mode = 'opensearch' self.kvp['outputschema'] = 'http://www.w3.org/2005/Atom' if error == 0: # test for the basic keyword values (service, version, request) basic_options = ['service', 'request'] request = self.kvp.get('request', '') own_version_integer = util.get_version_integer( self.request_version) if self.request_version == '2.0.2': basic_options.append('version') if self.request_version == '3.0.0' and 'version' not in self.kvp and self.requesttype == 'POST': if 'service' not in self.kvp: self.kvp['service'] = 'CSW' basic_options.append('service') self.kvp['version'] = self.request_version basic_options.append('version') for k in basic_options: if k not in self.kvp: if (k in ['version', 'acceptversions'] and request == 'GetCapabilities'): pass else: error = 1 locator = k code = 'MissingParameterValue' text 
= 'Missing keyword: %s' % k break # test each of the basic keyword values if error == 0: # test service if self.kvp['service'] != 'CSW': error = 1 locator = 'service' code = 'InvalidParameterValue' text = 'Invalid value for service: %s.\ Value MUST be CSW' % self.kvp['service'] # test version kvp_version = self.kvp.get('version', '') try: kvp_version_integer = util.get_version_integer(kvp_version) except Exception as err: kvp_version_integer = 'invalid_value' if (request != 'GetCapabilities' and kvp_version_integer != own_version_integer): error = 1 locator = 'version' code = 'InvalidParameterValue' text = ('Invalid value for version: %s. Value MUST be ' '2.0.2 or 3.0.0' % kvp_version) # check for GetCapabilities acceptversions if 'acceptversions' in self.kvp: for vers in self.kvp['acceptversions'].split(','): vers_integer = util.get_version_integer(vers) if vers_integer == own_version_integer: break else: error = 1 locator = 'acceptversions' code = 'VersionNegotiationFailed' text = ('Invalid parameter value in ' 'acceptversions: %s. 
Value MUST be ' '2.0.2 or 3.0.0' % self.kvp['acceptversions']) # test request if self.kvp['request'] not in \ self.context.model['operations']: error = 1 locator = 'request' if request in ['Transaction', 'Harvest']: code = 'OperationNotSupported' text = '%s operations are not supported' % request else: code = 'InvalidParameterValue' text = 'Invalid value for request: %s' % request if error == 1: # return an ExceptionReport LOGGER.error('basic service options error: %s, %s, %s', code, locator, text) self.response = self.iface.exceptionreport(code, locator, text) else: # process per the request value if 'responsehandler' in self.kvp: # set flag to process asynchronously import threading self.asynchronous = True request_id = self.kvp.get('requestid', None) if request_id is None: import uuid self.kvp['requestid'] = str(uuid.uuid4()) if self.kvp['request'] == 'GetCapabilities': self.response = self.iface.getcapabilities() elif self.kvp['request'] == 'DescribeRecord': self.response = self.iface.describerecord() elif self.kvp['request'] == 'GetDomain': self.response = self.iface.getdomain() elif self.kvp['request'] == 'GetRecords': if self.asynchronous: # process asynchronously threading.Thread(target=self.iface.getrecords).start() self.response = self.iface._write_acknowledgement() else: self.response = self.iface.getrecords() elif self.kvp['request'] == 'GetRecordById': self.response = self.iface.getrecordbyid() elif self.kvp['request'] == 'GetRepositoryItem': self.response = self.iface.getrepositoryitem() elif self.kvp['request'] == 'Transaction': self.response = self.iface.transaction() elif self.kvp['request'] == 'Harvest': if self.asynchronous: # process asynchronously threading.Thread(target=self.iface.harvest).start() self.response = self.iface._write_acknowledgement() else: self.response = self.iface.harvest() else: self.response = self.iface.exceptionreport( 'InvalidParameterValue', 'request', 'Invalid request parameter: %s' % self.kvp['request'] ) 
LOGGER.info('Request processed') if self.mode == 'sru': LOGGER.info('SRU mode detected; processing response.') self.response = self.sru().response_csw2sru(self.response, self.environ) elif self.mode == 'opensearch': LOGGER.info('OpenSearch mode detected; processing response.') self.response = self.opensearch().response_csw2opensearch( self.response, self.config) elif self.mode == 'oaipmh': LOGGER.info('OAI-PMH mode detected; processing response.') self.response = self.oaipmh().response( self.response, self.oaiargs, self.repository, self.config.get('server', 'url') ) return self._write_response() def getcapabilities(self): """ Handle GetCapabilities request """ return self.iface.getcapabilities() def describerecord(self): """ Handle DescribeRecord request """ return self.iface.describerecord() def getdomain(self): """ Handle GetDomain request """ return self.iface.getdomain() def getrecords(self): """ Handle GetRecords request """ return self.iface.getrecords() def getrecordbyid(self, raw=False): """ Handle GetRecordById request """ return self.iface.getrecordbyid(raw) def getrepositoryitem(self): """ Handle GetRepositoryItem request """ return self.iface.getrepositoryitem() def transaction(self): """ Handle Transaction request """ return self.iface.transaction() def harvest(self): """ Handle Harvest request """ return self.iface.harvest() def _write_response(self): """ Generate response """ # set HTTP response headers and XML declaration xmldecl = '' appinfo = '' LOGGER.info('Writing response.') if hasattr(self, 'soap') and self.soap: self._gen_soap_wrapper() if etree.__version__ >= '3.5.0': # remove superfluous namespaces etree.cleanup_namespaces(self.response, keep_ns_prefixes=self.context.keep_ns_prefixes) response = etree.tostring(self.response, pretty_print=self.pretty_print, encoding='unicode') if (isinstance(self.kvp, dict) and 'outputformat' in self.kvp and self.kvp['outputformat'] == 'application/json'): self.contenttype = self.kvp['outputformat'] from 
pycsw.core.formats import fmt_json response = fmt_json.xml2json(response, self.context.namespaces, self.pretty_print) else: # it's XML if 'outputformat' in self.kvp: self.contenttype = self.kvp['outputformat'] else: self.contenttype = self.mimetype xmldecl = ('<?xml version="1.0" encoding="%s" standalone="no"?>' '\n' % self.encoding) appinfo = '<!-- pycsw %s -->\n' % self.context.version if isinstance(self.contenttype, bytes): self.contenttype = self.contenttype.decode() s = (u'%s%s%s' % (xmldecl, appinfo, response)).encode(self.encoding) LOGGER.debug('Response code: %s', self.context.response_codes[self.status]) LOGGER.debug('Response:\n%s', s) return [self.context.response_codes[self.status], s] def _gen_soap_wrapper(self): """ Generate SOAP wrapper """ LOGGER.info('Writing SOAP wrapper.') node = etree.Element( util.nspath_eval('soapenv:Envelope', self.context.namespaces), nsmap=self.context.namespaces ) schema_location_ns = util.nspath_eval('xsi:schemaLocation', self.context.namespaces) node.attrib[schema_location_ns] = '%s %s' % ( self.context.namespaces['soapenv'], self.context.namespaces['soapenv'] ) node2 = etree.SubElement( node, util.nspath_eval('soapenv:Body', self.context.namespaces)) if self.exception: node3 = etree.SubElement( node2, util.nspath_eval('soapenv:Fault', self.context.namespaces) ) node4 = etree.SubElement( node3, util.nspath_eval('soapenv:Code', self.context.namespaces) ) etree.SubElement( node4, util.nspath_eval('soapenv:Value', self.context.namespaces) ).text = 'soap:Server' node4 = etree.SubElement( node3, util.nspath_eval('soapenv:Reason', self.context.namespaces) ) etree.SubElement( node4, util.nspath_eval('soapenv:Text', self.context.namespaces) ).text = 'A server exception was encountered.' 
node4 = etree.SubElement( node3, util.nspath_eval('soapenv:Detail', self.context.namespaces) ) node4.append(self.response) else: node2.append(self.response) self.response = node def _gen_manager(self): """ Update self.context.model with CSW-T advertising """ if (self.config.has_option('manager', 'transactions') and self.config.get('manager', 'transactions') == 'true'): self.manager = True self.context.model['operations_order'].append('Transaction') self.context.model['operations']['Transaction'] = { 'methods': {'get': False, 'post': True}, 'parameters': {} } schema_values = [ 'http://www.opengis.net/cat/csw/2.0.2', 'http://www.opengis.net/cat/csw/3.0', 'http://www.opengis.net/wms', 'http://www.opengis.net/wmts/1.0', 'http://www.opengis.net/wfs', 'http://www.opengis.net/wfs/2.0', 'http://www.opengis.net/wcs', 'http://www.opengis.net/wps/1.0.0', 'http://www.opengis.net/sos/1.0', 'http://www.opengis.net/sos/2.0', 'http://www.isotc211.org/2005/gmi', 'urn:geoss:waf', ] self.context.model['operations_order'].append('Harvest') self.context.model['operations']['Harvest'] = { 'methods': {'get': False, 'post': True}, 'parameters': { 'ResourceType': {'values': schema_values} } } self.context.model['operations']['Transaction'] = { 'methods': {'get': False, 'post': True}, 'parameters': { 'TransactionSchemas': {'values': sorted(schema_values)} } } self.csw_harvest_pagesize = 10 if self.config.has_option('manager', 'csw_harvest_pagesize'): self.csw_harvest_pagesize = int( self.config.get('manager', 'csw_harvest_pagesize')) def _test_manager(self): """ Verify that transactions are allowed """ if self.config.get('manager', 'transactions') != 'true': raise RuntimeError('CSW-T interface is disabled') """ get the client first forwarded ip """ if 'HTTP_X_FORWARDED_FOR' in self.environ: ipaddress = self.environ['HTTP_X_FORWARDED_FOR'].split(',')[0].strip() else: ipaddress = self.environ['REMOTE_ADDR'] if not self.config.has_option('manager', 'allowed_ips') or \ 
(self.config.has_option('manager', 'allowed_ips') and not util.ipaddress_in_whitelist(ipaddress, self.config.get('manager', 'allowed_ips').split(','))): raise RuntimeError( 'CSW-T operations not allowed for this IP address: %s' % ipaddress) def _cql_update_queryables_mappings(self, cql, mappings): """ Transform CQL query's properties to underlying DB columns """ LOGGER.debug('Raw CQL text = %s', cql) LOGGER.debug(str(list(mappings.keys()))) if cql is not None: for key in mappings.keys(): try: cql = cql.replace(key, mappings[key]['dbcol']) except: cql = cql.replace(key, mappings[key]) LOGGER.debug('Interpolated CQL text = %s.', cql) return cql def _process_responsehandler(self, xml): """ Process response handler """ if self.kvp['responsehandler'] is not None: LOGGER.info('Processing responsehandler %s' % self.kvp['responsehandler']) uprh = urlparse(self.kvp['responsehandler']) if uprh.scheme == 'mailto': # email import smtplib LOGGER.debug('Email detected') smtp_host = 'localhost' if self.config.has_option('server', 'smtp_host'): smtp_host = self.config.get('server', 'smtp_host') body = ('Subject: pycsw %s results\n\n%s' % (self.kvp['request'], xml)) try: LOGGER.info('Sending email') msg = smtplib.SMTP(smtp_host) msg.sendmail( self.config.get('metadata:main', 'contact_email'), uprh.path, body ) msg.quit() LOGGER.debug('Email sent successfully.') except Exception as err: LOGGER.exception('Error processing email') elif uprh.scheme == 'ftp': import ftplib LOGGER.debug('FTP detected.') try: LOGGER.info('Sending to FTP server.') ftp = ftplib.FTP(uprh.hostname) if uprh.username is not None: ftp.login(uprh.username, uprh.password) ftp.storbinary('STOR %s' % uprh.path[1:], StringIO(xml)) ftp.quit() LOGGER.debug('FTP sent successfully.') except Exception as err: LOGGER.exception('Error processing FTP') @staticmethod def normalize_kvp(kvp): """Normalize Key Value Pairs. 
This method will transform all keys to lowercase and leave values unchanged, as specified in the CSW standard (see for example note C on Table 62 - KVP Encoding for DescribeRecord operation request of the CSW standard version 2.0.2) :arg kvp: a mapping with Key Value Pairs :type kvp: dict :returns: A new dictionary with normalized parameters """ result = dict() for name, value in kvp.items(): result[name.lower()] = value return result
def __init__(self, email=None, api_key=None, config_file=None, verbose=False,
             retry_on_errors=True, site=None, client=None, cert_bundle=None,
             insecure=None):
    """Build an API client.

    Arguments left as ``None`` are filled in, where possible, from an
    ini-style config file (section ``[api]``); explicit constructor
    arguments therefore take precedence over file values.

    :param email: account email address
    :param api_key: API key for the account
    :param config_file: path of the config file; defaults to
        ``get_default_config_filename()``
    :param verbose: enable verbose operation
    :param retry_on_errors: whether failed requests should be retried
    :param site: server URL; "https://" is prepended when no scheme is
        given, and trailing slashes are stripped
    :param client: client-name string; defaults to ``_default_client()``
    :param cert_bundle: path to a CA bundle used for TLS verification
    :param insecure: disable TLS certificate verification entirely
    :raises RuntimeError: on a malformed ``insecure`` setting, on missing
        credentials when the config file does not exist, or when
        ``cert_bundle`` points at a nonexistent file
    """
    if client is None:
        client = _default_client()
    if config_file is None:
        config_file = get_default_config_filename()
    if os.path.exists(config_file):
        config = SafeConfigParser()
        with open(config_file, 'r') as f:
            config.readfp(f, config_file)
        # Config-file values only fill in parameters the caller left unset.
        if api_key is None:
            api_key = config.get("api", "key")
        if email is None:
            email = config.get("api", "email")
        if site is None and config.has_option("api", "site"):
            site = config.get("api", "site")
        if cert_bundle is None and config.has_option("api", "cert_bundle"):
            cert_bundle = config.get("api", "cert_bundle")
        if insecure is None and config.has_option("api", "insecure"):
            # Be quite strict about what is accepted so that users don't
            # disable security unintentionally.
            insecure_setting = config.get("api", "insecure").lower()
            if insecure_setting == "true":
                insecure = True
            elif insecure_setting == "false":
                insecure = False
            else:
                raise RuntimeError("insecure is set to '%s', it must be 'true' or 'false' if it is used in %s" % (insecure_setting, config_file))
    elif None in (api_key, email):
        # No config file to fall back on, so credentials are mandatory.
        raise RuntimeError("api_key or email not specified and %s does not exist" % (config_file,))
    self.api_key = api_key
    self.email = email
    self.verbose = verbose
    if site is not None:
        if not site.startswith("http"):
            site = "https://" + site
        # Remove trailing "/"s from site to simplify the below logic for adding "/api"
        site = site.rstrip("/")
        self.base_url = site
    else:
        self.base_url = "https://api.zulip.com"
    # Self-hosted servers expose the API under "/api"; the hosted
    # api.zulip.com endpoint does not take the suffix.
    if self.base_url != "https://api.zulip.com" and not self.base_url.endswith("/api"):
        self.base_url += "/api"
    self.base_url += "/"
    self.retry_on_errors = retry_on_errors
    self.client_name = client
    # tls_verification is either a bool or a CA-bundle path, matching the
    # values accepted by requests' ``verify=`` argument — TODO confirm
    # against the request-sending code (not visible here).
    if insecure:
        self.tls_verification=False
    elif cert_bundle is not None:
        if not os.path.isfile(cert_bundle):
            raise RuntimeError("tls bundle '%s' does not exist" %(cert_bundle,))
        self.tls_verification=cert_bundle
    else:
        # Default behavior: verify against system CA certificates
        self.tls_verification=True
iterations = 60 timeout = 10 nova_cfg = '/etc/nova/nova.conf' if __name__ == '__main__': if os.path.isfile(nova_cfg): config = SafeConfigParser() config.read(nova_cfg) else: LOG.error('Nova configuration file %s does not exist', nova_cfg) sys.exit(1) # get keystone client with details from [placement] section auth = v3.Password( user_domain_name=config.get('placement', 'user_domain_name'), username=config.get('placement', 'username'), password=config.get('placement', 'password'), project_name=config.get('placement', 'project_name'), project_domain_name=config.get('placement', 'user_domain_name'), auth_url=config.get('placement', 'auth_url')+'/v3') sess = session.Session(auth=auth, verify=False) keystone = client.Client(session=sess, interface='internal') iterations_endpoint = iterations placement_endpoint_url = None while iterations_endpoint > 1: iterations_endpoint -= 1 try: # get placement service id placement_service_id = keystone.services.list(
def get_giphy_api_key_from_config():
    """Return the Giphy API key from the user's ``~/.giphy_config`` file.

    The file is ini-style; the key is read from option ``key`` in
    section ``giphy-config``.

    :returns: the API key string
    :raises IOError: if the config file does not exist
    :raises NoSectionError/NoOptionError: if the section or option is missing
    """
    config = SafeConfigParser()
    # expanduser is more robust than os.environ['HOME'], which raises
    # KeyError when HOME is unset and is wrong on Windows; it also avoids
    # hand-building the path with string concatenation.
    config_path = os.path.expanduser('~/.giphy_config')
    with open(config_path, 'r') as config_file:
        # read_file replaces the deprecated readfp (removed in Python 3.12).
        config.read_file(config_file)
    return config.get("giphy-config", "key")
config_file = zulip.get_default_config_filename() else: config_file = options.zulip_config_file config = SafeConfigParser() try: with open(config_file, 'r') as f: config.readfp(f, config_file) except IOError: pass for option in ("jid", "jabber_password", "conference_domain", "mode", "zulip_email_suffix", "jabber_server_address", "jabber_server_port"): if (getattr(options, option) is None and config.has_option("jabber_mirror", option)): setattr(options, option, config.get("jabber_mirror", option)) for option in ("no_use_tls", ): if getattr(options, option) is None: if config.has_option("jabber_mirror", option): setattr(options, option, config.getboolean("jabber_mirror", option)) else: setattr(options, option, False) if options.mode is None: options.mode = "personal" if options.zulip_email_suffix is None: options.zulip_email_suffix = ''
def __init__(self, email=None, api_key=None, config_file=None, verbose=False,
             retry_on_errors=True, site=None, client=None, cert_bundle=None,
             insecure=None, client_cert=None, client_cert_key=None):
    # type: (Optional[str], Optional[str], Optional[str], bool, bool, Optional[str], Optional[str], Optional[str], Optional[bool], Optional[str], Optional[str]) -> None
    """Build an API client.

    Each setting is resolved in priority order: explicit constructor
    argument first, then the corresponding ``ZULIP_*`` environment
    variable, then the ``[api]`` section of the config file (which
    defaults to ``get_default_config_filename()``).

    :raises ZulipError: when an insecure flag (env var or config option)
        is not a recognized boolean
    :raises ConfigNotFoundError: for missing credentials, a nonexistent
        cert bundle, or missing/inconsistent client-cert files
    :raises MissingURLError: when no server URL can be determined
    """
    if client is None:
        client = _default_client()
    # Normalize user-specified path
    if config_file is not None:
        config_file = os.path.abspath(os.path.expanduser(config_file))
    # Fill values from Environment Variables if not available in Constructor
    if config_file is None:
        config_file = os.environ.get("ZULIP_CONFIG")
    if api_key is None:
        api_key = os.environ.get("ZULIP_API_KEY")
    if email is None:
        email = os.environ.get("ZULIP_EMAIL")
    if site is None:
        site = os.environ.get("ZULIP_SITE")
    if client_cert is None:
        client_cert = os.environ.get("ZULIP_CERT")
    if client_cert_key is None:
        client_cert_key = os.environ.get("ZULIP_CERT_KEY")
    if cert_bundle is None:
        cert_bundle = os.environ.get("ZULIP_CERT_BUNDLE")
    if insecure is None:
        # Be quite strict about what is accepted so that users don't
        # disable security unintentionally.
        insecure_setting = os.environ.get('ZULIP_ALLOW_INSECURE')
        if insecure_setting is not None:
            insecure = validate_boolean_field(insecure_setting)
            if insecure is None:
                raise ZulipError("The ZULIP_ALLOW_INSECURE environment "
                                 "variable is set to '{}', it must be "
                                 "'true' or 'false'"
                                 .format(insecure_setting))
    if config_file is None:
        config_file = get_default_config_filename()
    if config_file is not None and os.path.exists(config_file):
        config = SafeConfigParser()
        with open(config_file, 'r') as f:
            config.readfp(f, config_file)
        # Config-file values only fill in settings still unset after the
        # constructor arguments and environment variables above.
        if api_key is None:
            api_key = config.get("api", "key")
        if email is None:
            email = config.get("api", "email")
        if site is None and config.has_option("api", "site"):
            site = config.get("api", "site")
        if client_cert is None and config.has_option("api", "client_cert"):
            client_cert = config.get("api", "client_cert")
        if client_cert_key is None and config.has_option("api", "client_cert_key"):
            client_cert_key = config.get("api", "client_cert_key")
        if cert_bundle is None and config.has_option("api", "cert_bundle"):
            cert_bundle = config.get("api", "cert_bundle")
        if insecure is None and config.has_option("api", "insecure"):
            # Be quite strict about what is accepted so that users don't
            # disable security unintentionally.
            insecure_setting = config.get('api', 'insecure')
            insecure = validate_boolean_field(insecure_setting)
            if insecure is None:
                raise ZulipError("insecure is set to '{}', it must be "
                                 "'true' or 'false' if it is used in {}"
                                 .format(insecure_setting, config_file))
    elif None in (api_key, email):
        # No config file to fall back on, so credentials are mandatory.
        raise ConfigNotFoundError("api_key or email not specified and file %s does not exist" % (config_file,))
    assert(api_key is not None and email is not None)
    self.api_key = api_key
    self.email = email
    self.verbose = verbose
    if site is not None:
        # localhost gets plain http; everything else defaults to https
        # when no scheme is given.
        if site.startswith("localhost"):
            site = "http://" + site
        elif not site.startswith("http"):
            site = "https://" + site
        # Remove trailing "/"s from site to simplify the below logic for adding "/api"
        site = site.rstrip("/")
        self.base_url = site
    else:
        raise MissingURLError("Missing Zulip server URL; specify via --site or ~/.zuliprc.")
    if not self.base_url.endswith("/api"):
        self.base_url += "/api"
    self.base_url += "/"
    self.retry_on_errors = retry_on_errors
    self.client_name = client
    # tls_verification is either a bool or a CA-bundle path, the values
    # accepted by requests' ``verify=`` argument — TODO confirm against
    # the request-sending code (not visible here).
    if insecure:
        logger.warning('Insecure mode enabled. The server\'s SSL/TLS '
                       'certificate will not be validated, making the '
                       'HTTPS connection potentially insecure')
        self.tls_verification = False  # type: Union[bool, str]
    elif cert_bundle is not None:
        if not os.path.isfile(cert_bundle):
            raise ConfigNotFoundError("tls bundle '%s' does not exist" % (cert_bundle,))
        self.tls_verification = cert_bundle
    else:
        # Default behavior: verify against system CA certificates
        self.tls_verification = True
    # A client-cert key without the cert's public part is unusable; the
    # cert alone (with an embedded key) is allowed.
    if client_cert is None:
        if client_cert_key is not None:
            raise ConfigNotFoundError("client cert key '%s' specified, but no client cert public part provided" % (client_cert_key,))
    else:  # we have a client cert
        if not os.path.isfile(client_cert):
            raise ConfigNotFoundError("client cert '%s' does not exist" % (client_cert,))
        if client_cert_key is not None:
            if not os.path.isfile(client_cert_key):
                raise ConfigNotFoundError("client cert key '%s' does not exist" % (client_cert_key,))
    self.client_cert = client_cert
    self.client_cert_key = client_cert_key
    self.session = None  # type: Optional[requests.Session]
    self.has_connected = False
class Csw(object): """ Base CSW server """ def __init__(self, rtconfig=None, env=None, version='3.0.0'): """ Initialize CSW """ if not env: self.environ = os.environ else: self.environ = env self.context = config.StaticContext() # Lazy load this when needed # (it will permanently update global cfg namespaces) self.sruobj = None self.opensearchobj = None self.oaipmhobj = None # init kvp self.kvp = {} self.mode = 'csw' self.async = False self.soap = False self.request = None self.exception = False self.status = 'OK' self.profiles = None self.manager = False self.outputschemas = {} self.mimetype = 'application/xml; charset=UTF-8' self.encoding = 'UTF-8' self.pretty_print = 0 self.domainquerytype = 'list' self.orm = 'django' self.language = {'639_code': 'en', 'text': 'english'} self.process_time_start = time() # define CSW implementation object (default CSW3) self.iface = csw3.Csw3(server_csw=self) self.request_version = version if self.request_version == '2.0.2': self.iface = csw2.Csw2(server_csw=self) self.context.set_model('csw') # load user configuration try: LOGGER.info('Loading user configuration') if isinstance(rtconfig, SafeConfigParser): # serialized already self.config = rtconfig else: self.config = SafeConfigParser() if isinstance(rtconfig, dict): # dictionary for section, options in rtconfig.items(): self.config.add_section(section) for k, v in options.items(): self.config.set(section, k, v) else: # configuration file import codecs with codecs.open(rtconfig, encoding='utf-8') as scp: self.config.readfp(scp) except Exception as err: LOGGER.exception('Could not load user configuration: %s', err) self.response = self.iface.exceptionreport( 'NoApplicableCode', 'service', 'Error opening configuration %s' % rtconfig ) return # set server.home safely # TODO: make this more abstract self.config.set( 'server', 'home', os.path.dirname(os.path.join(os.path.dirname(__file__), '..')) ) self.context.pycsw_home = self.config.get('server', 'home') self.context.url = 
self.config.get('server', 'url') log.setup_logger(self.config) LOGGER.info('running configuration %s', rtconfig) LOGGER.debug('QUERY_STRING: %s', self.environ['QUERY_STRING']) # set OGC schemas location if not self.config.has_option('server', 'ogc_schemas_base'): self.config.set('server', 'ogc_schemas_base', self.context.ogc_schemas_base) # set mimetype if self.config.has_option('server', 'mimetype'): self.mimetype = self.config.get('server', 'mimetype').encode() # set encoding if self.config.has_option('server', 'encoding'): self.encoding = self.config.get('server', 'encoding') # set domainquerytype if self.config.has_option('server', 'domainquerytype'): self.domainquerytype = self.config.get('server', 'domainquerytype') # set XML pretty print if (self.config.has_option('server', 'pretty_print') and self.config.get('server', 'pretty_print') == 'true'): self.pretty_print = 1 # set Spatial Ranking option if (self.config.has_option('server', 'spatial_ranking') and self.config.get('server', 'spatial_ranking') == 'true'): util.ranking_enabled = True # set language default if self.config.has_option('server', 'language'): try: LOGGER.info('Setting language') lang_code = self.config.get('server', 'language').split('-')[0] self.language['639_code'] = lang_code self.language['text'] = self.context.languages[lang_code] except Exception as err: LOGGER.exception('Could not set language: %s', err) pass LOGGER.debug('Configuration: %s.', self.config) LOGGER.debug('Model: %s.', self.context.model) # load user-defined mappings if they exist if self.config.has_option('repository', 'mappings'): # override default repository mappings try: import imp module = self.config.get('repository', 'mappings') if '/' in module: # filepath modulename = '%s' % os.path.splitext(module)[0].replace( os.sep, '.') mappings = imp.load_source(modulename, module) else: # dotted name mappings = __import__(module, fromlist=['']) LOGGER.info('Loading custom repository mappings ' 'from %s', module) 
self.context.md_core_model = mappings.MD_CORE_MODEL self.context.refresh_dc(mappings.MD_CORE_MODEL) except Exception as err: LOGGER.exception('Could not load custom mappings: %s', err) self.response = self.iface.exceptionreport( 'NoApplicableCode', 'service', 'Could not load repository.mappings') # load outputschemas LOGGER.info('Loading outputschemas') for osch in pycsw.plugins.outputschemas.__all__: output_schema_module = __import__( 'pycsw.plugins.outputschemas.%s' % osch) mod = getattr(output_schema_module.plugins.outputschemas, osch) self.outputschemas[mod.NAMESPACE] = mod LOGGER.debug('Outputschemas loaded: %s.', self.outputschemas) LOGGER.debug('Namespaces: %s', self.context.namespaces) def expand_path(self, path): """ return safe path for WSGI environments """ if 'local.app_root' in self.environ and not os.path.isabs(path): return os.path.join(self.environ['local.app_root'], path) else: return path def dispatch_wsgi(self): """ WSGI handler """ if hasattr(self, 'response'): return self._write_response() LOGGER.debug('WSGI mode detected') if self.environ['REQUEST_METHOD'] == 'POST': try: request_body_size = int(self.environ.get('CONTENT_LENGTH', 0)) except (ValueError): request_body_size = 0 self.requesttype = 'POST' self.request = self.environ['wsgi.input'].read(request_body_size) LOGGER.debug('Request type: POST. Request:\n%s\n', self.request) else: # it's a GET request self.requesttype = 'GET' self.request = wsgiref.util.request_uri(self.environ) try: query_part = splitquery(self.request)[-1] self.kvp = dict(parse_qsl(query_part, keep_blank_values=True)) except AttributeError as err: LOGGER.exception('Could not parse query string') self.kvp = {} LOGGER.debug('Request type: GET. 
Request:\n%s\n', self.request) return self.dispatch() def opensearch(self): """ enable OpenSearch """ if not self.opensearchobj: self.opensearchobj = opensearch.OpenSearch(self.context) return self.opensearchobj def sru(self): """ enable SRU """ if not self.sruobj: self.sruobj = sru.Sru(self.context) return self.sruobj def oaipmh(self): """ enable OAI-PMH """ if not self.oaipmhobj: self.oaipmhobj = oaipmh.OAIPMH(self.context, self.config) return self.oaipmhobj def dispatch(self, writer=sys.stdout, write_headers=True): """ Handle incoming HTTP request """ if self.requesttype == 'GET': self.kvp = self.normalize_kvp(self.kvp) version_202 = ('version' in self.kvp and self.kvp['version'] == '2.0.2') accept_version_202 = ('acceptversions' in self.kvp and '2.0.2' in self.kvp['acceptversions']) if version_202 or accept_version_202: self.request_version = '2.0.2' elif self.requesttype == 'POST': if self.request.find(b'2.0.2') != -1: self.request_version = '2.0.2' if (not isinstance(self.kvp, str) and 'mode' in self.kvp and self.kvp['mode'] == 'sru'): self.mode = 'sru' self.request_version = '2.0.2' LOGGER.info('SRU mode detected; processing request') self.kvp = self.sru().request_sru2csw(self.kvp) if (not isinstance(self.kvp, str) and 'mode' in self.kvp and self.kvp['mode'] == 'oaipmh'): self.mode = 'oaipmh' self.request_version = '2.0.2' LOGGER.info('OAI-PMH mode detected; processing request.') self.oaiargs = dict((k, v) for k, v in self.kvp.items() if k) self.kvp = self.oaipmh().request(self.kvp) if self.request_version == '2.0.2': self.iface = csw2.Csw2(server_csw=self) self.context.set_model('csw') # configure transaction support, if specified in config self._gen_manager() namespaces = self.context.namespaces ops = self.context.model['operations'] constraints = self.context.model['constraints'] # generate domain model # NOTE: We should probably avoid this sort of mutable state for WSGI if 'GetDomain' not in ops: ops['GetDomain'] = self.context.gen_domains() # generate 
distributed search model, if specified in config if self.config.has_option('server', 'federatedcatalogues'): LOGGER.info('Configuring distributed search') constraints['FederatedCatalogues'] = {'values': []} for fedcat in self.config.get('server', 'federatedcatalogues').split(','): LOGGER.debug('federated catalogue: %s', fedcat) constraints['FederatedCatalogues']['values'].append(fedcat) for key, value in self.outputschemas.items(): get_records_params = ops['GetRecords']['parameters'] get_records_params['outputSchema']['values'].append( value.NAMESPACE) get_records_by_id_params = ops['GetRecordById']['parameters'] get_records_by_id_params['outputSchema']['values'].append( value.NAMESPACE) if 'Harvest' in ops: harvest_params = ops['Harvest']['parameters'] harvest_params['ResourceType']['values'].append( value.NAMESPACE) LOGGER.info('Setting MaxRecordDefault') if self.config.has_option('server', 'maxrecords'): constraints['MaxRecordDefault']['values'] = [ self.config.get('server', 'maxrecords')] # load profiles if self.config.has_option('server', 'profiles'): self.profiles = pprofile.load_profiles( os.path.join('pycsw', 'plugins', 'profiles'), pprofile.Profile, self.config.get('server', 'profiles') ) for prof in self.profiles['plugins'].keys(): tmp = self.profiles['plugins'][prof](self.context.model, namespaces, self.context) key = tmp.outputschema # to ref by outputschema self.profiles['loaded'][key] = tmp self.profiles['loaded'][key].extend_core(self.context.model, namespaces, self.config) LOGGER.debug('Profiles loaded: %s' % list(self.profiles['loaded'].keys())) # init repository # look for tablename, set 'records' as default if not self.config.has_option('repository', 'table'): self.config.set('repository', 'table', 'records') repo_filter = None if self.config.has_option('repository', 'filter'): repo_filter = self.config.get('repository', 'filter') if self.config.has_option('repository', 'source'): # load custom repository rs = self.config.get('repository', 
'source') rs_modname, rs_clsname = rs.rsplit('.', 1) rs_mod = __import__(rs_modname, globals(), locals(), [rs_clsname]) rs_cls = getattr(rs_mod, rs_clsname) try: self.repository = rs_cls(self.context, repo_filter) LOGGER.debug('Custom repository %s loaded (%s)', rs, self.repository.dbtype) except Exception as err: msg = 'Could not load custom repository' LOGGER.exception(msg) self.response = self.iface.exceptionreport( 'NoApplicableCode', 'service', msg) else: # load default repository self.orm = 'sqlalchemy' from pycsw.core import repository try: LOGGER.info('Loading default repository') self.repository = repository.Repository( self.config.get('repository', 'database'), self.context, self.environ.get('local.app_root', None), self.config.get('repository', 'table'), repo_filter ) LOGGER.debug( 'Repository loaded (local): %s.' % self.repository.dbtype) except Exception as err: msg = 'Could not load repository (local)' LOGGER.exception(msg) self.response = self.iface.exceptionreport( 'NoApplicableCode', 'service', msg) if self.requesttype == 'POST': LOGGER.debug('HTTP POST request') LOGGER.debug('CSW version: %s', self.iface.version) self.kvp = self.iface.parse_postdata(self.request) error = 0 if isinstance(self.kvp, str): # it's an exception error = 1 locator = 'service' text = self.kvp if (self.kvp.find('the document is not valid') != -1 or self.kvp.find('document not well-formed') != -1): code = 'NoApplicableCode' else: code = 'InvalidParameterValue' LOGGER.debug('HTTP Headers:\n%s.', self.environ) LOGGER.debug('Parsed request parameters: %s', self.kvp) if (not isinstance(self.kvp, str) and 'mode' in self.kvp and self.kvp['mode'] == 'opensearch'): self.mode = 'opensearch' LOGGER.info('OpenSearch mode detected; processing request.') self.kvp['outputschema'] = 'http://www.w3.org/2005/Atom' if ((len(self.kvp) == 0 and self.request_version == '3.0.0') or (len(self.kvp) == 1 and 'config' in self.kvp)): LOGGER.info('Turning on default csw30:Capabilities for base URL') 
self.kvp = { 'service': 'CSW', 'acceptversions': '3.0.0', 'request': 'GetCapabilities' } http_accept = self.environ.get('HTTP_ACCEPT', '') if 'application/opensearchdescription+xml' in http_accept: self.mode = 'opensearch' self.kvp['outputschema'] = 'http://www.w3.org/2005/Atom' if error == 0: # test for the basic keyword values (service, version, request) basic_options = ['service', 'request'] request = self.kvp.get('request', '') own_version_integer = util.get_version_integer( self.request_version) if self.request_version == '2.0.2': basic_options.append('version') for k in basic_options: if k not in self.kvp: if (k in ['version', 'acceptversions'] and request == 'GetCapabilities'): pass else: error = 1 locator = k code = 'MissingParameterValue' text = 'Missing keyword: %s' % k break # test each of the basic keyword values if error == 0: # test service if self.kvp['service'] != 'CSW': error = 1 locator = 'service' code = 'InvalidParameterValue' text = 'Invalid value for service: %s.\ Value MUST be CSW' % self.kvp['service'] # test version kvp_version = self.kvp.get('version', '') try: kvp_version_integer = util.get_version_integer(kvp_version) except Exception as err: kvp_version_integer = 'invalid_value' if (request != 'GetCapabilities' and kvp_version_integer != own_version_integer): error = 1 locator = 'version' code = 'InvalidParameterValue' text = ('Invalid value for version: %s. Value MUST be ' '2.0.2 or 3.0.0' % kvp_version) # check for GetCapabilities acceptversions if 'acceptversions' in self.kvp: for vers in self.kvp['acceptversions'].split(','): vers_integer = util.get_version_integer(vers) if vers_integer == own_version_integer: break else: error = 1 locator = 'acceptversions' code = 'VersionNegotiationFailed' text = ('Invalid parameter value in ' 'acceptversions: %s. 
Value MUST be ' '2.0.2 or 3.0.0' % self.kvp['acceptversions']) # test request if self.kvp['request'] not in \ self.context.model['operations']: error = 1 locator = 'request' if request in ['Transaction', 'Harvest']: code = 'OperationNotSupported' text = '%s operations are not supported' % request else: code = 'InvalidParameterValue' text = 'Invalid value for request: %s' % request if error == 1: # return an ExceptionReport LOGGER.error('basic service options error: %s, %s, %s', code, locator, text) self.response = self.iface.exceptionreport(code, locator, text) else: # process per the request value if 'responsehandler' in self.kvp: # set flag to process asynchronously import threading self.async = True request_id = self.kvp.get('requestid', None) if request_id is None: import uuid self.kvp['requestid'] = str(uuid.uuid4()) if self.kvp['request'] == 'GetCapabilities': self.response = self.iface.getcapabilities() elif self.kvp['request'] == 'DescribeRecord': self.response = self.iface.describerecord() elif self.kvp['request'] == 'GetDomain': self.response = self.iface.getdomain() elif self.kvp['request'] == 'GetRecords': if self.async: # process asynchronously threading.Thread(target=self.iface.getrecords).start() self.response = self.iface._write_acknowledgement() else: self.response = self.iface.getrecords() elif self.kvp['request'] == 'GetRecordById': self.response = self.iface.getrecordbyid() elif self.kvp['request'] == 'GetRepositoryItem': self.response = self.iface.getrepositoryitem() elif self.kvp['request'] == 'Transaction': self.response = self.iface.transaction() elif self.kvp['request'] == 'Harvest': if self.async: # process asynchronously threading.Thread(target=self.iface.harvest).start() self.response = self.iface._write_acknowledgement() else: self.response = self.iface.harvest() else: self.response = self.iface.exceptionreport( 'InvalidParameterValue', 'request', 'Invalid request parameter: %s' % self.kvp['request'] ) LOGGER.info('Request processed') 
        # Post-process the CSW response for the alternate front-end modes
        # (SRU / OpenSearch / OAI-PMH) before writing it out.
        if self.mode == 'sru':
            LOGGER.info('SRU mode detected; processing response.')
            self.response = self.sru().response_csw2sru(self.response,
                                                        self.environ)
        elif self.mode == 'opensearch':
            LOGGER.info('OpenSearch mode detected; processing response.')
            self.response = self.opensearch().response_csw2opensearch(
                self.response, self.config)
        elif self.mode == 'oaipmh':
            LOGGER.info('OAI-PMH mode detected; processing response.')
            self.response = self.oaipmh().response(
                self.response, self.oaiargs, self.repository,
                self.config.get('server', 'url')
            )

        return self._write_response()

    # Thin delegates: each CSW operation is forwarded to the
    # version-specific interface object held in self.iface.
    def getcapabilities(self):
        """ Handle GetCapabilities request """
        return self.iface.getcapabilities()

    def describerecord(self):
        """ Handle DescribeRecord request """
        return self.iface.describerecord()

    def getdomain(self):
        """ Handle GetDomain request """
        return self.iface.getdomain()

    def getrecords(self):
        """ Handle GetRecords request """
        return self.iface.getrecords()

    def getrecordbyid(self, raw=False):
        """ Handle GetRecordById request """
        return self.iface.getrecordbyid(raw)

    def getrepositoryitem(self):
        """ Handle GetRepositoryItem request """
        return self.iface.getrepositoryitem()

    def transaction(self):
        """ Handle Transaction request """
        return self.iface.transaction()

    def harvest(self):
        """ Handle Harvest request """
        return self.iface.harvest()

    def _write_response(self):
        """ Generate response """
        # set HTTP response headers and XML declaration
        xmldecl = ''
        appinfo = ''

        LOGGER.info('Writing response.')

        if hasattr(self, 'soap') and self.soap:
            self._gen_soap_wrapper()

        # NOTE(review): lexicographic string comparison of version numbers
        # ('10.0' < '3.5.0' as strings); works for current lxml releases but
        # is fragile -- confirm before relying on it for future versions.
        if etree.__version__ >= '3.5.0':  # remove superfluous namespaces
            etree.cleanup_namespaces(
                self.response,
                keep_ns_prefixes=self.context.keep_ns_prefixes)

        response = etree.tostring(self.response,
                                  pretty_print=self.pretty_print,
                                  encoding='unicode')

        # JSON output only when explicitly requested via outputformat KVP.
        if (isinstance(self.kvp, dict) and 'outputformat' in self.kvp and
                self.kvp['outputformat'] == 'application/json'):
            self.contenttype = self.kvp['outputformat']
            from pycsw.core.formats import fmt_json
            response = fmt_json.xml2json(response,
                                         self.context.namespaces,
                                         self.pretty_print)
        else:  # it's XML
            if 'outputformat' in self.kvp:
                self.contenttype = self.kvp['outputformat']
            else:
                self.contenttype = self.mimetype

            # XML declaration and pycsw version comment only for XML output.
            xmldecl = ('<?xml version="1.0" encoding="%s" standalone="no"?>'
                       '\n' % self.encoding)
            appinfo = '<!-- pycsw %s -->\n' % self.context.version

        if isinstance(self.contenttype, bytes):
            self.contenttype = self.contenttype.decode()

        s = (u'%s%s%s' % (xmldecl, appinfo, response)).encode(self.encoding)

        LOGGER.debug('Response code: %s',
                     self.context.response_codes[self.status])
        LOGGER.debug('Response:\n%s', s)
        # Returned as [HTTP status line, encoded payload].
        return [self.context.response_codes[self.status], s]

    def _gen_soap_wrapper(self):
        """ Generate SOAP wrapper """
        LOGGER.info('Writing SOAP wrapper.')
        node = etree.Element(
            util.nspath_eval('soapenv:Envelope', self.context.namespaces),
            nsmap=self.context.namespaces
        )

        schema_location_ns = util.nspath_eval('xsi:schemaLocation',
                                              self.context.namespaces)
        node.attrib[schema_location_ns] = '%s %s' % (
            self.context.namespaces['soapenv'],
            self.context.namespaces['soapenv']
        )

        node2 = etree.SubElement(
            node, util.nspath_eval('soapenv:Body', self.context.namespaces))

        if self.exception:
            # Wrap the error response in a soapenv:Fault with Code/Reason,
            # placing the original response under Detail.
            node3 = etree.SubElement(
                node2,
                util.nspath_eval('soapenv:Fault', self.context.namespaces)
            )
            node4 = etree.SubElement(
                node3,
                util.nspath_eval('soapenv:Code', self.context.namespaces)
            )

            etree.SubElement(
                node4,
                util.nspath_eval('soapenv:Value', self.context.namespaces)
            ).text = 'soap:Server'

            node4 = etree.SubElement(
                node3,
                util.nspath_eval('soapenv:Reason', self.context.namespaces)
            )

            etree.SubElement(
                node4,
                util.nspath_eval('soapenv:Text', self.context.namespaces)
            ).text = 'A server exception was encountered.'

            node4 = etree.SubElement(
                node3,
                util.nspath_eval('soapenv:Detail', self.context.namespaces)
            )
            node4.append(self.response)
        else:
            node2.append(self.response)

        self.response = node

    def _gen_manager(self):
        """ Update self.context.model with CSW-T advertising """
        # Only advertise Transaction/Harvest when explicitly enabled.
        if (self.config.has_option('manager', 'transactions') and
                self.config.get('manager', 'transactions') == 'true'):

            self.manager = True

            self.context.model['operations_order'].append('Transaction')

            self.context.model['operations']['Transaction'] = {
                'methods': {'get': False, 'post': True},
                'parameters': {}
            }

            # Resource types accepted by the Harvest operation.
            schema_values = [
                'http://www.opengis.net/cat/csw/2.0.2',
                'http://www.opengis.net/cat/csw/3.0',
                'http://www.opengis.net/wms',
                'http://www.opengis.net/wmts/1.0',
                'http://www.opengis.net/wfs',
                'http://www.opengis.net/wfs/2.0',
                'http://www.opengis.net/wcs',
                'http://www.opengis.net/wps/1.0.0',
                'http://www.opengis.net/sos/1.0',
                'http://www.opengis.net/sos/2.0',
                'http://www.isotc211.org/2005/gmi',
                'urn:geoss:waf',
            ]

            self.context.model['operations_order'].append('Harvest')

            self.context.model['operations']['Harvest'] = {
                'methods': {'get': False, 'post': True},
                'parameters': {
                    'ResourceType': {'values': schema_values}
                }
            }

            # NOTE(review): this re-assigns the 'Transaction' entry created
            # above, now with TransactionSchemas -- the first assignment is
            # effectively overwritten.
            self.context.model['operations']['Transaction'] = {
                'methods': {'get': False, 'post': True},
                'parameters': {
                    'TransactionSchemas': {'values': sorted(schema_values)}
                }
            }

            self.csw_harvest_pagesize = 10

            if self.config.has_option('manager', 'csw_harvest_pagesize'):
                self.csw_harvest_pagesize = int(
                    self.config.get('manager', 'csw_harvest_pagesize'))

    def _test_manager(self):
        """ Verify that transactions are allowed """

        if self.config.get('manager', 'transactions') != 'true':
            raise RuntimeError('CSW-T interface is disabled')

        ipaddress = self.environ['REMOTE_ADDR']

        # Transactions require an explicit IP whitelist: when 'allowed_ips'
        # is not configured at all, the first clause is True and the request
        # is rejected.
        if not self.config.has_option('manager', 'allowed_ips') or \
        (self.config.has_option('manager', 'allowed_ips') and not
         util.ipaddress_in_whitelist(ipaddress,
                        self.config.get('manager', 'allowed_ips').split(','))):
            raise RuntimeError(
                'CSW-T operations not allowed for this IP address: %s' % ipaddress)

    def _cql_update_queryables_mappings(self, cql, mappings):
        """ Transform CQL query's properties to underlying DB columns """
        LOGGER.debug('Raw CQL text = %s', cql)
        LOGGER.debug(str(list(mappings.keys())))
        # Returns None implicitly when cql is None.
        if cql is not None:
            for key in mappings.keys():
                try:
                    # mapping values are usually dicts carrying a 'dbcol'
                    # entry ...
                    cql = cql.replace(key, mappings[key]['dbcol'])
                except:
                    # ... but may also be plain strings; the bare except
                    # keeps both shapes working (deliberate fallback).
                    cql = cql.replace(key, mappings[key])
            LOGGER.debug('Interpolated CQL text = %s.', cql)
            return cql

    def _process_responsehandler(self, xml):
        """ Process response handler """

        if self.kvp['responsehandler'] is not None:
            LOGGER.info('Processing responsehandler %s' %
                        self.kvp['responsehandler'])

            uprh = urlparse(self.kvp['responsehandler'])

            if uprh.scheme == 'mailto':  # email
                import smtplib

                LOGGER.debug('Email detected')

                smtp_host = 'localhost'
                if self.config.has_option('server', 'smtp_host'):
                    smtp_host = self.config.get('server', 'smtp_host')

                body = ('Subject: pycsw %s results\n\n%s' %
                        (self.kvp['request'], xml))

                try:
                    LOGGER.info('Sending email')
                    msg = smtplib.SMTP(smtp_host)
                    msg.sendmail(
                        self.config.get('metadata:main', 'contact_email'),
                        uprh.path, body
                    )
                    msg.quit()
                    LOGGER.debug('Email sent successfully.')
                except Exception as err:
                    # best-effort delivery: failures are logged, not raised
                    LOGGER.exception('Error processing email')

            elif uprh.scheme == 'ftp':
                import ftplib

                LOGGER.debug('FTP detected.')

                try:
                    LOGGER.info('Sending to FTP server.')
                    ftp = ftplib.FTP(uprh.hostname)
                    if uprh.username is not None:
                        ftp.login(uprh.username, uprh.password)
                    # strip the leading '/' from the URL path for STOR
                    ftp.storbinary('STOR %s' % uprh.path[1:], StringIO(xml))
                    ftp.quit()
                    LOGGER.debug('FTP sent successfully.')
                except Exception as err:
                    # best-effort delivery: failures are logged, not raised
                    LOGGER.exception('Error processing FTP')

    @staticmethod
    def normalize_kvp(kvp):
        """Normalize Key Value Pairs.

        This method will transform all keys to lowercase and leave
        values unchanged, as specified in the CSW standard (see for
        example note C on Table 62 - KVP Encoding for DescribeRecord
        operation request of the CSW standard version 2.0.2)

        :arg kvp: a mapping with Key Value Pairs
        :type kvp: dict

        :returns: A new dictionary with normalized parameters
        """

        result = dict()
        for name, value in kvp.items():
            result[name.lower()] = value
        return result
def _write_exclusive(file_path, content):
    """Create *file_path* containing *content*, refusing to overwrite.

    Takes a LockFile on the target, raises IOError(17, 'File exists')
    (errno.EEXIST, matching ``open(..., 'x')`` semantics) if the file is
    already present, and always releases the lock.
    """
    lock = LockFile(file_path)
    lock.acquire()
    try:
        if os.path.exists(file_path):
            # fix: errno and message as separate args (was a single list arg)
            raise IOError(17, 'File exists')  # 17 == errno.EEXIST
        with open(file_path, 'w') as fp:
            print(content, file=fp)
    finally:
        lock.release()


def deploy_django(proj):
    """Deploy a Django project named *proj*.

    Creates a virtualenv under ``WSGI_BASE_PATH/<proj>``, checks the
    project out, builds it, writes the production settings module, WSGI
    script and Apache location config, then runs the configured django
    management commands (default: ``migrate``).

    Returns True on success; any failed step raises (subprocess errors,
    IOError for pre-existing generated files, OSError for an existing
    project directory).
    """
    # Deployment roots and tools are overridable through the environment.
    wsgi_base_path = os.environ.get('WSGI_BASE_PATH', '/var/www/wsgi')
    httpd_conf_dir = os.environ.get('HTTPD_CONF_DIR', '/etc/httpd/locations.d')
    httpd_host = os.environ.get('HTTPD_HOST', platform.node())
    httpd_media_base = os.environ.get('HTTPD_MEDIA_BASE',
                                      '/var/www/html/media')
    httpd_static_base = os.environ.get('HTTPD_STATIC_BASE',
                                       '/var/www/html/static')
    secret_key_gen = os.environ.get('SECRET_KEY_GEN',
                                    '/usr/bin/pwgen -c -n -y 78 1')

    proj_base = os.path.join(wsgi_base_path, proj)

    def path(p):
        # Resolve a path relative to the project base directory.
        return os.path.join(proj_base, p)

    # fix: check_output() returns bytes on Python 3 -- decode before
    # doing str operations on the generated secret key.
    secret_key = subprocess.check_output(
        secret_key_gen.split()).decode().strip().replace("'", "-")

    proj_defaults = {
        'name': proj,
        'proj_base': proj_base,
        'dst': '%(name)s-project',
        'settings': '%(name)s_production',
        'url': '/%(name)s',
        'build': 'build/build.sh',
        'wsgi': 'wsgi.py',
        'allowed_hosts': httpd_host,
        'secret_key': secret_key,
        'media_root': os.path.join(httpd_media_base, proj),
        'static_root': os.path.join(httpd_static_base, proj),
        'scm': '/usr/bin/git',
        'settings_append': DEFAULT_SETTINGS_APPEND,
        'deploy_requires': None,
        'deploy_commands': ['migrate'],
    }

    # Protect '%' from ConfigParser interpolation (strip it outright).
    proj_defaults['secret_key'] = re.sub(r'%', r'',
                                         proj_defaults['secret_key'])

    # Choose clone subcommand from the scm binary name (e.g. git -> clone).
    proj_defaults['scm_clone'] = SCM_DEFAULT_CHECKOUT[
        os.path.split(proj_defaults['scm'])[-1]]

    # Load defaults, then force-read the per-project config file.
    cfg = SafeConfigParser(proj_defaults)
    with open(proj + '.cfg', 'r') as cfg_fp:
        cfg.readfp(cfg_fp)

    # Create the project directory (fails if it already exists).
    os.mkdir(proj_base)

    # Virtualenv
    virtualenv.create_environment(proj_base)

    # Checkout
    subprocess.check_call([
        cfg.get(CFG_SECTION, 'scm'),
        cfg.get(CFG_SECTION, 'scm_clone'),
        cfg.get(CFG_SECTION, 'src'),
        path(cfg.get(CFG_SECTION, 'dst')),
    ])

    # Build inside the virtualenv: BASH_ENV makes bash source bin/activate.
    activate = path('bin/activate')
    build = os.path.join(
        cfg.get(CFG_SECTION, 'dst'),
        cfg.get(CFG_SECTION, 'build')
    )
    subprocess.check_call([build], cwd=proj_base, env={'BASH_ENV': activate})

    # Install deployment-only requirements, if any.
    deploy_requires = cfg.get(CFG_SECTION, 'deploy_requires')
    if deploy_requires:
        logger.debug('Installing: %s', deploy_requires)
        cmd = [os.path.join(virtualenv.path_locations(proj_base)[-1], 'pip'),
               'install']
        cmd.extend(parse_list(deploy_requires))
        subprocess.check_call(cmd)

    conf_items = dict(cfg.items(CFG_SECTION))

    # Create settings module
    settings_file = path(cfg.get(CFG_SECTION, 'settings')) + '.py'
    _write_exclusive(settings_file, DJANGO_SETTINGS_TEMPLATE % conf_items)

    # Create wsgi script
    wsgi_file = path(cfg.get(CFG_SECTION, 'wsgi'))
    _write_exclusive(wsgi_file, WSGI_TEMPLATE % conf_items)

    # Create apache conf, pointing it at the virtualenv's site-packages.
    conf_file = os.path.join(httpd_conf_dir,
                             cfg.get(CFG_SECTION, 'name')) + '.conf'
    conf = dict(conf_items)
    conf['site_libs'] = os.path.join(
        virtualenv.path_locations(proj_base)[1], 'site-packages')
    _write_exclusive(conf_file, HTTPD_CONF_TEMPLATE % conf)

    # Perform django management commands with the production settings.
    deploy_commands = cfg.get(CFG_SECTION, 'deploy_commands')
    if deploy_commands:
        manage = [os.path.join(virtualenv.path_locations(proj_base)[-1],
                               virtualenv.expected_exe),
                  'manage.py']
        os.chdir(path(cfg.get(CFG_SECTION, 'dst')))
        # Deployment django environment
        dep_env = os.environ.copy()
        dep_env['DJANGO_SETTINGS_MODULE'] = cfg.get(CFG_SECTION, 'settings')
        dep_env['PYTHONPATH'] = path('.')
        logger.debug('Environment for commands: PYTHONPATH=%s',
                     dep_env['PYTHONPATH'])
        logger.debug(' Django settings: %s',
                     dep_env['DJANGO_SETTINGS_MODULE'])
        for cmd in parse_list(deploy_commands):
            logger.debug("Executing '%s'", ' '.join(manage + [cmd]))
            subprocess.check_call(manage + cmd.split(), env=dep_env)

    # That's it. Remember to reload apache
    print('You should reload apache:\n', '\t', 'systemctl reload httpd')
    return True
def parse_args(args_str):
    """Parse API-server arguments from *args_str* (a whitespace-separated
    command line), layering: built-in defaults < [-c conf_file] INI
    sections < CLI flags.

    Returns (args_obj, remaining_argv) from argparse.parse_known_args.
    """
    args_obj = None
    # Source any specified config/ini file
    # Turn off help, so we print all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)

    conf_parser.add_argument("-c", "--conf_file", action='append',
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())

    # Built-in defaults for the [DEFAULTS] section.
    defaults = {
        'reset_config': False,
        'wipe_config': False,
        'listen_ip_addr': _WEB_HOST,
        'listen_port': _WEB_PORT,
        'admin_port': _ADMIN_PORT,
        'cassandra_server_list': "127.0.0.1:9160",
        'collectors': None,
        'http_server_port': '8084',
        'http_server_ip': _WEB_HOST,
        'log_local': True,
        'log_level': SandeshLevel.SYS_NOTICE,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'trace_file': '/var/log/contrail/vnc_openstack.err',
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'logging_conf': '',
        'logger_class': None,
        'multi_tenancy': None,
        'aaa_mode': None,
        'zk_server_ip': '127.0.0.1:2181',
        'worker_id': '0',
        'rabbit_server': 'localhost',
        'rabbit_port': '5672',
        # NOTE(review): '******' looks like masked placeholder credentials;
        # confirm real defaults are supplied via config file.
        'rabbit_user': '******',
        'rabbit_password': '******',
        'rabbit_vhost': None,
        'rabbit_ha_mode': False,
        'rabbit_max_pending_updates': '4096',
        'rabbit_health_check_interval': '120.0',  # in seconds
        'cluster_id': '',
        'max_requests': 1024,
        'paginate_count': 256,
        'region_name': 'RegionOne',
        # NOTE(review): default is '5' but the --stale_lock_seconds help
        # text below says "default 60" -- one of the two is wrong.
        'stale_lock_seconds': '5',  # lock but no resource past this => stale
        'cloud_admin_role': cfgm_common.CLOUD_ADMIN_ROLE,
        'global_read_only_role': cfgm_common.GLOBAL_READ_ONLY_ROLE,
        'rabbit_use_ssl': False,
        'kombu_ssl_version': '',
        'kombu_ssl_keyfile': '',
        'kombu_ssl_certfile': '',
        'kombu_ssl_ca_certs': '',
        'object_cache_entries': '10000',  # max number of objects cached for read
        'object_cache_exclude_types': '',  # csv of object types to *not* cache
        'debug_object_cache_types': '',  # csv of object types to debug cache
        'db_engine': 'cassandra',
        'max_request_size': 1024000,
        'amqp_timeout': 660,
        'config_api_ssl_enable': False,
        'config_api_ssl_keyfile': '',
        'config_api_ssl_certfile': '',
        'config_api_ssl_ca_cert': '',
        'tcp_keepalive_enable': True,
        'tcp_keepalive_idle_time': 7200,
        'tcp_keepalive_interval': 75,
        'tcp_keepalive_probes': 9,
    }
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))

    # keystone options (the [KEYSTONE] INI section)
    ksopts = {
        'signing_dir': '/var/lib/contrail/keystone-signing',
        'auth_host': '127.0.0.1',
        'auth_port': '35357',
        'auth_protocol': 'http',
        'admin_user': '',
        'admin_password': '',
        'admin_tenant_name': '',
        'admin_user_domain_name': None,
        'identity_uri': None,
        'project_domain_name': None,
        # NOTE(review): TLS verification disabled by default -- deliberate?
        'insecure': True,
        'cafile': '',
        'certfile': '',
        'keyfile': '',
        'auth_type': 'password',
        'auth_url': '',
        'default_domain_id': 'default',
    }

    # cassandra options (the [CASSANDRA] INI section)
    cassandraopts = {
        'cassandra_user': None,
        'cassandra_password': None
    }

    # sandesh options
    sandeshopts = SandeshConfig.get_default_options()

    config = None
    # parse_known_args below may clobber args_obj.conf_file; keep the
    # original so it can be restored at the end.
    saved_conf_file = args.conf_file
    if args.conf_file:
        # allow_no_value so bare keys (e.g. admin_token) parse cleanly
        config = SafeConfigParser({'admin_token': None},
                                  allow_no_value=True)
        config.read(args.conf_file)
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items("DEFAULTS")))
            if 'multi_tenancy' in config.options('DEFAULTS'):
                # boolean, so command-line sets the typed value
                defaults['multi_tenancy'] = config.getboolean(
                    'DEFAULTS', 'multi_tenancy')
            if 'default_encoding' in config.options('DEFAULTS'):
                default_encoding = config.get('DEFAULTS', 'default_encoding')
                # side effect: sets the generated resource XSD encoding
                gen.resource_xsd.ExternalEncoding = default_encoding
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))
        if 'QUOTA' in config.sections():
            for (k, v) in config.items("QUOTA"):
                try:
                    if str(k) != 'admin_token':
                        vnc_quota.QuotaHelper.default_quota[str(k)] = int(v)
                except ValueError:
                    # non-integer quota values are silently skipped
                    pass
        if 'CASSANDRA' in config.sections():
            cassandraopts.update(dict(config.items('CASSANDRA')))
        SandeshConfig.update_options(sandeshopts, config)

    # Override with CLI options
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(ksopts)
    defaults.update(cassandraopts)
    defaults.update(sandeshopts)
    parser.set_defaults(**defaults)

    parser.add_argument(
        "--cassandra_server_list",
        help="List of cassandra servers in IP Address:Port format",
        nargs='+')
    parser.add_argument(
        "--cassandra_use_ssl", action="store_true",
        help="Enable TLS for cassandra connection")
    parser.add_argument(
        "--cassandra_ca_certs",
        help="Cassandra CA certs")
    parser.add_argument(
        "--redis_server_ip",
        help="IP address of redis server")
    parser.add_argument(
        "--redis_server_port",
        help="Port of redis server")
    parser.add_argument(
        "--auth", choices=['keystone', 'noauth', 'no-auth'],
        help="Type of authentication for user-requests")
    parser.add_argument(
        "--reset_config", action="store_true",
        help="Warning! Destroy previous configuration and start clean")
    parser.add_argument(
        "--wipe_config", action="store_true",
        help="Warning! Destroy previous configuration")
    parser.add_argument(
        "--listen_ip_addr",
        help="IP address to provide service on, default %s" % (_WEB_HOST))
    parser.add_argument(
        "--listen_port",
        help="Port to provide service on, default %s" % (_WEB_PORT))
    parser.add_argument(
        "--admin_port",
        help="Port with local auth for admin access, default %s"
             % (_ADMIN_PORT))
    parser.add_argument(
        "--collectors",
        help="List of VNC collectors in ip:port format",
        nargs="+")
    parser.add_argument(
        "--http_server_port",
        help="Port of Introspect HTTP server")
    parser.add_argument(
        "--http_server_ip",
        help="IP address of Introspect HTTP server, default %s"
             % (_WEB_HOST))
    parser.add_argument(
        "--log_local", action="store_true",
        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument(
        "--logging_conf",
        help=("Optional logging configuration file, default: None"))
    parser.add_argument(
        "--logger_class",
        help=("Optional external logger class, default: None"))
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument(
        "--log_file",
        help="Filename for the logs to be written to")
    parser.add_argument(
        "--trace_file",
        help="Filename for the errors backtraces to be written to")
    parser.add_argument("--use_syslog", action="store_true",
                        help="Use syslog for logging")
    parser.add_argument("--syslog_facility",
                        help="Syslog facility to receive log lines")
    parser.add_argument(
        "--multi_tenancy", action="store_true",
        help="Validate resource permissions (implies token validation)")
    parser.add_argument(
        "--aaa_mode", choices=AAA_MODE_VALID_VALUES,
        help="AAA mode")
    parser.add_argument(
        "--worker_id",
        help="Worker Id")
    parser.add_argument(
        "--zk_server_ip",
        help="Ip address:port of zookeeper server")
    parser.add_argument(
        "--rabbit_server",
        help="Rabbitmq server address")
    parser.add_argument(
        "--rabbit_port",
        help="Rabbitmq server port")
    parser.add_argument(
        "--rabbit_user",
        help="Username for rabbit")
    parser.add_argument(
        "--rabbit_vhost",
        help="vhost for rabbit")
    parser.add_argument(
        "--rabbit_password",
        help="password for rabbit")
    parser.add_argument(
        "--rabbit_ha_mode",
        help="True if the rabbitmq cluster is mirroring all queue")
    parser.add_argument(
        "--rabbit_max_pending_updates",
        help="Max updates before stateful changes disallowed")
    parser.add_argument(
        "--rabbit_health_check_interval",
        help="Interval seconds between consumer heartbeats to rabbitmq")
    parser.add_argument(
        "--cluster_id",
        help="Used for database keyspace separation")
    parser.add_argument(
        "--max_requests", type=int,
        help="Maximum number of concurrent requests served by api server")
    parser.add_argument(
        "--paginate_count", type=int,
        help="Default number of items when pagination is requested")
    parser.add_argument("--cassandra_user",
                        help="Cassandra user name")
    parser.add_argument("--cassandra_password",
                        help="Cassandra password")
    parser.add_argument("--stale_lock_seconds",
                        help="Time after which lock without resource is stale, default 60")
    parser.add_argument(
        "--cloud_admin_role",
        help="Role name of cloud administrator")
    parser.add_argument(
        "--global_read_only_role",
        help="Role name of user with Read-Only access to all objects")
    parser.add_argument("--object_cache_entries",
                        help="Maximum number of objects cached for read, default 10000")
    parser.add_argument("--object_cache_exclude_types",
                        help="Comma separated values of object types to not cache")
    parser.add_argument(
        "--debug_object_cache_types",
        help="Comma separated values of object types to debug trace between "
             "the cache and the DB")
    parser.add_argument("--db_engine",
                        help="Database engine to use, default cassandra")
    parser.add_argument("--max_request_size", type=int,
                        help="Maximum size of bottle requests served by api server")
    parser.add_argument("--amqp_timeout", help="Timeout for amqp request")
    SandeshConfig.add_parser_arguments(parser)
    args_obj, remaining_argv = parser.parse_known_args(remaining_argv)
    args_obj.conf_file = args.conf_file
    args_obj.config_sections = config
    # Values coming from the INI file are plain strings; split list-typed
    # options that argparse would otherwise have produced as lists.
    if isinstance(args_obj.cassandra_server_list, string_types):
        args_obj.cassandra_server_list =\
            args_obj.cassandra_server_list.split()
    if isinstance(args_obj.collectors, string_types):
        args_obj.collectors = args_obj.collectors.split()
    args_obj.sandesh_config = SandeshConfig.from_parser_arguments(args_obj)
    # Normalize booleans that may arrive as strings from the config file.
    args_obj.cassandra_use_ssl = (str(args_obj.cassandra_use_ssl).lower() == 'true')
    args_obj.config_api_ssl_enable = (str(args_obj.config_api_ssl_enable).lower() == 'true')

    # convert log_local to a boolean
    if not isinstance(args_obj.log_local, bool):
        args_obj.log_local = bool(literal_eval(args_obj.log_local))
    args_obj.conf_file = saved_conf_file

    return args_obj, remaining_argv
class IniConfigLoader(object):
    """This config loader transforms a traditional INI file into a Montague
    Standard Format dictionary. It is compatible with most but not all
    PasteDeploy files."""

    def __init__(self, path):
        # path: filesystem path of the INI file to load.
        self.path = path
        self._data = self._read()
        self._config = self._process()

    def _read(self):
        """Read the INI file into a plain {section: {option: value}} dict."""
        # We need to keep the parser around so the logging conversion can use it.
        # PasteDeploy-style interpolation variables available to every get().
        path_defaults = {
            'here': os.path.dirname(self.path),
            '__file__': self.path,
        }
        self._parser = SafeConfigParser()
        self._parser.read(self.path)
        self._globals = self._parser.defaults()
        data = {}
        for section in self._parser.sections():
            section_data = data.setdefault(section, {})
            for option in self._parser.options(section):
                # [DEFAULT] values leak into every section; skip them here,
                # they are exposed separately under config['globals'].
                if option in self._globals:
                    continue
                try:
                    section_data[option] = self._parser.get(
                        section, option, vars=path_defaults)
                except InterpolationError:
                    # Fall back to the raw value when interpolation fails
                    # (e.g. '%' sequences not meant for ConfigParser).
                    section_data[option] = self._parser.get(
                        section, option, raw=True)
        return data

    def _process(self):
        """Convert the raw section dict into a Montague Standard Format dict."""
        orig = self._data
        config = {}
        # 'scheme:name' section headers (e.g. 'app:main') are regrouped by
        # kind via SCHEMEMAP; unqualified sections pass through untouched.
        for key in six.iterkeys(orig):
            if ':' in key:
                scheme, name = key.split(':', 1)
                kind_config = config.setdefault(SCHEMEMAP[scheme], {})
                kind_config[name] = orig[key]
            else:
                config[key] = orig[key]
        config['globals'] = {
            'here': os.path.dirname(self.path),
            '__file__': self.path,
        }
        for key, value in six.iteritems(self._globals):
            config['globals'][key] = value
        apps = config.setdefault('application', {})
        filters = config.setdefault('filter', {})

        # Rewrite each PasteDeploy [filter-app:...] as an app that points to
        # a synthetic filter (named _montague_filter_N) via 'filter-with'.
        generated_filter_count = 0
        filter_apps = config.pop('filter-app', {})
        for name, filter_app in six.iteritems(filter_apps):
            use = filter_app.pop('next')
            generated_filter_count += 1
            filter_name = '_montague_filter_{0}'.format(generated_filter_count)
            apps[name] = {'use': use, 'filter-with': filter_name}
            filters[filter_name] = filter_app

        # Rewrite each [pipeline:...]: the last item is the app, preceding
        # items become a chain of synthetic filters linked by 'filter-with',
        # built from the app outwards (hence the reverse()).
        pipelines = config.pop('pipeline', {})
        for name, pipeline in six.iteritems(pipelines):
            items = pipeline['pipeline'].split()
            pipeline_app = items[-1]
            pipeline_filters = items[:-1]
            pipeline_filters.reverse()
            apps[name] = {'use': pipeline_app}
            last_item = apps[name]
            for count, use_filter in enumerate(pipeline_filters, start=1):
                filter_name = '_montague_pipeline_{0}_filter_{1}'.format(
                    name, count)
                filters[filter_name] = {'use': use_filter}
                last_item['filter-with'] = filter_name
                last_item = filters[filter_name]

        # Convert stdlib-fileConfig logging sections only when all of them
        # ([loggers]/[handlers]/[formatters]...) are present.
        if all([self._parser.has_section(section_name)
                for section_name in LOGGING_SECTIONS]):
            loggers = convert_loggers(self._parser)
            handlers = convert_handlers(self._parser)
            formatters = convert_formatters(self._parser)
            config['logging'] = {
                'main': combine(loggers, handlers, formatters)}

        # Guarantee every MSF top-level key exists, even if empty.
        for key in MSF_KEYS:
            config.setdefault(key, {})
        return config

    def config(self):
        """Return the full Montague Standard Format dict."""
        return self._config

    def app_config(self, name):
        """Return a LoadableConfig for application *name*.

        Raises KeyError when *name* is neither an application nor a
        composite.
        """
        # This method isn't actually necessary, since montague can extract
        # the config information from the MSF dict returned by .config()
        # but it's a nice example of how to do it.
        if name in self._config['application']:
            constructor = LoadableConfig.app
            local_config = self._config['application'][name]
        elif name in self._config['composite']:
            constructor = LoadableConfig.composite
            local_config = self._config['composite'][name]
        else:
            raise KeyError
        return constructor(
            name=name, config=local_config,
            global_config=self._config['globals'])

    def server_config(self, name):
        """Return a LoadableConfig for server *name*; KeyError if unknown."""
        if name in self._config['server']:
            constructor = LoadableConfig.server
            local_config = self._config['server'][name]
        else:
            raise KeyError
        return constructor(
            name=name, config=local_config,
            global_config=self._config['globals'])

    def filter_config(self, name):
        """Return a LoadableConfig for filter *name*; KeyError if unknown."""
        if name in self._config['filter']:
            constructor = LoadableConfig.filter
            local_config = self._config['filter'][name]
        else:
            raise KeyError
        return constructor(
            name=name, config=local_config,
            global_config=self._config['globals'])

    def logging_config(self, name):
        # This is provided by .config(), so no need to implement it here.
        raise NotImplementedError
class GlobusConfigParser(object):
    """
    Wraps a SafeConfigParser to do modified get()s and config file loading.
    """
    # Section used for options given without a section, and as the
    # failover target in get().
    _GENERAL_CONF_SECTION = 'general'

    def __init__(self):
        self._parser = SafeConfigParser()
        self._load_config()

    def _load_config(self):
        # TODO: /etc is not windows friendly, not sure about expanduser
        # Read order: library defaults, system-wide, then per-user --
        # later files override earlier ones.
        self._read([_get_lib_config_path(), "/etc/globus.cfg",
                    os.path.expanduser("~/.globus.cfg")])

    def _read(self, filenames):
        """
        Wraps up self._parser.read() to inject '[general]\n' at the
        beginning of file contents.

        Originally, this was implemented by catching
        MissingSectionHeaderErrors, but that's actually overcomplicated and
        unnecessary. Always inserting it, uniformly, is simpler and doesn't
        change the semantics of config parsing at all.
        """
        for fname in filenames:
            try:
                with open(fname) as f:
                    # wrap the file-like object in a StringIO so that we can
                    # pass it to the SafeConfigParser as a file like object
                    wrapped_file = StringIO(
                        '[{}]\n'.format(self._GENERAL_CONF_SECTION) +
                        f.read())
                    try:
                        self._parser.readfp(wrapped_file, fname)
                    except DuplicateSectionError:
                        # the file already declares its own [general];
                        # rewind and read it unmodified instead
                        f.seek(0)
                        self._parser.readfp(f, fname)
            except IOError:
                # missing or unreadable config files are simply skipped
                continue

    def get(self, option, section=None, environment=None,
            failover_to_general=False, check_env=False,
            type_cast=str):
        """
        Attempt to lookup an option in the config file. Optionally failover
        to the general section if the option is not found.

        Also optionally, check for a relevant environment variable, which is
        named always as GLOBUS_SDK_{option.upper()}. Note that 'section'
        doesn't slot into the naming at all. Otherwise, we'd have to
        contend with GLOBUS_SDK_GENERAL_... for almost everything, and
        GLOBUS_SDK_ENVIRONMENT\\PROD_... which is awful.

        Returns None for an unfound key, rather than raising a NoOptionError.
        """
        # environment is just a fancy name for sections that start with
        # 'environment '
        if environment:
            section = 'environment ' + environment

        # if you don't specify a section or an environment, assume it's the
        # general conf section
        if section is None:
            section = self._GENERAL_CONF_SECTION

        # if this is a config option which checks the shell env, look there
        # *first* for a value -- env values have higher precedence than
        # config files so that you can locally override the behavior of a
        # command in a given shell or subshell
        env_option_name = 'GLOBUS_SDK_{}'.format(option.upper())
        value = None
        if check_env and env_option_name in os.environ:
            value = os.environ[env_option_name]
        else:
            try:
                value = self._parser.get(section, option)
            except (NoOptionError, NoSectionError):
                if failover_to_general:
                    # recurse once against [general]; that call's own
                    # failover can't fire again since the sections match
                    value = self.get(option,
                                     section=self._GENERAL_CONF_SECTION)

        # cast only found values, so an absent option stays None
        if value is not None:
            value = type_cast(value)
        return value
def get(self, name, option_name, *args, **kwargs):
    """Fetch *option_name*, first mapping *name* to its backing section."""
    resolved_section = self._get_section_name(name)
    value = SafeConfigParser.get(self, resolved_section, option_name,
                                 *args, **kwargs)
    return value