def read_conf(conf_path, section_name=None, defaults=None, use_yaml=False):
    """Read a configuration file and return it as a dictionary.

    :param conf_path: path of the configuration file to load.
    :param section_name: if given, return only that section's options as a
        flat dict; otherwise return a dict of dicts keyed by section name.
    :param defaults: optional defaults handed to the underlying parser.
    :param use_yaml: if True, delegate entirely to ``parse_config`` (YAML).
    :returns: a dict of options, or a dict of per-section dicts.
    """
    if use_yaml:
        return parse_config(conf_path)
    if defaults is None:
        defaults = {}
    parser = SafeConfigParser(defaults)
    success = parser.read(conf_path)
    if not success:
        print("Unable to read config from %s" % conf_path)
        sys.exit(1)

    def _pop_raw_log_format(section):
        # If log_format is set, extract it raw and blank it in the parser:
        # interpolating %(...)s variables inside log_format would raise
        # ConfigParser.InterpolationMissingOptionError.  Blank with set()
        # instead of remove_option, which can fail without reason.
        if parser.has_option(section, 'log_format'):
            raw = parser.get(section, 'log_format', raw=True)
            parser.set(section, 'log_format', '')
            return raw
        return None

    if section_name:
        if not parser.has_section(section_name):
            print('Unable to find section %s in config %s' % (
                section_name, conf_path))
            # BUG FIX: was a bare exit(1); use sys.exit for consistency
            # with the unreadable-file branch above.
            sys.exit(1)
        log_format = _pop_raw_log_format(section_name)
        conf = dict(parser.items(section_name))
        # Put the raw log_format back after the other options interpolated.
        if log_format:
            conf['log_format'] = log_format
    else:
        conf = {}
        for section in parser.sections():
            log_format = _pop_raw_log_format(section)
            conf[section] = dict(parser.items(section))
            if log_format:
                conf[section]['log_format'] = log_format
    return conf
def get_download_params(self, example_id):
    """Return the local configuration for *example_id*.

    Returns a pair ``(config_section, other_sections)`` where the first
    element is the ``[config]`` section as a plain dict (empty if absent)
    and the second maps every remaining section name to its parser section.
    """
    cfg_path = self.get_local_path(example_id)
    parser = SafeConfigParser()
    parser.read(cfg_path)
    if 'config' in parser:
        config_section = dict(parser.items('config'))
    else:
        config_section = {}
    skipped = ('config', 'DEFAULT')
    other_sections = {}
    for name, section in parser.items():
        if name not in skipped:
            other_sections[name] = section
    return config_section, other_sections
def set_namespace_options(namespace, options, remove=None):
    """
    Set options in the local namespace configuration file.

    Can have nasty effects, be careful, only use in test code.

    :param namespace: the namespace to work with
    :param options: a dictionary with options to set
    :param remove: an iterable of options to remove
    :returns: a dictionary with all options of the namespace
    """
    parser = SafeConfigParser({})
    candidates = []
    loaded = []
    for conf_path, _ in _config_paths():
        candidates.append(conf_path)
        loaded.extend(parser.read((conf_path,)))
    if not loaded:
        raise ValueError(
            "Could not read configuration from any of %s" % candidates)
    if not parser.has_section(namespace):
        print('Namespace %s was not found in %s' % (namespace, loaded))
        parser.add_section(namespace)
    for key, val in options.items():
        parser.set(namespace, key, str(val))
    # Drop the requested options (no-op when remove is None/empty).
    for key in (remove or ()):
        parser.remove_option(namespace, key)
    # Rewrite the last (highest-precedence) file that was actually read.
    with open(loaded[-1], 'w') as outfile:
        parser.write(outfile)
    return dict(parser.items(namespace))
def getManifest(fp, format, defaults=None):
    """Read the manifest from the given open file pointer according to the
    given ManifestFormat.

    Pass a dict as ``defaults`` to override the defaults from the manifest
    format.
    """
    if defaults is None:
        defaults = format.defaults
    parser = SafeConfigParser()
    if six.PY2:
        parser.readfp(fp)
    else:
        # Python 3: read the whole stream, decoding bytes if necessary.
        raw = fp.read()
        if isinstance(raw, six.binary_type):
            raw = raw.decode()
        parser.read_string(raw)
    results = {}
    section = format.resourceType
    for key in format.keys:
        if parser.has_option(section, key):
            results[key] = parser.get(section, key)
        else:
            results[key] = defaults.get(key, None)
    for key in format.parameterSections:
        sectionName = "%s:%s" % (section, key,)
        if parser.has_section(sectionName):
            results[key] = dict(parser.items(sectionName))
        else:
            results[key] = {}
    return results
class Config(object):
    """Named wrapper around a SafeConfigParser for test configuration files."""

    def __init__(self):
        self.config = SafeConfigParser()
        # Base name (without extension) of the last parsed config file.
        self.name = ''

    def parse(self, fname, override):
        """Read ``fname`` and apply ``override`` entries on top of it.

        ``override`` is a sequence of "section.option.value" strings,
        optionally comma-separated.  NOTE(review): when ``override`` is
        falsy it is left untouched and then iterated below — callers
        appear expected to always pass an (possibly empty) list; confirm.
        """
        if override:
            # "a.b.c,d.e.f" -> [['a', 'b', 'c'], ['d', 'e', 'f']]
            override = [x for x in csv.reader(
                ' '.join(override).split(','), delimiter='.')]
        logger.info('Reading configuration file: {}'.format(fname))
        if not os.path.isfile(fname):
            logger.interrupt('File doesn\'t exist: {}'.format(fname))
        # Keep option names case-sensitive (default optionxform lowercases).
        self.config.optionxform = str
        self.config.read(fname)
        for section, option, value in override:
            if not self.config.has_section(section):
                self.config.add_section(section)
            self.config.set(section, option, value)
        basename = os.path.basename(fname)
        self.name = os.path.splitext(basename)[0]

    @safe
    def _get_options_as_dict(self, section):
        # Return the section's options as a dict, or {} when absent.
        if section in self.config.sections():
            return {p: v for p, v in self.config.items(section)}
        else:
            return {}
def getManifest(fp, format, defaults=None):
    """Read the manifest from the given open file pointer according to the
    given ManifestFormat.

    Pass a dict as ``defaults`` to override the defaults from the manifest
    format.
    """
    if defaults is None:
        defaults = format.defaults
    parser = SafeConfigParser()
    parser.readfp(fp)
    section = format.resourceType
    results = {}
    for key in format.keys:
        # Fall back to the (possibly overridden) defaults when missing.
        if parser.has_option(section, key):
            results[key] = parser.get(section, key)
        else:
            results[key] = defaults.get(key, None)
    for key in format.parameterSections:
        sectionName = "%s:%s" % (section, key,)
        if parser.has_section(sectionName):
            results[key] = dict(parser.items(sectionName))
        else:
            results[key] = {}
    return results
def getPluginSettings(themeDirectory, plugins=None):
    """Given an IResourceDirectory for a theme, return the settings for the
    given list of plugins (or all plugins, if not given) provided as a list
    of (name, plugin) pairs.

    Returns a dict of dicts, with the outer dict having plugin names as
    keys and containing plugins settings (key/value pairs) as values.
    """
    if plugins is None:
        plugins = getPlugins()
    manifestContents = {}
    if themeDirectory.isFile(MANIFEST_FILENAME):
        parser = SafeConfigParser()
        fp = themeDirectory.openFile(MANIFEST_FILENAME)
        try:
            parser.readfp(fp)
            for section in parser.sections():
                manifestContents[section] = dict(parser.items(section))
        finally:
            # Some file-like objects have no close(); ignore that case.
            try:
                fp.close()
            except AttributeError:
                pass
    pluginSettings = {}
    for name, plugin in plugins:
        sectionKey = "{0:s}:{1:s}".format(THEME_RESOURCE_NAME, name)
        pluginSettings[name] = manifestContents.get(sectionKey, {})
    return pluginSettings
class Config(object):
    """A ConfigParser wrapper to support defaults when calling instance
    methods, and also tied to a single section"""

    SECTION = 'scrapyd'

    def __init__(self, values=None, extra_sources=()):
        # Two construction modes:
        #  * values is None: load the packaged defaults, then layer every
        #    config file from _getsources() plus extra_sources on top
        #    (later files override earlier ones).
        #  * values given: in-memory parser seeded from `values`.
        if values is None:
            sources = self._getsources()
            default_config = get_data(__package__, 'default_scrapyd.conf').decode('utf8')
            self.cp = SafeConfigParser()
            self.cp.readfp(io.StringIO(default_config))
            sources.extend(extra_sources)
            for fname in sources:
                try:
                    with io.open(fname) as fp:
                        self.cp.readfp(fp)
                except (IOError, OSError):
                    # Missing/unreadable config files are simply skipped.
                    pass
        else:
            self.cp = SafeConfigParser(values)
            self.cp.add_section(self.SECTION)

    def _getsources(self):
        # Candidate config files, lowest to highest precedence.
        sources = ['/etc/scrapyd/scrapyd.conf', r'c:\scrapyd\scrapyd.conf']
        sources += sorted(glob.glob('/etc/scrapyd/conf.d/*'))
        sources += ['scrapyd.conf']
        sources += [expanduser('~/.scrapyd.conf')]
        scrapy_cfg = closest_scrapy_cfg()
        if scrapy_cfg:
            sources.append(scrapy_cfg)
        return sources

    def _getany(self, method, option, default):
        # Shared lookup helper: read `option` from SECTION via the given
        # parser method, falling back to `default` (when not None) on a
        # missing section/option.
        try:
            return method(self.SECTION, option)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise

    def get(self, option, default=None):
        return self._getany(self.cp.get, option, default)

    def getint(self, option, default=None):
        return self._getany(self.cp.getint, option, default)

    def getfloat(self, option, default=None):
        return self._getany(self.cp.getfloat, option, default)

    def getboolean(self, option, default=None):
        return self._getany(self.cp.getboolean, option, default)

    def items(self, section, default=None):
        # Unlike the getters above, reads an arbitrary section.
        try:
            return self.cp.items(section)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise
class Config(object):
    """A ConfigParser wrapper to support defaults when calling instance
    methods, and also tied to a single section"""

    SECTION = 'silkyy'

    def __init__(self, values=None, extra_sources=()):
        # values is None: read packaged defaults then local config files;
        # otherwise build an in-memory parser seeded from `values`.
        # NOTE(review): extra_sources is accepted but never used in this
        # class — confirm whether that is intentional.
        if values is None:
            sources = self._getsources()
            self.cp = ConfigParser()
            if __package__:
                default_config = ensure_str(get_data(__package__, 'default.conf'))
                self._load_config_file(StringIO(default_config))
            for source in sources:
                if os.path.exists(source):
                    self._load_config_file(open(source))
        else:
            self.cp = SafeConfigParser(values)
            self.cp.add_section(self.SECTION)

    def _load_config_file(self, fp):
        # Config files are section-less: wrap the stream in a synthetic
        # [SECTION] header before handing it to the parser.
        config = StringIO()
        config.write('[' + self.SECTION + ']' + os.linesep)
        config.write(fp.read())
        config.seek(0, os.SEEK_SET)
        self.cp.readfp(config)

    def _getsources(self):
        sources = ['conf/silkyy.conf']
        return sources

    def get(self, option, default=None):
        # Environment variables (SILKYY_FOO_BAR) take precedence over the
        # config file; dots in the option name map to underscores.
        env_key = 'SILKYY_' + option.replace('.', '_').upper()
        try:
            return os.getenv(env_key) or self.cp.get(self.SECTION, option)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise

    def _get(self, option, conv, default=None):
        # Fetch then convert with `conv` (note: the default is converted too).
        return conv(self.get(option, default))

    def getint(self, option, default=None):
        return self._get(option, int, default)

    def getboolean(self, option, default=None):
        return self._get(option, str2bool, default)

    def getfloat(self, option, default=None):
        return self._get(option, float, default)

    def items(self, section, default=None):
        try:
            return self.cp.items(section)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise
class Config(object):
    """A ConfigParser wrapper to support defaults when calling instance
    methods, and also tied to a single section"""

    SECTION = 'scrapydartx'

    def __init__(self, values=None, extra_sources=()):
        # values is None: packaged defaults, then files from _getsources(),
        # then extra_sources (open file objects) layered on top.
        # values given: in-memory parser seeded from `values`.
        if values is None:
            sources = self._getsources()
            default_config = get_data(__package__, 'default_scrapyd.conf').decode('utf8')
            self.cp = SafeConfigParser()
            self.cp.readfp(StringIO(default_config))
            self.cp.read(sources)
            for fp in extra_sources:
                self.cp.readfp(fp)
        else:
            self.cp = SafeConfigParser(values)
            self.cp.add_section(self.SECTION)

    def _getsources(self):
        # Candidate config files, lowest to highest precedence.
        sources = [
            '/etc/scrapydartx/scrapydartx.conf',
            r'c:\scrapyd\scrapyd.conf'
        ]
        sources += sorted(glob.glob('/etc/scrapydartx/conf.d/*'))
        sources += ['scrapydartx.conf']
        sources += [expanduser('~/.scrapydartx.conf')]
        scrapy_cfg = closest_scrapy_cfg()
        if scrapy_cfg:
            sources.append(scrapy_cfg)
        return sources

    def _getany(self, method, option, default):
        # Shared lookup helper: read `option` from SECTION via the given
        # parser method, falling back to `default` (when not None).
        try:
            return method(self.SECTION, option)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise

    def get(self, option, default=None):
        return self._getany(self.cp.get, option, default)

    def getint(self, option, default=None):
        return self._getany(self.cp.getint, option, default)

    def getfloat(self, option, default=None):
        return self._getany(self.cp.getfloat, option, default)

    def getboolean(self, option, default=None):
        return self._getany(self.cp.getboolean, option, default)

    def items(self, section, default=None):
        # Unlike the getters above, reads an arbitrary section.
        try:
            return self.cp.items(section)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise
def load_config(config_files=(), overrides=()):
    """Load the default config files plus ``config_files``, apply the
    (section, option, value) ``overrides``, and return everything as a
    dict of per-section dicts."""
    parser = SafeConfigParser()
    parser.optionxform = str  # make parsing case-sensitive
    parser.read(FILENAMES + config_files)
    for section, option, value in overrides:
        if value is None:
            continue
        # Booleans are stored as 0/1 rather than True/False strings.
        if isinstance(value, bool):
            value = int(value)
        parser.set(section, option, str(value))
    result = {}
    for section in parser.sections():
        result[section] = dict(parser.items(section))
    return result
def parse(file, raise_conflicts=False, separator="."):
    """
    Reads in a config file and convert is to a dictionary where each entry
    follows the pattern dict["section.key"]="value"
    """
    parser = SafeConfigParser()
    parser.read(file)
    flattened = {}
    for section in parser.sections():
        for option, value in parser.items(section):
            flattened['{}{}{}'.format(section, separator, option)] = value
    return flattened
def read_conf(conf_path, section_name=None, defaults=None, use_yaml=False):
    """Read a configuration file and return it as a dictionary.

    :param conf_path: path of the configuration file to load.
    :param section_name: if given, return only that section's options as a
        flat dict; otherwise return a dict of dicts keyed by section name.
    :param defaults: optional defaults handed to the underlying parser.
    :param use_yaml: if True, delegate entirely to ``parse_config`` (YAML).
    :returns: a dict of options, or a dict of per-section dicts.
    """
    if use_yaml:
        return parse_config(conf_path)
    if defaults is None:
        defaults = {}
    parser = SafeConfigParser(defaults)
    if not parser.read(conf_path):
        print("Unable to read config from %s" % conf_path)
        sys.exit(1)
    if section_name:
        if not parser.has_section(section_name):
            print('Unable to find section %s in config %s' % (section_name,
                                                              conf_path))
            # BUG FIX: was a bare exit(1); sys.exit is the proper API and
            # matches the unreadable-file branch above.
            sys.exit(1)
        return dict(parser.items(section_name))
    return {section: dict(parser.items(section))
            for section in parser.sections()}
def get_scrapycfg_targets(cfgfiles=None):
    """Read deploy targets from scrapy.cfg-style files.

    :param cfgfiles: iterable of config file paths (or None).
    :returns: dict mapping target name -> option dict.  The [deploy]
        section becomes the 'default' target; every [deploy:NAME] section
        becomes target NAME inheriting the defaults.  Targets whose
        'project' is not numeric are discarded.
    """
    cfg = SafeConfigParser()
    cfg.read(cfgfiles or [])
    baset = dict(cfg.items('deploy')) if cfg.has_section('deploy') else {}
    targets = {}
    targets['default'] = baset
    for x in cfg.sections():
        if x.startswith('deploy:'):
            t = baset.copy()
            t.update(cfg.items(x))
            targets[x[7:]] = t
    for tname, t in list(targets.items()):
        try:
            int(t.get('project', 0))
        except ValueError:
            # Don't import non-numeric project IDs, and also throw away the
            # URL and credentials associated with these projects (since the
            # project ID does not belong to SH, neither do the endpoint or the
            # auth information)
            del targets[tname]
            # BUG FIX: the original fell through and kept mutating the
            # already-removed dict; skip straight to the next target.
            continue
        if t.get('url', "").endswith('scrapyd/'):
            # Strip the trailing 'scrapyd/' path component.
            t['url'] = t['url'][:-8]
    targets.setdefault('default', {})
    return targets
def get_scrapycfg_targets(cfgfiles=None):
    """Build the deploy-target mapping from scrapy.cfg-style files:
    [deploy] becomes 'default', each [deploy:NAME] becomes NAME, and
    targets with a non-numeric project ID are dropped."""
    cfg = SafeConfigParser()
    cfg.read(cfgfiles or [])
    if cfg.has_section('deploy'):
        baset = dict(cfg.items('deploy'))
    else:
        baset = {}
    targets = {'default': baset}
    prefix = 'deploy:'
    for section in cfg.sections():
        if not section.startswith(prefix):
            continue
        entry = baset.copy()
        entry.update(cfg.items(section))
        targets[section[len(prefix):]] = entry
    for tname, t in list(targets.items()):
        try:
            int(t.get('project', 0))
        except ValueError:
            # Non-numeric project IDs do not belong to SH; drop the target
            # along with its endpoint URL and credentials.
            del targets[tname]
        url = t.get('url', "")
        if url.endswith('scrapyd/'):
            t['url'] = url[:-8]
    targets.setdefault('default', {})
    return targets
def read_ini(cls, path, section=None):
    """read preferences from an .ini file"""
    parser = ConfigParser()
    # Preference names are case-sensitive; the default optionxform lowercases.
    parser.optionxform = str
    parser.readfp(mozfile.load(path))
    if section:
        if section not in parser.sections():
            raise PreferencesReadError("No section '%s' in %s" % (section, path))
        pairs = parser.items(section, raw=True)
    else:
        pairs = parser.defaults().items()
    # cast the preferences since .ini is just strings
    return [(name, cls.cast(value)) for name, value in pairs]
def read_ini(cls, path, section=None):
    """read preferences from an .ini file

    :param path: path of the .ini file, loaded via ``mozfile.load``.
    :param section: optional section name; when given, that section's raw
        (uninterpolated) items are returned, otherwise the DEFAULT options.
    :returns: list of (name, value) pairs with values cast via ``cls.cast``.
    :raises PreferencesReadError: if ``section`` is given but missing.
    """
    parser = ConfigParser()
    # Preference names are case-sensitive; default optionxform lowercases.
    parser.optionxform = str
    parser.readfp(mozfile.load(path))
    if section:
        if section not in parser.sections():
            raise PreferencesReadError("No section '%s' in %s" % (section, path))
        # raw=True: do not interpolate %(...)s placeholders in values.
        retval = parser.items(section, raw=True)
    else:
        retval = parser.defaults().items()
    # cast the preferences since .ini is just strings
    return [(i, cls.cast(j)) for i, j in retval]
def read_parse_config(filename):
    """
    Reads, validates and create a config dictionary

    :param filename: The filename to parse
    :return: return a dict with all config variables or None
    """
    config = SafeConfigParser()
    config.read(filename)
    # Sanity checks for sections
    if not verify_section_exists(config, 'general'):
        return None
    config_dict = {
        # Default to port 33706
        'http_server_port': 33706,
        # defaults for directories
        'input_path': '/tmp/s3-syslog-push/input',
        'log_path': '/tmp/s3-syslog-push/log',
    }
    for dir_key in ('input_path', 'log_path'):
        try:
            os.makedirs(config_dict[dir_key], 0o755)
        except OSError as e:
            if e.errno == 17:  # FileExistsError
                # TODO: handle this scenario. Shouldn't happen.
                pass
    # File options override the defaults above.
    for key, value in config.items('general'):
        config_dict[key] = value
    return config_dict
def get_sources():
    "Get the checked-out libraries as pairs (name, url) organized by section."
    from six.moves.configparser import SafeConfigParser
    parser = SafeConfigParser()
    read_files = parser.read('buildout.cfg')
    # BUG FIX: this was an ``assert``, which is silently stripped under
    # ``python -O``; validate input explicitly and fail loudly instead.
    if read_files != ['buildout.cfg']:
        raise RuntimeError(
            "Could not read 'buildout.cfg' from the current directory")
    # with io.open('buildout.cfg', encoding='utf-8') as f:
    #     parser.readfp(f)
    from collections import OrderedDict
    result = OrderedDict()
    for k in 'libraries', 'extensions', 'examples':
        result[k] = [(name, browser_url(scm))
                     for name, scm in parser.items('sources.' + k)]
    return result
def _load(self):
    """Load the file list from the index file.

    @files will be an empty dictionary if the file doesn't exist.
    """
    root_logger.debug("Loading Index file from '%s'", self._index)
    self.files = {}
    parser = SafeConfigParser()
    # Keys are file names; keep them case-sensitive.
    parser.optionxform = str
    parser.read(self._index)
    # Only the [files] section is meaningful; anything else is ignored.
    if "files" in parser.sections():
        self.files.update(parser.items("files"))
def _load(self):
    """Load the file list from the index file.

    @files will be an empty dictionary if the file doesn't exist.
    """
    root_logger.debug("Loading Index file from '%s'", self._index)
    self.files = {}
    p = SafeConfigParser()
    # Keys are file names; keep them case-sensitive.
    p.optionxform = str
    # ConfigParser.read silently ignores a missing index file.
    p.read(self._index)
    for section in p.sections():
        # Only the [files] section is meaningful; other sections ignored.
        if section == "files":
            for (key, value) in p.items(section):
                self.files[key] = value
def load_namespace_conf(namespace, failsafe=False, fresh=False):
    """
    Load configuration for the namespace from the local configuration files.

    :param namespace: name of the namespace.
    :param failsafe: in case of error, return a dummy configuration.
    :param fresh: if True, reload configuration from files,
        do not use the cache.
    :returns: a dictionary with local namespace configuration.
    """
    if not fresh and namespace in NS_CONF_CACHE:
        return NS_CONF_CACHE[namespace]
    parser = SafeConfigParser({})
    success = False
    # Do not load a non-overriding file (local) if any file has
    # already been loaded before.
    loaded_files = list()
    for p, override in _config_paths():
        if override or not loaded_files:
            flist = parser.read((p,))
            loaded_files.extend(flist)
    conf = NamespaceConfiguration(loaded_files, namespace=namespace)
    if not loaded_files:
        print('Unable to read namespace config. We tried %s' % (
            [x for x in _config_paths()]))
    else:
        import logging
        logging.info("Configuration loaded from %s", repr(loaded_files))
    if not parser.has_section(namespace):
        print('Unable to find [%s] section in any of %s' % (
            namespace, loaded_files))
    else:
        conf.update(parser.items(namespace))
        proxy = conf.get('proxy')
        if not proxy:
            print("Missing field proxy in namespace config")
        else:
            success = True
    # Cache even a partial/dummy configuration; `fresh` callers can bypass.
    NS_CONF_CACHE[namespace] = conf
    if not (success or failsafe):
        sys.exit(1)
    return conf
def _load(self):
    """Load the modules from the file @_path.

    @modules will be an empty dictionary if the file doesn't exist.
    """
    root_logger.debug("Loading StateFile from '%s'", self._path)
    self.modules = {}
    parser = SafeConfigParser()
    # Keep option names case-sensitive.
    parser.optionxform = str
    parser.read(self._path)
    # Values matching str(True)/str(False) become real booleans; everything
    # else stays a string.
    booleans = {str(True): True, str(False): False}
    for module in parser.sections():
        entries = {}
        for key, value in parser.items(module):
            entries[key] = booleans.get(value, value)
        self.modules[module] = entries
def _load(self):
    """Load the modules from the file @_path.

    @modules will be an empty dictionary if the file doesn't exist.
    """
    root_logger.debug("Loading StateFile from '%s'", self._path)
    self.modules = {}
    p = SafeConfigParser()
    # Keep option names case-sensitive.
    p.optionxform = str
    # ConfigParser.read silently ignores a missing state file.
    p.read(self._path)
    for module in p.sections():
        self.modules[module] = {}
        for (key, value) in p.items(module):
            # Revive booleans that were stored via str(): 'True'/'False'.
            if value == str(True):
                value = True
            elif value == str(False):
                value = False
            self.modules[module][key] = value
def read_default_rbac_rules(self, conf_file):
    """Parse default RBAC rules from the [default-domain] section of
    ``conf_file``.

    Each option is ``object.field = role:CRUD, role:CRUD, ...``; entries
    with no valid role:crud pair are skipped, as are crud strings with
    characters outside 'CRUD'.
    """
    config = SafeConfigParser()
    config.read(conf_file)
    if 'default-domain' in config.sections():
        raw_rules = dict(config.items('default-domain'))
    else:
        raw_rules = {}
    rbac_rules = []
    # lhs is object.field, rhs is a comma-separated list of perms
    for lhs, rhs in list(raw_rules.items()):
        obj_field = lhs.split(".")
        perms = rhs.split(",")  # perms like ['foo:CRU', 'bar:CR']
        role_to_crud_dict = {}
        for perm in perms:
            parts = perm.split(":")
            # both role and crud must be specified
            if len(parts) < 2:
                continue
            role_name = parts[0].strip()
            crud = parts[1].strip()
            # <crud> must be a subset of [CRUD]
            if not set(crud).issubset(set('CRUD')):
                continue
            role_to_crud_dict[role_name] = crud
        if not role_to_crud_dict:
            continue
        rbac_rules.append({
            'rule_object': obj_field[0],
            'rule_field': obj_field[1] if len(obj_field) > 1 else None,
            'rule_perms': [{'role_name': rn, 'role_crud': rc}
                           for rn, rc in list(role_to_crud_dict.items())],
        })
    return rbac_rules
def _update_settings_from_file(section, settings):
    """Walk up from the current directory looking for a setup.cfg and merge
    its [tool:multilint] options into ``settings``.

    :param section: NOTE(review): appears unused here (the section name is
        hard-coded below) — kept for interface compatibility; confirm.
    :param settings: dict updated in place with sanitized config items.
    """
    tries = 0
    current_directory = os.path.normpath(os.getcwd())
    config_file = None
    while current_directory and tries < MAX_CONFIG_SEARCH_DEPTH:
        potential_path = os.path.join(current_directory, 'setup.cfg')
        if os.path.exists(potential_path):
            config_file = potential_path
            break
        new_directory = os.path.split(current_directory)[0]
        if current_directory == new_directory:
            # Reached the filesystem root.
            break
        current_directory = new_directory
        tries += 1
    if config_file and os.path.exists(config_file):
        # BUG FIX: mode 'rU' raises ValueError on Python 3.11+ ('U' was
        # removed); universal newlines are the default in text mode, so
        # plain 'r' is equivalent.
        with open(config_file, 'r') as fp:
            config = SafeConfigParser()
            # BUG FIX: readfp() was removed in Python 3.12; read_file()
            # has been its replacement since 3.2.
            config.read_file(fp)
            if config.has_section('tool:multilint'):
                settings.update(sanitize(config.items('tool:multilint')))
def load_namespace_conf(namespace):
    """Load the [namespace] section from the local SDS configuration
    (system file, conf.d drop-ins, then the user's own file).  Exits the
    process when no config is readable, the section is missing, or the
    mandatory 'proxy' field is absent."""
    def places():
        yield '/etc/oio/sds.conf'
        for f in glob('/etc/oio/sds.conf.d/*'):
            yield f
        yield path.expanduser('~/.oio/sds.conf')

    parser = SafeConfigParser({})
    if not parser.read(places()):
        print('Unable to read namespace config')
        exit(1)
    if not parser.has_section(namespace):
        print('Unable to find [%s] section config' % namespace)
        exit(1)
    conf = dict(parser.items(namespace))
    proxy = conf.get('proxy')
    if not proxy:
        print("Missing field proxy in namespace config")
        exit(1)
    return conf
def _update_settings_from_file(section, settings):
    """Walk up from the current directory looking for a setup.cfg and merge
    its [tool:multilint] options into ``settings``.

    :param section: NOTE(review): appears unused here (the section name is
        hard-coded below) — kept for interface compatibility; confirm.
    :param settings: dict updated in place with sanitized config items.
    """
    tries = 0
    current_directory = os.path.normpath(os.getcwd())
    config_file = None
    while current_directory and tries < MAX_CONFIG_SEARCH_DEPTH:
        potential_path = os.path.join(current_directory, 'setup.cfg')
        if os.path.exists(potential_path):
            config_file = potential_path
            break
        new_directory = os.path.split(current_directory)[0]
        if current_directory == new_directory:
            # Reached the filesystem root.
            break
        current_directory = new_directory
        tries += 1
    if config_file and os.path.exists(config_file):
        # BUG FIX: mode 'rU' raises ValueError on Python 3.11+ ('U' was
        # removed); universal newlines are the default in text mode, so
        # plain 'r' is equivalent.
        with open(config_file, 'r') as fp:
            config = SafeConfigParser()
            # BUG FIX: readfp() was removed in Python 3.12; read_file()
            # has been its replacement since 3.2.
            config.read_file(fp)
            if config.has_section('tool:multilint'):
                settings.update(sanitize(config.items('tool:multilint')))
def parse_config_file(filename):
    """
    Parses a configuration file and returns a settings dictionary.

    Args:
        filename (str): File to read configuration settings from.

    Returns:
        dict: A dictionary of settings options.
    """
    parser = SafeConfigParser()
    with open(filename) as fp:
        parser.readfp(fp)
    settings = {}
    for section in parser.sections():
        settings[section] = {
            key: _parse_config_val(raw)
            for key, raw in parser.items(section)
        }
    return settings
def parse_args(args_str):
    """Parse API-server arguments from ``args_str``.

    Two-phase parsing: a bare parser first extracts -c/--conf_file, the
    named ini files seed the defaults, then the full argparse parser
    (inheriting those defaults) consumes the remaining CLI options.

    :param args_str: the full command line as a single string.
    :returns: (args_obj, remaining_argv) from parse_known_args.
    """
    args_obj = None
    # Source any specified config/ini file
    # Turn off help, so we print all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file", action='append',
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())
    defaults = {
        'reset_config': False,
        'wipe_config': False,
        'listen_ip_addr': _WEB_HOST,
        'listen_port': _WEB_PORT,
        'admin_port': _ADMIN_PORT,
        'cassandra_server_list': "127.0.0.1:9160",
        'collectors': None,
        'http_server_port': '8084',
        'http_server_ip': _WEB_HOST,
        'log_local': True,
        'log_level': SandeshLevel.SYS_NOTICE,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'trace_file': '/var/log/contrail/vnc_openstack.err',
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'logging_conf': '',
        'logger_class': None,
        'multi_tenancy': None,
        'aaa_mode': None,
        'zk_server_ip': '127.0.0.1:2181',
        'worker_id': '0',
        'rabbit_server': 'localhost',
        'rabbit_port': '5672',
        'rabbit_user': '******',
        'rabbit_password': '******',
        'rabbit_vhost': None,
        'rabbit_ha_mode': False,
        'rabbit_max_pending_updates': '4096',
        'rabbit_health_check_interval': '120.0',  # in seconds
        'cluster_id': '',
        'max_requests': 1024,
        'paginate_count': 256,
        'region_name': 'RegionOne',
        'stale_lock_seconds': '5',  # lock but no resource past this => stale
        'cloud_admin_role': cfgm_common.CLOUD_ADMIN_ROLE,
        'global_read_only_role': cfgm_common.GLOBAL_READ_ONLY_ROLE,
        'rabbit_use_ssl': False,
        'kombu_ssl_version': '',
        'kombu_ssl_keyfile': '',
        'kombu_ssl_certfile': '',
        'kombu_ssl_ca_certs': '',
        'object_cache_entries': '10000',  # max number of objects cached for read
        'object_cache_exclude_types': '',  # csv of object types to *not* cache
        'debug_object_cache_types': '',  # csv of object types to debug cache
        'db_engine': 'cassandra',
        'max_request_size': 1024000,
        'amqp_timeout': 660,
        'config_api_ssl_enable': False,
        'config_api_ssl_keyfile': '',
        'config_api_ssl_certfile': '',
        'config_api_ssl_ca_cert': '',
        'tcp_keepalive_enable': True,
        'tcp_keepalive_idle_time': 7200,
        'tcp_keepalive_interval': 75,
        'tcp_keepalive_probes': 9,
    }
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
    # keystone options
    ksopts = {
        'signing_dir': '/var/lib/contrail/keystone-signing',
        'auth_host': '127.0.0.1',
        'auth_port': '35357',
        'auth_protocol': 'http',
        'admin_user': '',
        'admin_password': '',
        'admin_tenant_name': '',
        'admin_user_domain_name': None,
        'identity_uri': None,
        'project_domain_name': None,
        'insecure': True,
        'cafile': '',
        'certfile': '',
        'keyfile': '',
        'auth_type': 'password',
        'auth_url': '',
        'default_domain_id': 'default',
    }
    # cassandra options
    cassandraopts = {
        'cassandra_user': None,
        'cassandra_password': None
    }
    # sandesh options
    sandeshopts = SandeshConfig.get_default_options()
    config = None
    # conf_file is temporarily replaced below; keep the original to restore.
    saved_conf_file = args.conf_file
    if args.conf_file:
        config = SafeConfigParser({'admin_token': None}, allow_no_value=True)
        config.read(args.conf_file)
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items("DEFAULTS")))
            # getboolean so 'true'/'false' strings become real booleans.
            if 'multi_tenancy' in config.options('DEFAULTS'):
                defaults['multi_tenancy'] = config.getboolean(
                    'DEFAULTS', 'multi_tenancy')
            if 'default_encoding' in config.options('DEFAULTS'):
                default_encoding = config.get('DEFAULTS', 'default_encoding')
                gen.resource_xsd.ExternalEncoding = default_encoding
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))
        if 'QUOTA' in config.sections():
            for (k, v) in config.items("QUOTA"):
                try:
                    # Only integer quota values are imported; admin_token
                    # is the parser-level default and must be skipped.
                    if str(k) != 'admin_token':
                        vnc_quota.QuotaHelper.default_quota[str(k)] = int(v)
                except ValueError:
                    pass
        if 'CASSANDRA' in config.sections():
            cassandraopts.update(dict(config.items('CASSANDRA')))
        # NOTE(review): placed inside the conf_file branch — update_options
        # is only called with a real parser, never None; confirm.
        SandeshConfig.update_options(sandeshopts, config)
    # Override with CLI options
    # Don't surpress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(ksopts)
    defaults.update(cassandraopts)
    defaults.update(sandeshopts)
    parser.set_defaults(**defaults)
    parser.add_argument(
        "--cassandra_server_list",
        help="List of cassandra servers in IP Address:Port format",
        nargs='+')
    parser.add_argument(
        "--cassandra_use_ssl", action="store_true",
        help="Enable TLS for cassandra connection")
    parser.add_argument(
        "--cassandra_ca_certs", help="Cassandra CA certs")
    parser.add_argument(
        "--redis_server_ip", help="IP address of redis server")
    parser.add_argument(
        "--redis_server_port", help="Port of redis server")
    parser.add_argument(
        "--auth", choices=['keystone', 'noauth', 'no-auth'],
        help="Type of authentication for user-requests")
    parser.add_argument(
        "--reset_config", action="store_true",
        help="Warning! Destroy previous configuration and start clean")
    parser.add_argument(
        "--wipe_config", action="store_true",
        help="Warning! Destroy previous configuration")
    parser.add_argument(
        "--listen_ip_addr",
        help="IP address to provide service on, default %s" % (_WEB_HOST))
    parser.add_argument(
        "--listen_port",
        help="Port to provide service on, default %s" % (_WEB_PORT))
    parser.add_argument(
        "--admin_port",
        help="Port with local auth for admin access, default %s" % (_ADMIN_PORT))
    parser.add_argument(
        "--collectors",
        help="List of VNC collectors in ip:port format", nargs="+")
    parser.add_argument(
        "--http_server_port", help="Port of Introspect HTTP server")
    parser.add_argument(
        "--http_server_ip",
        help="IP address of Introspect HTTP server, default %s" % (_WEB_HOST))
    parser.add_argument(
        "--log_local", action="store_true",
        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument(
        "--logging_conf",
        help=("Optional logging configuration file, default: None"))
    parser.add_argument(
        "--logger_class",
        help=("Optional external logger class, default: None"))
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument(
        "--log_file", help="Filename for the logs to be written to")
    parser.add_argument(
        "--trace_file",
        help="Filename for the errors backtraces to be written to")
    parser.add_argument(
        "--use_syslog", action="store_true",
        help="Use syslog for logging")
    parser.add_argument(
        "--syslog_facility",
        help="Syslog facility to receive log lines")
    parser.add_argument(
        "--multi_tenancy", action="store_true",
        help="Validate resource permissions (implies token validation)")
    parser.add_argument(
        "--aaa_mode", choices=AAA_MODE_VALID_VALUES, help="AAA mode")
    parser.add_argument(
        "--worker_id", help="Worker Id")
    parser.add_argument(
        "--zk_server_ip", help="Ip address:port of zookeeper server")
    parser.add_argument(
        "--rabbit_server", help="Rabbitmq server address")
    parser.add_argument(
        "--rabbit_port", help="Rabbitmq server port")
    parser.add_argument(
        "--rabbit_user", help="Username for rabbit")
    parser.add_argument(
        "--rabbit_vhost", help="vhost for rabbit")
    parser.add_argument(
        "--rabbit_password", help="password for rabbit")
    parser.add_argument(
        "--rabbit_ha_mode",
        help="True if the rabbitmq cluster is mirroring all queue")
    parser.add_argument(
        "--rabbit_max_pending_updates",
        help="Max updates before stateful changes disallowed")
    parser.add_argument(
        "--rabbit_health_check_interval",
        help="Interval seconds between consumer heartbeats to rabbitmq")
    parser.add_argument(
        "--cluster_id",
        help="Used for database keyspace separation")
    parser.add_argument(
        "--max_requests", type=int,
        help="Maximum number of concurrent requests served by api server")
    parser.add_argument(
        "--paginate_count", type=int,
        help="Default number of items when pagination is requested")
    parser.add_argument("--cassandra_user", help="Cassandra user name")
    parser.add_argument("--cassandra_password", help="Cassandra password")
    parser.add_argument(
        "--stale_lock_seconds",
        help="Time after which lock without resource is stale, default 60")
    parser.add_argument(
        "--cloud_admin_role", help="Role name of cloud administrator")
    parser.add_argument(
        "--global_read_only_role",
        help="Role name of user with Read-Only access to all objects")
    parser.add_argument(
        "--object_cache_entries",
        help="Maximum number of objects cached for read, default 10000")
    parser.add_argument(
        "--object_cache_exclude_types",
        help="Comma separated values of object types to not cache")
    parser.add_argument(
        "--debug_object_cache_types",
        help="Comma separated values of object types to debug trace between "
        "the cache and the DB")
    parser.add_argument(
        "--db_engine",
        help="Database engine to use, default cassandra")
    parser.add_argument(
        "--max_request_size", type=int,
        help="Maximum size of bottle requests served by api server")
    parser.add_argument("--amqp_timeout", help="Timeout for amqp request")
    SandeshConfig.add_parser_arguments(parser)
    args_obj, remaining_argv = parser.parse_known_args(remaining_argv)
    args_obj.conf_file = args.conf_file
    args_obj.config_sections = config
    # When provided via ini files these list-valued options arrive as
    # whitespace-separated strings; normalize them to lists.
    if isinstance(args_obj.cassandra_server_list, string_types):
        args_obj.cassandra_server_list =\
            args_obj.cassandra_server_list.split()
    if isinstance(args_obj.collectors, string_types):
        args_obj.collectors = args_obj.collectors.split()
    args_obj.sandesh_config = SandeshConfig.from_parser_arguments(args_obj)
    # String 'true'/'True' (from ini defaults) -> real booleans.
    args_obj.cassandra_use_ssl = (str(args_obj.cassandra_use_ssl).lower() == 'true')
    args_obj.config_api_ssl_enable = (str(args_obj.config_api_ssl_enable).lower() == 'true')
    # convert log_local to a boolean
    if not isinstance(args_obj.log_local, bool):
        args_obj.log_local = bool(literal_eval(args_obj.log_local))
    args_obj.conf_file = saved_conf_file
    return args_obj, remaining_argv
def configparser2yaml(cpfile):
    """Convert a legacy .ini metadata control file to a YAML string.

    Reads *cpfile* (UTF-8 ini file), nests ``contact:*`` and
    ``distribution:*`` sections under ``contact``/``distribution`` keys,
    restructures the ``identification`` section's flat keyword options into
    a nested ``keywords`` mapping, and returns the result serialized with
    ``yaml.safe_dump``.
    """
    dict_ = {}
    cp = ConfigParser()
    with codecs.open(cpfile, encoding='utf-8') as fh:
        cp.readfp(fh)
    for section in cp.sections():
        if section.startswith('contact:'):
            # contacts are now nested: [contact:main] -> contact.main
            if 'contact' not in dict_:
                dict_['contact'] = {}
            section2 = dict_['contact'][section.split(':')[1]] = {}
        elif section.startswith('distribution:'):
            # distributions now nested
            if 'distribution' not in dict_:
                dict_['distribution'] = {}
            section2 = dict_['distribution'][section.split(':')[1]] = {}
        else:
            section2 = dict_[section] = {}
        for k, v in cp.items(section):
            if section == 'identification':
                # keywords are now nested; pre-create every keyword bucket
                # so the per-key assignments below cannot KeyError
                if 'keywords' not in section2:
                    section2['keywords'] = {}
                if 'default' not in section2['keywords']:
                    section2['keywords']['default'] = {}
                if 'gc_cst' not in section2['keywords']:
                    section2['keywords']['gc_cst'] = {}
                if 'wmo' not in section2['keywords']:
                    section2['keywords']['wmo'] = {}
                if 'hnap_category_information' not in section2['keywords']:
                    section2['keywords']['hnap_category_information'] = {}
                if 'hnap_category_geography' not in section2['keywords']:
                    section2['keywords']['hnap_category_geography'] = {}
                if 'hnap_category_content' not in section2['keywords']:
                    section2['keywords']['hnap_category_content'] = {}
            if k in ['topiccategory']:
                section2['topiccategory'] = [v]
            if k in ['keywords_en', 'keywords_fr']:
                section2['keywords']['default'][k] = [k2.strip() for k2 in v.split(',')]  # noqa
            if k in ['keywords_gc_cst_en']:
                section2['keywords']['gc_cst']['keywords_en'] = [k2.strip() for k2 in v.split(',')]  # noqa
            if k in ['keywords_gc_cst_fr']:
                section2['keywords']['gc_cst']['keywords_fr'] = [k2.strip() for k2 in v.split(',')]  # noqa
            if k in ['keywords_wmo']:
                # NOTE(review): only keywords_en is populated for wmo —
                # presumably WMO keywords are not translated; confirm
                section2['keywords']['wmo']['keywords_en'] = [k2.strip() for k2 in v.split(',')]  # noqa
            if k in ['hnap_category_information_en']:
                # the _en value is copied into both languages — TODO confirm
                section2['keywords']['hnap_category_information']['keywords_en'] = [v]  # noqa
                section2['keywords']['hnap_category_information']['keywords_fr'] = [v]  # noqa
            if k in ['hnap_category_geography_en']:
                section2['keywords']['hnap_category_geography']['keywords_en'] = [v]  # noqa
            if k in ['hnap_category_geography_fr']:
                section2['keywords']['hnap_category_geography']['keywords_fr'] = [v]  # noqa
            if k in ['hnap_category_content_en']:
                section2['keywords']['hnap_category_content']['keywords_en'] = [v]  # noqa
            if k in ['hnap_category_content_fr']:
                section2['keywords']['hnap_category_content']['keywords_fr'] = [v]  # noqa
            if k == 'keywords_type':
                # keywords_type applies to every keyword bucket
                section2['keywords']['default'][k] = v
                section2['keywords']['gc_cst'][k] = v
                section2['keywords']['wmo'][k] = v
                section2['keywords']['hnap_category_geography'][k] = v
                section2['keywords']['hnap_category_information'][k] = v
                section2['keywords']['hnap_category_content'][k] = v
            else:
                # NOTE(review): this else pairs with the keywords_type test,
                # so every other key is also copied flat into the section —
                # including the keyword_* keys above; confirm intended
                section2[k] = v
    return yaml.safe_dump(dict_, default_flow_style=False, allow_unicode=True)
def parse_args(args_str):
    """Parse dm_server command-line options layered over optional ini files.

    Please see the example below.
    python dm_server.py --rabbit_server localhost --rabbit_port 5672
        --cassandra_server_list 10.1.2.3:9160 --api_server_ip 10.1.2.3
        --api_server_use_ssl False --analytics_server_ip 10.1.2.3
        --zk_server_ip 10.1.2.3 --zk_server_port 2181
        --collectors 127.0.0.1:8086 --http_server_port 8090 [--reset_config]

    :param args_str: whitespace-separated argv string
    :returns: argparse Namespace with normalized list/bool values and the
        original -c/--conf_file value restored in ``conf_file``
    """
    # Source any specified config/ini file
    # Turn off help, so we see all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file", action='append',
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())

    defaults = default_options()
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
    defaults.update(SandeshConfig.get_default_options())
    # parse_args below clobbers conf_file with parser defaults; keep a copy
    saved_conf_file = args.conf_file
    if args.conf_file:
        config = SafeConfigParser()
        config.read(args.conf_file)
        defaults.update(dict(config.items("DEFAULTS")))
        # SECURITY options only apply when use_certs is explicitly enabled
        if ('SECURITY' in config.sections() and
                'use_certs' in config.options('SECURITY')):
            if config.getboolean('SECURITY', 'use_certs'):
                defaults.update(dict(config.items("SECURITY")))
        if 'KEYSTONE' in config.sections():
            defaults.update(dict(config.items("KEYSTONE")))
        if 'CASSANDRA' in config.sections():
            defaults.update(dict(config.items('CASSANDRA')))
        SandeshConfig.update_options(defaults, config)
    # Override with CLI options
    # Don't suppress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.set_defaults(**defaults)
    add_parser_arguments(parser)
    args = parser.parse_args(remaining_argv)
    # Values sourced from an ini file arrive as strings; normalize to lists.
    # isinstance(..., string_types) matches the sibling parse_args helpers
    # and, unlike the previous `type(...) is str`, also covers py2 unicode.
    if isinstance(args.cassandra_server_list, string_types):
        args.cassandra_server_list = args.cassandra_server_list.split()
    if isinstance(args.collectors, string_types):
        args.collectors = args.collectors.split()
    args.sandesh_config = SandeshConfig.from_parser_arguments(args)
    # Booleans may arrive as the strings 'True'/'False' from the ini file
    args.cassandra_use_ssl = (str(args.cassandra_use_ssl).lower() == 'true')
    args.rabbit_use_ssl = (str(args.rabbit_use_ssl).lower() == 'true')
    args.zookeeper_ssl_enable = (
        str(args.zookeeper_ssl_enable).lower() == 'true')
    args.dnsmasq_reload_by_signal = \
        (str(args.dnsmasq_reload_by_signal).lower() == 'true')
    args.conf_file = saved_conf_file
    return args
def main(args_str=' '.join(sys.argv[1:])):
    """Entry point for nodemgr: parse options, then run the event manager.

    Layers configuration as: hard-coded defaults < sandesh defaults <
    config-file sections < command-line flags, then spawns the node-type
    specific event manager under gevent.

    NOTE: the default argument is evaluated once at import time — callers
    that invoke main() later always get the import-time argv.
    """
    # Parse Arguments
    node_parser = argparse.ArgumentParser(add_help=False)
    node_parser.add_argument("--nodetype", default='contrail-analytics',
                             help='Type of node which nodemgr is managing')
    try:
        args, remaining_argv = node_parser.parse_known_args(args_str.split())
    except Exception:
        print_usage_and_exit()
    # Baseline defaults; many only matter for database node types
    default = {
        'rules': '',
        'collectors': [],
        'db_port': '9042',
        'db_jmx_port': '7199',
        'db_user': None,
        'db_password': None,
        'db_use_ssl': False,
        'minimum_diskgb': 256,
        'corefile_path': '/var/crashes',
        'cassandra_repair_interval': 24,
        'cassandra_repair_logdir': '/var/log/contrail/',
        'log_local': False,
        'log_level': SandeshLevel.SYS_DEBUG,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'hostname': None
    }
    # Best-effort host IP discovery; absence is tolerated
    try:
        default['hostip'] = socket.gethostbyname(socket.getfqdn())
    except Exception:
        pass
    default.update(SandeshConfig.get_default_options(['DEFAULTS']))
    sandesh_opts = SandeshConfig.get_default_options()
    node_type = args.nodetype
    if node_type not in node_properties:
        sys.stderr.write("Node type '" + str(node_type) + "' is incorrect\n")
        sys.exit(1)
    # Second-stage parser: node type determines the default config file
    config_parser = argparse.ArgumentParser(parents=[node_parser],
                                            add_help=False)
    config_parser.add_argument(
        "--config", default=node_properties[node_type]['config_file'],
        help='Path to a config file')
    try:
        args, remaining_argv = config_parser.parse_known_args(remaining_argv)
    except Exception:
        print_usage_and_exit()
    config_file_path = args.config
    if (os.path.exists(config_file_path) is False):
        sys.stderr.write("config file '" + config_file_path +
                         "' is not present\n")
        sys.exit(1)
    config = SafeConfigParser()
    config.read([config_file_path])
    if 'DEFAULTS' in config.sections():
        default.update(dict(config.items('DEFAULTS')))
    if 'COLLECTOR' in config.sections():
        try:
            collector = config.get('COLLECTOR', 'server_list')
            default['collectors'] = collector.split()
        except NoOptionError:
            # server_list is optional; keep the default collectors
            pass
    SandeshConfig.update_options(sandesh_opts, config)
    # Final parser: config-derived values become argparse defaults so CLI
    # flags win over the config file
    parser = argparse.ArgumentParser(
        parents=[config_parser],
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    default.update(sandesh_opts)
    parser.set_defaults(**default)
    parser.add_argument("--rules",
                        help='Rules file to use for processing events')
    parser.add_argument(
        "--collectors", nargs='+',
        help='Collector addresses in format ip1:port1 ip2:port2')
    parser.add_argument("--log_local", action="store_true",
                        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument("--log_file",
                        help="Filename for the logs to be written to")
    parser.add_argument("--use_syslog", action="store_true",
                        help="Use syslog for logging")
    parser.add_argument("--syslog_facility",
                        help="Syslog facility to receive log lines")
    parser.add_argument("--corefile_path",
                        help="Location where coredump files are stored")
    parser.add_argument("--hostname", help="Custom Hostname")
    SandeshConfig.add_parser_arguments(parser, add_dscp=True)
    # Cassandra-specific flags only exist for database node types
    if node_type in ['contrail-database', 'contrail-config-database']:
        parser.add_argument("--minimum_diskGB", type=int,
                            dest='minimum_diskgb',
                            help="Minimum disk space in GB's")
        parser.add_argument("--hostip", help="IP address of host")
        parser.add_argument("--db_port", help="Cassandra DB cql port")
        parser.add_argument("--db_jmx_port", help="Cassandra DB jmx port")
        parser.add_argument("--db_user", help="Cassandra DB cql username")
        parser.add_argument("--db_password", help="Cassandra DB cql password")
        parser.add_argument("--db_use_ssl",
                            help="Cassandra DB behind SSL or not")
        parser.add_argument("--cassandra_repair_interval", type=int,
                            help="Time in hours to periodically run "
                            "nodetool repair for cassandra maintenance")
        parser.add_argument("--cassandra_repair_logdir",
                            help="Directory for storing repair logs")
    try:
        _args = parser.parse_args(remaining_argv)
    except Exception:
        print_usage_and_exit()
    _args.config_file_path = config_file_path
    # db_use_ssl may be the string 'True'/'False' when read from config
    _args.db_use_ssl = (str(_args.db_use_ssl).lower() == 'true')
    # done parsing arguments
    # TODO: restore rule_file logic somehow if needed for microservices
    # rule_file = _args.rules
    sys.stderr.write('ARGS: %s\n' % str(_args))
    unit_names = node_properties[node_type]['unit_names']
    setattr(_args, 'http_server_port', get_free_port())
    # Create/truncate a stash file named after the chosen port; the handle
    # itself is unused — presumably other tooling discovers the port via
    # this file. TODO confirm against consumers of port_stash.
    with open('{0}/{1}'.format(_args.port_stash, _args.http_server_port),
              'w') as p:
        sys.stderr.write('INTROSPECT PORT: %d\n' % _args.http_server_port)
    event_manager = node_properties[node_type]['event_manager'](_args,
                                                                unit_names)
    event_manager.send_init_data()
    # Run the main loop and the 60-second periodic task concurrently
    b = []
    b.append(gevent.spawn(event_manager.runforever))
    b.append(
        gevent.spawn(event_manager.run_periodically,
                     event_manager.do_periodic_events, 60))
    gevent.joinall(b)
def main():
    """Entry point for the bisque path CLI (ls/ln/cp/mv/rm subcommands).

    Reads layered defaults from .bisque / ~/.bisque / /etc/bisque config
    files, builds the argparse subcommand tree, then dispatches the chosen
    subcommand handler with an authenticated requests session.
    """
    config = SafeConfigParser()
    config.add_section('main')
    for k, v in DEFAULTS.items():
        config.set('main', k, v)
    config.read([
        '.bisque',
        os.path.expanduser('~/.bisque'),
        '/etc/bisque/bisque_config'
    ])
    defaults = dict(config.items('main'))
    defaults.update(config.items('bqpath'))
    parser = argparse.ArgumentParser(
        description=DESCRIPTION,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument('--alias',
                        help="do action on behalf of user specified")
    parser.add_argument('-d', '--debug', action="store_true", default=False,
                        help="log debugging")
    parser.add_argument('-H', '--host', default=defaults['bisque_host'],
                        help="bisque host")
    parser.add_argument('-c', '--credentials',
                        default="%s:%s" % (defaults['bisque_user'],
                                           defaults["bisque_pass"]),
                        help="user credentials")
    parser.add_argument('-C', '--compatible', action="store_true",
                        help="Make compatible with old script")
    parser.add_argument('-V', '--verbose', action='store_true',
                        help='print stuff')
    sp = parser.add_subparsers()
    lsp = sp.add_parser('ls')
    lsp.add_argument('-u', '--unique', default=None, action="store_true",
                     help="return unique codes")
    lsp.add_argument('paths', nargs='+')
    lsp.set_defaults(func=bisque_list)
    lnp = sp.add_parser('ln')
    lnp.add_argument('-T', '--tag_file', default=None,
                     help="tag document for insert")
    lnp.add_argument('-P', '--permission', default="private",
                     help="Set resource permission (compatibility)")
    lnp.add_argument('-R', '--resource', default=None,
                     help='force resource type')
    lnp.add_argument('--hidden', default=None,
                     help="Set resource visibility (hidden)")
    lnp.add_argument('paths', nargs='+')
    lnp.set_defaults(func=bisque_link)
    cpp = sp.add_parser('cp')
    cpp.add_argument('paths', nargs='+')
    cpp.add_argument('-T', '--tag_file', default=None,
                     help="tag document for insert")
    cpp.add_argument('-R', '--resource', default=None,
                     help='force resource type')
    cpp.add_argument('-P', '--permission', default="private",
                     help="Set resource permission (compatibility)")
    cpp.add_argument('--hidden', default=None,
                     help="Set resource visibility (hidden)")
    cpp.set_defaults(func=bisque_copy)
    mvp = sp.add_parser('mv')
    mvp.add_argument('paths', nargs='+')
    mvp.set_defaults(func=bisque_rename)
    rmp = sp.add_parser('rm')
    rmp.add_argument('paths', nargs='+')
    rmp.set_defaults(func=bisque_delete)
    logging.basicConfig(
        filename=config.get('bqpath', 'logfile'),
        level=logging.INFO,
        format="%(asctime)s %(levelname)-5.5s [%(name)s] %(message)s")
    log = logging.getLogger('rods2bq')
    args = parser.parse_args()
    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    # Last path is the destination when more than one path is given
    if len(args.paths) > 1:
        args.dstpath = args.paths.pop()
        args.srcpath = args.paths
    else:
        args.srcpath = args.paths
        # BUGFIX: dstpath was previously left unset on this branch, so the
        # `args.dstpath` check under --compatible raised AttributeError for
        # single-path invocations.
        args.dstpath = None
    if args.compatible:
        # Prefix bare paths with the configured irods host
        paths = []
        irods_host = defaults.get('irods_host')
        for el in args.srcpath:
            if not el.startswith('irods://'):
                paths.append(irods_host + el)
            else:
                paths.append(el)
        args.srcpath = paths
        if args.dstpath and not args.dstpath.startswith('irods://'):
            args.dstpath = irods_host + args.dstpath
    if args.debug:
        six.print_(args, file=sys.stderr)
    try:
        session = requests.Session()
        requests.packages.urllib3.disable_warnings()
        session.log = logging.getLogger('rods2bq')
        session.auth = tuple(args.credentials.split(':'))
        args.func(session, args)
    except requests.exceptions.HTTPError as e:
        log.exception("exception occurred %s : %s", e, e.response.text)
        six.print_("ERROR:", e.response and e.response.status_code)
    # NOTE(review): exits 0 even after an HTTP error — preserved as-is,
    # but a non-zero exit code would be more conventional.
    sys.exit(0)
def parse_args(args_str):
    """Parse schema-transformer options layered over optional ini files.

    Precedence (lowest to highest): hard-coded defaults < sandesh defaults <
    config-file sections (DEFAULTS/SECURITY/KEYSTONE/CASSANDRA/ZOOKEEPER) <
    command-line flags. Returns the argparse Namespace with list and bool
    values normalized and the original -c/--conf_file value restored.
    """
    # Turn off help, so we all options in response to -h
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--conf_file", action='append',
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str.split())
    defaults = {
        'rabbit_server': 'localhost',
        'rabbit_port': '5672',
        'rabbit_user': '******',
        'rabbit_password': '******',
        'rabbit_vhost': None,
        'rabbit_ha_mode': False,
        'cassandra_server_list': '127.0.0.1:9160',
        'api_server_ip': '127.0.0.1',
        'api_server_port': '8082',
        'api_server_use_ssl': None,
        'zk_server_ip': '127.0.0.1',
        'zk_server_port': '2181',
        'collectors': None,
        'http_server_port': '8087',
        'http_server_ip': '0.0.0.0',
        'log_local': False,
        'log_level': SandeshLevel.SYS_DEBUG,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'trace_file': '/var/log/contrail/schema.err',
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'cluster_id': '',
        'logging_conf': '',
        'logger_class': None,
        'bgpaas_port_start': 50000,
        'bgpaas_port_end': 50512,
        'rabbit_use_ssl': False,
        'kombu_ssl_version': '',
        'kombu_ssl_keyfile': '',
        'kombu_ssl_certfile': '',
        'kombu_ssl_ca_certs': '',
        'zk_timeout': 120,
        'logical_routers_enabled': True,
        'yield_in_evaluate': False,
    }
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
    # Per-section option groups merged from the config file below
    secopts = {
        'use_certs': False,
        'keyfile': '',
        'certfile': '',
        'ca_certs': '',
    }
    ksopts = {
        'admin_user': '******',
        'admin_password': '******',
        'admin_tenant_name': 'default-domain'
    }
    cassandraopts = {
        'cassandra_user': None,
        'cassandra_password': None,
    }
    zookeeperopts = {
        'zookeeper_ssl_enable': False,
        'zookeeper_ssl_keyfile': None,
        'zookeeper_ssl_certificate': None,
        'zookeeper_ssl_ca_cert': None,
    }
    sandeshopts = SandeshConfig.get_default_options()
    # parse_args below clobbers conf_file with parser defaults; keep a copy
    saved_conf_file = args.conf_file
    if args.conf_file:
        config = SafeConfigParser()
        config.read(args.conf_file)
        defaults.update(dict(config.items("DEFAULTS")))
        # SECURITY options only apply when use_certs is explicitly enabled
        if ('SECURITY' in config.sections() and
                'use_certs' in config.options('SECURITY')):
            if config.getboolean('SECURITY', 'use_certs'):
                secopts.update(dict(config.items("SECURITY")))
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))
        if 'CASSANDRA' in config.sections():
            cassandraopts.update(dict(config.items('CASSANDRA')))
        if 'ZOOKEEPER' in config.sections():
            zookeeperopts.update(dict(config.items('ZOOKEEPER')))
        SandeshConfig.update_options(sandeshopts, config)
    # Override with CLI options
    # Don't surpress add_help here so it will handle -h
    parser = argparse.ArgumentParser(
        # Inherit options from config_parser
        parents=[conf_parser],
        # print script description with -h/--help
        description=__doc__,
        # Don't mess with format of description
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(secopts)
    defaults.update(ksopts)
    defaults.update(cassandraopts)
    defaults.update(sandeshopts)
    parser.set_defaults(**defaults)

    def _bool(s):
        """Convert string to bool (in argparse context)."""
        if s.lower() not in ['true', 'false']:
            raise ValueError('Need bool; got %r' % s)
        return {'true': True, 'false': False}[s.lower()]

    parser.add_argument(
        "--cassandra_server_list",
        help="List of cassandra servers in IP Address:Port format",
        nargs='+')
    parser.add_argument(
        "--reset_config", action="store_true",
        help="Warning! Destroy previous configuration and start clean")
    parser.add_argument("--api_server_ip", help="IP address of API server")
    parser.add_argument("--api_server_port", help="Port of API server")
    parser.add_argument("--api_server_use_ssl",
                        help="Use SSL to connect with API server")
    parser.add_argument("--zk_server_ip",
                        help="IP address:port of zookeeper server")
    parser.add_argument("--collectors",
                        help="List of VNC collectors in ip:port format",
                        nargs="+")
    parser.add_argument("--http_server_port",
                        help="Port of local HTTP server")
    parser.add_argument("--http_server_ip", help="IP of local HTTP server")
    parser.add_argument("--log_local", action="store_true",
                        help="Enable local logging of sandesh messages")
    parser.add_argument(
        "--log_level",
        help="Severity level for local logging of sandesh messages")
    parser.add_argument(
        "--log_category",
        help="Category filter for local logging of sandesh messages")
    parser.add_argument("--log_file",
                        help="Filename for the logs to be written to")
    parser.add_argument("--trace_file", help="Filename for the error "
                        "backtraces to be written to")
    parser.add_argument("--use_syslog", action="store_true",
                        help="Use syslog for logging")
    parser.add_argument("--syslog_facility",
                        help="Syslog facility to receive log lines")
    parser.add_argument("--admin_user", help="Name of keystone admin user")
    parser.add_argument("--admin_password",
                        help="Password of keystone admin user")
    parser.add_argument("--admin_tenant_name",
                        help="Tenant name for keystone admin user")
    parser.add_argument("--cluster_id",
                        help="Used for database keyspace separation")
    parser.add_argument(
        "--logging_conf",
        help=("Optional logging configuration file, default: None"))
    parser.add_argument(
        "--logger_class",
        help=("Optional external logger class, default: None"))
    parser.add_argument("--cassandra_user", help="Cassandra user name")
    parser.add_argument("--cassandra_password", help="Cassandra password")
    parser.add_argument("--rabbit_server", help="Rabbitmq server address")
    parser.add_argument("--rabbit_port", help="Rabbitmq server port")
    parser.add_argument("--rabbit_user", help="Username for rabbit")
    parser.add_argument("--rabbit_vhost", help="vhost for rabbit")
    parser.add_argument("--rabbit_password", help="password for rabbit")
    parser.add_argument("--rabbit_ha_mode", action='store_true',
                        help="True if the rabbitmq cluster is "
                        "mirroring all queue")
    parser.add_argument("--bgpaas_port_start", type=int,
                        help="Start port for bgp-as-a-service proxy")
    parser.add_argument("--bgpaas_port_end", type=int,
                        help="End port for bgp-as-a-service proxy")
    parser.add_argument("--zk_timeout", type=int,
                        help="Timeout for ZookeeperClient")
    parser.add_argument("--yield_in_evaluate", type=_bool,
                        help="Yield for other greenlets during evaluate")
    parser.add_argument("--logical_routers_enabled", type=_bool,
                        help="Enabled logical routers")
    parser.add_argument("--cassandra_use_ssl", action="store_true",
                        help="Enable TLS for cassandra communication")
    parser.add_argument("--cassandra_ca_certs", help="Cassandra CA certs")
    parser.add_argument("--zookeeper_ssl_enable",
                        help="Enable SSL in rest api server")
    parser.add_argument("--zookeeper_insecure_enable",
                        help="Enable insecure mode")
    parser.add_argument("--zookeeper_ssl_certfile",
                        help="Location of zookeeper ssl host certificate")
    parser.add_argument("--zookeeper_ssl_keyfile",
                        help="Location of zookeeper ssl private key")
    parser.add_argument("--zookeeper_ssl_ca_cert", type=str,
                        help="Location of zookeeper ssl CA certificate")
    SandeshConfig.add_parser_arguments(parser)
    args = parser.parse_args(remaining_argv)
    args.conf_file = saved_conf_file
    # Values sourced from an ini file arrive as strings; normalize to lists
    if isinstance(args.cassandra_server_list, string_types):
        args.cassandra_server_list = args.cassandra_server_list.split()
    if isinstance(args.collectors, string_types):
        args.collectors = args.collectors.split()
    args.sandesh_config = SandeshConfig.from_parser_arguments(args)
    # Booleans may arrive as the strings 'True'/'False' from the ini file
    args.cassandra_use_ssl = (str(args.cassandra_use_ssl).lower() == 'true')
    args.zookeeper_ssl_enable = (str(
        args.zookeeper_ssl_enable).lower() == 'true')
    return args
def _parse_args(self, args=None):
    """Parse CLI options, resolve config-file overrides, and build a client.

    Returns an ``(options, args)`` pair, or ``(None, None)`` when the run
    should stop early (parse error, --version, --help, or missing auth URL).
    Side effects: populates ``self.context`` (conf, eventlet flag, client
    manager, concurrency, verbosity).
    """
    self.context.original_begin = time.time()
    self.context.original_args = args if args is not None else sys.argv[1:]
    self.option_parser.disable_interspersed_args()
    try:
        options, args = self.option_parser.parse_args(
            self.context.original_args)
    except UnboundLocalError:
        # Happens sometimes with an error handler that doesn't raise its
        # own exception. We'll catch the error below with
        # error_encountered.
        pass
    self.option_parser.enable_interspersed_args()
    if self.option_parser.error_encountered:
        return None, None
    if options.version:
        self.option_parser.print_version()
        return None, None
    if not args or options.help:
        self.option_parser.print_help()
        return None, None
    # args is non-empty here, so the slice below is well-defined
    self.context.original_main_args = \
        self.context.original_args[:-len(args)]
    self.context.conf = {}
    if options.conf is None:
        options.conf = os.environ.get('SWIFTLY_CONF', '~/.swiftly.conf')
    # Best-effort read of the conf file; an unparsable file is ignored
    try:
        conf_parser = SafeConfigParser()
        conf_parser.read(os.path.expanduser(options.conf))
        for section in conf_parser.sections():
            self.context.conf[section] = dict(conf_parser.items(section))
    except ConfigParserError:
        pass
    # Fill in unset options from the [swiftly] conf section
    for option_name in ('auth_url', 'auth_user', 'auth_key', 'auth_tenant',
                        'auth_methods', 'region', 'direct', 'local',
                        'proxy', 'snet', 'no_snet', 'retries', 'cache_auth',
                        'no_cache_auth', 'cdn', 'no_cdn', 'concurrency',
                        'eventlet', 'no_eventlet', 'verbose', 'no_verbose',
                        'direct_object_ring', 'insecure', 'bypass_url'):
        self._resolve_option(options, option_name, 'swiftly')
    # Conf-sourced values are strings; coerce boolean options...
    for option_name in ('snet', 'no_snet', 'cache_auth', 'no_cache_auth',
                        'cdn', 'no_cdn', 'eventlet', 'no_eventlet',
                        'verbose', 'no_verbose', 'insecure'):
        if isinstance(getattr(options, option_name), six.string_types):
            setattr(options, option_name,
                    getattr(options, option_name).lower() in TRUE_VALUES)
    # ...and integer options
    for option_name in ('retries', 'concurrency'):
        if isinstance(getattr(options, option_name), six.string_types):
            setattr(options, option_name,
                    int(getattr(options, option_name)))
    # Apply hard defaults for anything still unset
    if options.snet is None:
        options.snet = False
    if options.no_snet is None:
        options.no_snet = False
    if options.retries is None:
        options.retries = 4
    if options.cache_auth is None:
        options.cache_auth = False
    if options.no_cache_auth is None:
        options.no_cache_auth = False
    if options.cdn is None:
        options.cdn = False
    if options.no_cdn is None:
        options.no_cdn = False
    if options.concurrency is None:
        options.concurrency = 1
    if options.eventlet is None:
        options.eventlet = False
    if options.no_eventlet is None:
        options.no_eventlet = False
    if options.verbose is None:
        options.verbose = False
    if options.no_verbose is None:
        options.no_verbose = False
    if options.insecure is None:
        options.insecure = False
    # Decide whether to use eventlet: explicit flags win; otherwise only
    # auto-enable when a new-enough eventlet is importable
    self.context.eventlet = None
    if options.eventlet:
        self.context.eventlet = True
    if options.no_eventlet:
        self.context.eventlet = False
    if self.context.eventlet is None:
        self.context.eventlet = False
        try:
            import eventlet
            # Eventlet 0.11.0 fixed the CPU bug
            if eventlet.__version__ >= '0.11.0':
                self.context.eventlet = True
        except ImportError:
            pass
    # Pick the matching subprocess implementation (green when on eventlet)
    subprocess_module = None
    if self.context.eventlet:
        try:
            import eventlet.green.subprocess
            subprocess_module = eventlet.green.subprocess
        except ImportError:
            pass
    if subprocess_module is None:
        import subprocess
        subprocess_module = subprocess
    self.context.io_manager.subprocess_module = subprocess_module
    if options.verbose:
        self.context.verbosity = 1
        self.context.verbose = self._verbose
        self.context.io_manager.verbose = functools.partial(
            self._verbose, skip_sub_command=True)
    options.retries = int(options.retries)
    # Select the client backend: help needs none; otherwise local path,
    # direct proxy storage path, or the standard authenticated client
    if args and args[0] == 'help':
        return options, args
    elif options.local:
        self.context.client_manager = ClientManager(
            LocalClient, local_path=options.local, verbose=self._verbose)
    elif options.direct:
        self.context.client_manager = ClientManager(
            DirectClient, swift_proxy_storage_path=options.direct,
            attempts=options.retries + 1, eventlet=self.context.eventlet,
            verbose=self._verbose,
            direct_object_ring=options.direct_object_ring)
    else:
        auth_cache_path = None
        if options.cache_auth:
            auth_cache_path = os.path.join(
                tempfile.gettempdir(),
                '%s.swiftly' % os.environ.get('USER', 'user'))
        if not options.auth_url:
            with self.context.io_manager.with_stderr() as fp:
                fp.write('No Auth URL has been given.\n')
                fp.flush()
            return None, None
        self.context.client_manager = ClientManager(
            StandardClient, auth_methods=options.auth_methods,
            auth_url=options.auth_url, auth_tenant=options.auth_tenant,
            auth_user=options.auth_user, auth_key=options.auth_key,
            auth_cache_path=auth_cache_path, region=options.region,
            snet=options.snet, attempts=options.retries + 1,
            eventlet=self.context.eventlet, verbose=self._verbose,
            http_proxy=options.proxy, insecure=options.insecure,
            bypass_url=options.bypass_url)
    self.context.cdn = options.cdn
    self.context.concurrency = int(options.concurrency)
    return options, args
def items(self, name):
    """Return the option/value pairs of the section that *name* maps to.

    The caller-facing *name* is translated to the real section name via
    ``_get_section_name`` before delegating to ``SafeConfigParser.items``.
    """
    return SafeConfigParser.items(self, self._get_section_name(name))
def parse_logger_args(self, config_args):
    """Build a sandesh-logger argument Namespace from a JSON config string.

    :param config_args: JSON-encoded dict; may carry
        ``fabric_ansible_conf_file`` plus collector/host/zookeeper settings
        that override the parsed defaults.
    :returns: argparse Namespace (also stored on ``self.args``)
    """
    config_args = json.loads(config_args)
    parser = argparse.ArgumentParser()
    defaults = {
        'collectors': None,
        'http_server_port': '-1',
        'log_local': False,
        'log_level': SandeshLevel.SYS_DEBUG,
        'log_category': '',
        'log_file': Sandesh._DEFAULT_LOG_FILE,
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'cluster_id': '',
        'logging_conf': '',
        'logger_class': None,
        'max_job_task': self.TASK_POOL_SIZE,
        'playbook_timeout': self.PLAYBOOK_TIMEOUT_VALUE,
    }
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
    secopts = {
        'use_certs': False,
        'keyfile': '',
        'certfile': '',
        'ca_certs': '',
    }
    ksopts = {}
    sandeshopts = SandeshConfig.get_default_options()
    # Optional ini file layered over the hard-coded defaults
    if config_args.get("fabric_ansible_conf_file"):
        config = SafeConfigParser()
        config.read(config_args['fabric_ansible_conf_file'])
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items("DEFAULTS")))
        # SECURITY options only apply when use_certs is explicitly enabled
        if ('SECURITY' in config.sections() and
                'use_certs' in config.options('SECURITY')):
            if config.getboolean('SECURITY', 'use_certs'):
                secopts.update(dict(config.items("SECURITY")))
        if 'KEYSTONE' in config.sections():
            ksopts.update(dict(config.items("KEYSTONE")))
        SandeshConfig.update_options(sandeshopts, config)
    defaults.update(secopts)
    defaults.update(ksopts)
    defaults.update(sandeshopts)
    parser.set_defaults(**defaults)
    parser.add_argument("--collectors",
                        help="List of VNC collectors in ip:port format",
                        nargs="+")
    parser.add_argument("--http_server_port",
                        help="Port of local HTTP server")
    parser.add_argument("--log_local", action="store_true",
                        help="Enable local logging of sandesh messages")
    parser.add_argument("--log_level",
                        help="Severity level for local logging"
                        " of sandesh messages")
    parser.add_argument("--log_category",
                        help="Category filter for local logging "
                        "of sandesh messages")
    parser.add_argument("--log_file",
                        help="Filename for the logs to be written to")
    parser.add_argument("--use_syslog", action="store_true",
                        help="Use syslog for logging")
    parser.add_argument("--syslog_facility",
                        help="Syslog facility to receive log lines")
    parser.add_argument("--admin_user", help="Name of keystone admin user")
    parser.add_argument("--admin_password",
                        help="Password of keystone admin user")
    parser.add_argument("--admin_tenant_name",
                        help="Tenant name for keystone admin user")
    parser.add_argument("--cluster_id",
                        help="Used for database keyspace separation")
    parser.add_argument("--logging_conf",
                        help=("Optional logging configuration "
                              "file, default: None"))
    parser.add_argument("--logger_class",
                        help=("Optional external logger class,"
                              " default: None"))
    parser.add_argument("--max_job_task",
                        help=("Maximum job tasks that can execute in "
                              "parallel in a parent job, default: %s" %
                              self.TASK_POOL_SIZE))
    parser.add_argument("--playbook_timeout",
                        help=("Playbook execution timeout value,"
                              " default: 60 min"))
    SandeshConfig.add_parser_arguments(parser)
    # No real argv here: parse an empty list so set_defaults() wins
    args = parser.parse_args(list())
    # Values passed in the JSON payload override the parsed defaults
    args.conf_file = config_args.get('fabric_ansible_conf_file')
    args.collectors = config_args.get('collectors')
    args.host_ip = config_args.get('host_ip')
    args.zk_server_ip = config_args.get('zk_server_ip')
    args.cluster_id = config_args.get('cluster_id')
    if isinstance(args.collectors, str):
        args.collectors = args.collectors.split()
    args.sandesh_config = SandeshConfig.from_parser_arguments(args)
    self.args = args
    return args
def configparser2yaml(cpfile):
    """Convert a legacy .ini metadata control file to a YAML string.

    Nests ``contact:*`` and ``distribution:*`` sections, restructures the
    ``identification`` section's flat keyword options into a nested
    ``keywords`` mapping, and serializes with ``yaml.safe_dump``.
    """
    dict_ = {}
    cp = ConfigParser()
    with codecs.open(cpfile, encoding='utf-8') as fh:
        cp.readfp(fh)
    for section in cp.sections():
        if section.startswith('contact:'):
            # contacts are now nested: [contact:main] -> contact.main
            if 'contact' not in dict_:
                dict_['contact'] = {}
            section2 = dict_['contact'][section.split(':')[1]] = {}
        elif section.startswith('distribution:'):
            # distributions now nested
            if 'distribution' not in dict_:
                dict_['distribution'] = {}
            section2 = dict_['distribution'][section.split(':')[1]] = {}
        else:
            section2 = dict_[section] = {}
        for k, v in cp.items(section):
            if section == 'identification':
                # keywords are now nested; pre-create each keyword bucket
                # so the per-key assignments below cannot KeyError
                if 'keywords' not in section2:
                    section2['keywords'] = {}
                if 'default' not in section2['keywords']:
                    section2['keywords']['default'] = {}
                if 'gc_cst' not in section2['keywords']:
                    section2['keywords']['gc_cst'] = {}
                if 'wmo' not in section2['keywords']:
                    section2['keywords']['wmo'] = {}
                if 'hnap_category_information' not in section2['keywords']:
                    section2['keywords']['hnap_category_information'] = {}
                if 'hnap_category_geography' not in section2['keywords']:
                    section2['keywords']['hnap_category_geography'] = {}
                if 'hnap_category_content' not in section2['keywords']:
                    section2['keywords']['hnap_category_content'] = {}
            if k in ['topiccategory']:
                section2['topiccategory'] = [v]
            if k in ['keywords_en', 'keywords_fr']:
                section2['keywords']['default'][k] = [
                    k2.strip() for k2 in v.split(',')
                ]  # noqa
            if k in ['keywords_gc_cst_en']:
                section2['keywords']['gc_cst']['keywords_en'] = [
                    k2.strip() for k2 in v.split(',')
                ]  # noqa
            if k in ['keywords_gc_cst_fr']:
                section2['keywords']['gc_cst']['keywords_fr'] = [
                    k2.strip() for k2 in v.split(',')
                ]  # noqa
            if k in ['keywords_wmo']:
                # NOTE(review): only keywords_en is populated for wmo —
                # presumably WMO keywords are untranslated; confirm
                section2['keywords']['wmo']['keywords_en'] = [
                    k2.strip() for k2 in v.split(',')
                ]  # noqa
            if k in ['hnap_category_information_en']:
                # the _en value is copied into both languages — TODO confirm
                section2['keywords']['hnap_category_information'][
                    'keywords_en'] = [v]  # noqa
                section2['keywords']['hnap_category_information'][
                    'keywords_fr'] = [v]  # noqa
            if k in ['hnap_category_geography_en']:
                section2['keywords']['hnap_category_geography'][
                    'keywords_en'] = [v]  # noqa
            if k in ['hnap_category_geography_fr']:
                section2['keywords']['hnap_category_geography'][
                    'keywords_fr'] = [v]  # noqa
            if k in ['hnap_category_content_en']:
                section2['keywords']['hnap_category_content'][
                    'keywords_en'] = [v]  # noqa
            if k in ['hnap_category_content_fr']:
                section2['keywords']['hnap_category_content'][
                    'keywords_fr'] = [v]  # noqa
            if k == 'keywords_type':
                # keywords_type applies to every keyword bucket
                section2['keywords']['default'][k] = v
                section2['keywords']['gc_cst'][k] = v
                section2['keywords']['wmo'][k] = v
                section2['keywords']['hnap_category_geography'][k] = v
                section2['keywords']['hnap_category_information'][k] = v
                section2['keywords']['hnap_category_content'][k] = v
            else:
                # NOTE(review): this else pairs with the keywords_type test,
                # so every other key is also copied flat into the section —
                # confirm intended for the keyword_* keys above
                section2[k] = v
    return yaml.safe_dump(dict_, default_flow_style=False, allow_unicode=True)
def _write_locked(filename, content):
    """Create *filename* with *content* under a LockFile; never overwrite.

    :param filename: absolute path of the file to create
    :param content: text written (via print, so a trailing newline is added)
    :raises IOError: (17, 'File exists') if the target already exists
    """
    lock = LockFile(filename)
    lock.acquire()
    try:
        if os.path.exists(filename):
            # was raise IOError([17, ...]): errno/message belong as two args
            raise IOError(17, 'File exists')
        with open(filename, 'w') as fp:
            print(content, file=fp)
    finally:
        # always release, whether we raised or wrote successfully
        lock.release()


def deploy_django(proj):
    """Deploy a Django project into a dedicated virtualenv.

    Creates the project directory under WSGI_BASE_PATH, builds a
    virtualenv, checks out the sources, runs the project build script,
    installs extra deploy requirements, writes the production settings,
    WSGI script and apache location conf, then runs the configured
    django management commands.

    :param proj: project name; ``<proj>.cfg`` in the current directory
                 supplies the deployment configuration.
    :returns: True on success.
    :raises IOError: if a generated file already exists.
    """
    # Deployment locations, all overridable through the environment
    wsgi_base_path = os.environ.get('WSGI_BASE_PATH', '/var/www/wsgi')
    httpd_conf_dir = os.environ.get('HTTPD_CONF_DIR',
                                    '/etc/httpd/locations.d')
    httpd_host = os.environ.get('HTTPD_HOST', platform.node())
    httpd_media_base = os.environ.get('HTTPD_MEDIA_BASE',
                                      '/var/www/html/media')
    httpd_static_base = os.environ.get('HTTPD_STATIC_BASE',
                                       '/var/www/html/static')
    secret_key_gen = os.environ.get('SECRET_KEY_GEN',
                                    '/usr/bin/pwgen -c -n -y 78 1')

    proj_base = os.path.join(wsgi_base_path, proj)

    def path(p):
        """Resolve *p* inside the project base directory."""
        return os.path.join(proj_base, p)

    # NOTE(review): check_output returns bytes on Python 3 — the str
    # .replace/.strip chain assumes Python 2 semantics; verify target.
    proj_defaults = {
        'name': proj,
        'proj_base': proj_base,
        'dst': '%(name)s-project',
        'settings': '%(name)s_production',
        'url': '/%(name)s',
        'build': 'build/build.sh',
        'wsgi': 'wsgi.py',
        'allowed_hosts': httpd_host,
        'secret_key': subprocess.check_output(
            secret_key_gen.split()).strip().replace("'", "-"),
        'media_root': os.path.join(httpd_media_base, proj),
        'static_root': os.path.join(httpd_static_base, proj),
        'scm': '/usr/bin/git',
        'settings_append': DEFAULT_SETTINGS_APPEND,
        'deploy_requires': None,
        'deploy_commands': ['migrate'],
    }
    # Protect '%' from ConfigParser interpolation
    proj_defaults['secret_key'] = re.sub(r'%', r'',
                                         proj_defaults['secret_key'])
    # Choose the clone subcommand matching the scm binary name
    proj_defaults['scm_clone'] = SCM_DEFAULT_CHECKOUT[os.path.split(
        proj_defaults['scm'])[-1]]

    # Load defaults, then force-read the per-project config
    # (with-block fixes the previously leaked file handle)
    cfg = SafeConfigParser(proj_defaults)
    with open(proj + '.cfg', 'r') as cfg_fh:
        cfg.readfp(cfg_fh)

    # Create directory
    os.mkdir(proj_base)
    # Virtualenv
    virtualenv.create_environment(proj_base)
    # Checkout
    subprocess.check_call([
        cfg.get(CFG_SECTION, 'scm'),
        cfg.get(CFG_SECTION, 'scm_clone'),
        cfg.get(CFG_SECTION, 'src'),
        path(cfg.get(CFG_SECTION, 'dst')),
    ])
    # Build: run the project's build script with the venv activated
    activate = path('bin/activate')
    build = os.path.join(
        cfg.get(CFG_SECTION, 'dst'),
        cfg.get(CFG_SECTION, 'build')
    )
    subprocess.check_call([build], cwd=proj_base, env={'BASH_ENV': activate})

    # Install extra deploy requirements into the venv, if configured
    deploy_requires = cfg.get(CFG_SECTION, 'deploy_requires')
    if deploy_requires:
        logger.debug('Installing: %s', deploy_requires)
        cmd = [os.path.join(virtualenv.path_locations(proj_base)[-1], 'pip'),
               'install']
        cmd.extend(parse_list(deploy_requires))
        subprocess.check_call(cmd)

    # Create settings module
    _write_locked(path(cfg.get(CFG_SECTION, 'settings')) + '.py',
                  DJANGO_SETTINGS_TEMPLATE % dict(cfg.items(CFG_SECTION)))

    # Create wsgi script
    _write_locked(path(cfg.get(CFG_SECTION, 'wsgi')),
                  WSGI_TEMPLATE % dict(cfg.items(CFG_SECTION)))

    # Create apache conf (needs site-packages path for the venv)
    conf = dict(cfg.items(CFG_SECTION))
    conf['site_libs'] = os.path.join(
        virtualenv.path_locations(proj_base)[1], 'site-packages')
    _write_locked(
        os.path.join(httpd_conf_dir, cfg.get(CFG_SECTION, 'name')) + '.conf',
        HTTPD_CONF_TEMPLATE % conf)

    # Perform django management commands inside the checkout
    deploy_commands = cfg.get(CFG_SECTION, 'deploy_commands')
    if deploy_commands:
        manage = [os.path.join(virtualenv.path_locations(proj_base)[-1],
                               virtualenv.expected_exe),
                  'manage.py']
        os.chdir(path(cfg.get(CFG_SECTION, 'dst')))
        # Deployment django environment
        dep_env = os.environ.copy()
        dep_env['DJANGO_SETTINGS_MODULE'] = cfg.get(CFG_SECTION, 'settings')
        dep_env['PYTHONPATH'] = path('.')
        logger.debug('Environment for commands: PYTHONPATH=%s',
                     dep_env['PYTHONPATH'])
        logger.debug(' Django settings: %s',
                     dep_env['DJANGO_SETTINGS_MODULE'])
        for cmd in parse_list(deploy_commands):
            logger.debug("Executing '%s'", ' '.join(manage + [cmd]))
            subprocess.check_call(manage + cmd.split(), env=dep_env)

    # That's it. Remember to reload apache
    print('You should reload apache:\n', '\t', 'systemctl reload httpd')
    return True
def _parse_args(self, args=None):
    """Parse command-line arguments and prime ``self.context``.

    Parses the option set, loads the swiftly conf file, resolves option
    defaults, normalizes string booleans/ints, decides whether to use
    eventlet, and constructs the appropriate ClientManager (local,
    direct, or standard auth-based).

    :param args: argument list to parse; defaults to ``sys.argv[1:]``.
    :returns: ``(options, args)`` on success, or ``(None, None)`` when
              parsing failed or help/version output was produced.
    """
    self.context.original_begin = time.time()
    self.context.original_args = args if args is not None else sys.argv[1:]
    self.option_parser.disable_interspersed_args()
    try:
        options, args = self.option_parser.parse_args(
            self.context.original_args)
    except UnboundLocalError:
        # Happens sometimes with an error handler that doesn't raise its
        # own exception. We'll catch the error below with
        # error_encountered.
        pass
    self.option_parser.enable_interspersed_args()
    if self.option_parser.error_encountered:
        return None, None
    if options.version:
        self.option_parser.print_version()
        return None, None
    if not args or options.help:
        self.option_parser.print_help()
        return None, None
    # Everything before the first positional arg is the "main" option set
    self.context.original_main_args = self.context.original_args[
        :-len(args)]
    self.context.conf = {}
    if options.conf is None:
        options.conf = os.environ.get('SWIFTLY_CONF', '~/.swiftly.conf')
    # Best-effort conf load: a missing or malformed file is ignored
    try:
        conf_parser = SafeConfigParser()
        conf_parser.read(os.path.expanduser(options.conf))
        for section in conf_parser.sections():
            self.context.conf[section] = dict(conf_parser.items(section))
    except ConfigParserError:
        pass
    # Fill unset options from the [swiftly] conf section
    for option_name in (
            'auth_url', 'auth_user', 'auth_key', 'auth_tenant',
            'auth_methods', 'region', 'direct', 'local', 'proxy', 'snet',
            'no_snet', 'retries', 'cache_auth', 'no_cache_auth', 'cdn',
            'no_cdn', 'concurrency', 'eventlet', 'no_eventlet', 'verbose',
            'no_verbose', 'direct_object_ring'):
        self._resolve_option(options, option_name, 'swiftly')
    # Conf-sourced values arrive as strings; coerce booleans ...
    for option_name in (
            'snet', 'no_snet', 'cache_auth', 'no_cache_auth', 'cdn',
            'no_cdn', 'eventlet', 'no_eventlet', 'verbose', 'no_verbose'):
        if isinstance(getattr(options, option_name), six.string_types):
            setattr(
                options, option_name,
                getattr(options, option_name).lower() in TRUE_VALUES)
    # ... and integers
    for option_name in ('retries', 'concurrency'):
        if isinstance(getattr(options, option_name), six.string_types):
            setattr(
                options, option_name, int(getattr(options, option_name)))
    # Hard defaults for anything still unset
    if options.snet is None:
        options.snet = False
    if options.no_snet is None:
        options.no_snet = False
    if options.retries is None:
        options.retries = 4
    if options.cache_auth is None:
        options.cache_auth = False
    if options.no_cache_auth is None:
        options.no_cache_auth = False
    if options.cdn is None:
        options.cdn = False
    if options.no_cdn is None:
        options.no_cdn = False
    if options.concurrency is None:
        options.concurrency = 1
    if options.eventlet is None:
        options.eventlet = False
    if options.no_eventlet is None:
        options.no_eventlet = False
    if options.verbose is None:
        options.verbose = False
    if options.no_verbose is None:
        options.no_verbose = False
    # Decide eventlet usage: explicit flags win; otherwise auto-detect
    self.context.eventlet = None
    if options.eventlet:
        self.context.eventlet = True
    if options.no_eventlet:
        self.context.eventlet = False
    if self.context.eventlet is None:
        self.context.eventlet = False
        try:
            import eventlet
            # Eventlet 0.11.0 fixed the CPU bug
            # NOTE(review): lexicographic string compare — '0.9.0' >=
            # '0.11.0' evaluates True here; verify whether a real version
            # comparison was intended.
            if eventlet.__version__ >= '0.11.0':
                self.context.eventlet = True
        except ImportError:
            pass
    # Prefer the green subprocess module when running under eventlet
    subprocess_module = None
    if self.context.eventlet:
        try:
            import eventlet.green.subprocess
            subprocess_module = eventlet.green.subprocess
        except ImportError:
            pass
    if subprocess_module is None:
        import subprocess
        subprocess_module = subprocess
    self.context.io_manager.subprocess_module = subprocess_module
    if options.verbose:
        self.context.verbosity = 1
        self.context.verbose = self._verbose
        self.context.io_manager.verbose = functools.partial(
            self._verbose, skip_sub_command=True)
    options.retries = int(options.retries)
    if args and args[0] == 'help':
        # 'help' needs no client; skip client construction entirely
        return options, args
    elif options.local:
        self.context.client_manager = ClientManager(
            LocalClient, local_path=options.local, verbose=self._verbose)
    elif options.direct:
        self.context.client_manager = ClientManager(
            DirectClient, swift_proxy_storage_path=options.direct,
            attempts=options.retries + 1, eventlet=self.context.eventlet,
            verbose=self._verbose,
            direct_object_ring=options.direct_object_ring)
    else:
        auth_cache_path = None
        if options.cache_auth:
            auth_cache_path = os.path.join(
                tempfile.gettempdir(),
                '%s.swiftly' % os.environ.get('USER', 'user'))
        if not options.auth_url:
            with self.context.io_manager.with_stderr() as fp:
                fp.write('No Auth URL has been given.\n')
                fp.flush()
            return None, None
        self.context.client_manager = ClientManager(
            StandardClient, auth_methods=options.auth_methods,
            auth_url=options.auth_url, auth_tenant=options.auth_tenant,
            auth_user=options.auth_user, auth_key=options.auth_key,
            auth_cache_path=auth_cache_path, region=options.region,
            snet=options.snet, attempts=options.retries + 1,
            eventlet=self.context.eventlet, verbose=self._verbose,
            http_proxy=options.proxy)
    self.context.cdn = options.cdn
    self.context.concurrency = int(options.concurrency)
    return options, args
def items(self, name): section_name = self._get_section_name(name) return SafeConfigParser.items(self, section_name)
class PackageConfigHandler(object):
    """
    Manager class for packages files for tracking installation of modules
    """

    def __init__(self):
        # noinspection PyUnresolvedReferences
        # NOTE(review): SafeConfigParser was removed from configparser in
        # Python 3.12; six maps to it directly — verify the target runtime.
        from six.moves.configparser import SafeConfigParser
        # registry file lives alongside the installed packages
        self.package_cfg = os.path.expanduser(
            '~/Documents/site-packages/.pypi_packages')
        if not os.path.isfile(self.package_cfg):
            print('Creating package file')
            # just create an empty registry file
            with open(self.package_cfg, 'w'):
                pass
        self.parser = SafeConfigParser()
        self.parser.read(self.package_cfg)

    def save(self):
        """Persist the in-memory registry back to the package file."""
        with open(self.package_cfg, 'w') as outs:
            self.parser.write(outs)

    def add_module(self, pkg_info):
        """Register (or update) a module and persist the registry.

        :param pkg_info: A dict that has name, url, version, summary,
                         files and dependency keys (all string values)
        """
        name = pkg_info['name']
        if not self.parser.has_section(name):
            self.parser.add_section(name)
        # copy the tracked fields verbatim into the module's section
        for field in ('url', 'version', 'summary', 'files', 'dependency'):
            self.parser.set(name, field, pkg_info[field])
        self.save()

    def list_modules(self):
        """Return the names of all registered modules."""
        return list(self.parser.sections())

    def module_exists(self, name):
        """Return True if *name* is registered."""
        return self.parser.has_section(name)

    def get_info(self, name):
        """Return the option/value mapping stored for *name*, or None."""
        if self.parser.has_section(name):
            return dict(self.parser.items(name))
        return None

    def remove_module(self, name):
        """Drop *name* from the registry and persist the change."""
        self.parser.remove_section(name)
        self.save()

    def get_files_installed(self, section_name):
        """Return the list of files recorded for the module, or None
        when no 'files' option was stored."""
        if self.parser.has_option(section_name, 'files'):
            files = self.parser.get(section_name, 'files').strip()
            return files.split(',')
        return None

    def get_dependencies(self, section_name):
        """Return the module's dependencies as a set (empty set when the
        stored value is blank), or None when unrecorded."""
        if self.parser.has_option(section_name, 'dependency'):
            dependencies = self.parser.get(section_name, 'dependency').strip()
            return set(dependencies.split(',')) if dependencies != '' else set()
        return None

    def get_all_dependencies(self, exclude_module=()):
        """Union of the dependencies of every registered module not
        listed in *exclude_module*."""
        all_dependencies = set()
        for section_name in self.parser.sections():
            if (section_name not in exclude_module
                    and self.parser.has_option(section_name, 'dependency')):
                dependencies = self.parser.get(
                    section_name, 'dependency').strip()
                if dependencies != '':
                    all_dependencies.update(dependencies.split(','))
        return all_dependencies