def read_conf():
    """Read MongoDB and Solr server settings from conf/settings.ini.

    Returns:
        tuple: (mongodb, solr) dicts mapping option name -> value for the
        'MongoDBServer' and 'SolrServer' sections.  A missing section now
        yields an empty dict (previously the return raised
        UnboundLocalError because the name was never assigned).
    """
    config_file = "conf/settings.ini"
    parser = ConfigParser()
    # Preserve option-name case (default optionxform lower-cases keys).
    parser.optionxform = str
    parser.read(config_file)
    # Initialize both results so missing sections do not crash at return.
    mongodb, solr = {}, {}
    for section_name in parser.sections():
        if section_name == 'MongoDBServer':
            mongodb = dict(parser.items(section_name))
        if section_name == 'SolrServer':
            solr = dict(parser.items(section_name))
    return mongodb, solr
def load_theme(struct, path, colors, default_colors):
    """Load a theme ini file at *path* into the *colors* dict (in place).

    Options from the [syntax] section take precedence over [interface];
    any key present in *default_colors* but absent from the theme is
    filled in from the defaults.  *struct* is accepted for interface
    compatibility and is not used here.
    """
    theme = ConfigParser()
    with open(path, 'r') as f:
        # read_file() replaces readfp(), which was removed in Python 3.12.
        theme.read_file(f)
    for k, v in chain(theme.items('syntax'), theme.items('interface')):
        if theme.has_option('syntax', k):
            colors[k] = theme.get('syntax', k)
        else:
            colors[k] = theme.get('interface', k)
    # Check against default theme to see if all values are defined
    # (plain dict.items() — six.iteritems() is unnecessary on Python 3).
    for k, v in default_colors.items():
        if k not in colors:
            colors[k] = v
def load_ini(self, ini_config):
    """
    Read the provided ini contents arguments and merge
    the data in the ini config into the config object.

    ini_config is assumed to be a string of the ini file contents.
    """
    parser = ConfigParser()
    # NOTE(review): readfp() is deprecated (removed in Python 3.12) in
    # favour of read_file(); kept as-is to avoid behaviour changes.
    parser.readfp(StringIO(ini_config))
    # Start from an empty skeleton so every expected key exists even when
    # the corresponding ini section is missing.
    data = {
        'linters': {},
        'files': {},
        'branches': {},
    }
    if parser.has_section('files'):
        ignore = parser.get('files', 'ignore')
        data['files']['ignore'] = newline_value(ignore)
    if parser.has_section('branches'):
        ignore = parser.get('branches', 'ignore')
        data['branches']['ignore'] = comma_value(ignore)
    linters = []
    if parser.has_section('tools'):
        linters = comma_value(parser.get('tools', 'linters'))
    # Setup empty config sections
    for linter in linters:
        data['linters'][linter] = {}
    # Each [tool_<name>] section supplies per-linter options; these
    # overwrite the empty dicts created above.
    for section in parser.sections():
        if not section.startswith('tool_'):
            continue
        # Strip off tool_
        linter = section[5:]
        data['linters'][linter] = dict(parser.items(section))
    self.update(data)
def loadConfigs(self):
    """Entry point to load the l10n.ini file this Parser refers to.

    This implementation uses synchronous loads, subclasses might overload
    this behaviour. If you do, make sure to pass a file-like object
    to onLoadConfig.
    """
    cp = ConfigParser(self.defaults)
    cp.read(self.inipath)
    # Resolve the base directory relative to this ini file's location,
    # offset by the configured depth.
    depth = self.getDepth(cp)
    self.base = mozpath.join(mozpath.dirname(self.inipath), depth)
    # create child loaders for any other l10n.ini files to be included
    try:
        for title, path in cp.items('includes'):
            # skip default items
            if title in self.defaults:
                continue
            # add child config parser
            self.addChild(title, path, cp)
    except NoSectionError:
        pass
    # try to load the "dirs" defined in the "compare" section
    try:
        self.dirs.extend(cp.get('compare', 'dirs').split())
    except (NoOptionError, NoSectionError):
        pass
    # try to set "all_path" and "all_url"
    try:
        self.all_path = mozpath.join(self.base, cp.get('general', 'all'))
    except (NoOptionError, NoSectionError):
        self.all_path = None
    return cp
def get_config_init_cfg(configfile, service=MODE):
    """Read *configfile* and return the [service] section options with
    pps-specific post-processing applied.

    - 'subscribe_topics' is split on ',' with empty entries dropped
    - numeric/string options receive defaults
    - 'yes'/'no' option values are normalised to True/False
    """
    # Stdlib configparser — the six.moves shim resolved to the same
    # module on Python 3.
    from configparser import ConfigParser
    conf = ConfigParser()
    conf.read(configfile)

    options = {}
    for option, value in conf.items(service, raw=True):
        options[option] = value

    # Fixed: the original removed empty strings from the list while
    # iterating over it, which skips the element after each removal
    # (consecutive empty entries survived).  Filter instead.
    subscribe_topics = [item for item in
                        options.get('subscribe_topics').split(',')
                        if len(item) != 0]
    options['subscribe_topics'] = subscribe_topics

    options['number_of_threads'] = int(options.get('number_of_threads', 5))
    options['maximum_pps_processing_time_in_minutes'] = int(
        options.get('maximum_pps_processing_time_in_minutes', 20))
    options['servername'] = options.get('servername', socket.gethostname())
    options['station'] = options.get('station', 'unknown')

    options['run_cmask_prob'] = options.get('run_cmask_prob', True)
    options['run_pps_cpp'] = options.get('run_pps_cpp', True)
    #: Change yes to True and no to False to match .yaml
    for arname, val in options.items():
        if val == 'yes':
            options[arname] = True
        if val == 'no':
            options[arname] = False

    return options
def reload(self):
    """Rebuild self.properties from defaults plus the on-disk config file.

    Raises TanitConfigurationException when self.conf_file is missing or
    cannot be parsed.
    """
    # clear configuration
    self.properties = {}
    # load defaults parameters
    for prop in DEFAULT_PROPERTIES:
        self.set(prop.key, prop.default_value)
    # load config file TANIT_CONF_DIR
    config = ConfigParser()
    if osp.exists(self.conf_file):
        try:
            config.read(self.conf_file)
        except Exception as e:
            # NOTE(review): the "%s" placeholder is never interpolated —
            # the extra arguments are passed straight to the exception
            # constructor (logging-style call).  Confirm the intended
            # message formatting.
            raise TanitConfigurationException(
                "Exception while loading configuration file %s.",
                self.conf_file, e)
        _logger.info("Instantiated configuration from %s.", self.conf_file)
    else:
        raise TanitConfigurationException("Invalid configuration file %s.",
                                          self.conf_file)
    # load site parameters: every (section, key) pair becomes a
    # ConfigurationKey entry.
    for section in config.sections():
        for (key, value) in config.items(section=section):
            self.set(ConfigurationKey(section, key), value)
def load_default_env(auppath=DEFAULT_AUPTIMIZER_PATH, log=logger, use_default=True):
    """Load the Auptimizer environment from ``env.ini`` under *auppath*.

    Falls back to ``~/.aup`` when *use_default* is true and the given
    folder holds no ``env.ini``; raises otherwise.

    :param auppath: aup environment folder, contains `env.ini` file
    :type auppath: str
    :param log: logger obj to trace where the function is called
    :type log: logging.Logger
    :param use_default: if auppath is empty, use user's home folder instead.
    :type use_default: bool
    :return: key-value of parameters
    :rtype: dict
    """
    if not path.isfile(path.join(auppath, "env.ini")):
        if not use_default:
            raise ValueError("Auptimizer folder %s is missing" % auppath)
        # Fall back to the per-user environment folder.
        auppath = path.join(path.expanduser("~"), ".aup")
        log.warning("Use default env at %s" % auppath)
        if not path.isfile(path.join(auppath, "env.ini")):  # pragma: no cover
            raise Exception("Failed to find env.ini")
    log.info("Auptimizer environment at %s", auppath)
    parser = ConfigParser()
    # Keep option names case-sensitive.
    parser.optionxform = str
    parser.read(path.join(auppath, "env.ini"))
    return dict(parser.items("Auptimizer"))
def read_config():
    """Parse test.cfg into {"general": {...}, "products": {name: {...}}}.

    Every section other than [general] is treated as one product.
    """
    cfg = ConfigParser()
    cfg.read("test.cfg")

    result = {"general": dict(cfg.items("general")), "products": {}}
    # This only allows one product per whatever for now
    for section in cfg.sections():
        if section == "general":
            continue
        result["products"][section] = {k: v for k, v in cfg.items(section)}
    return result
def init():
    """Build the Galaxy model mapping and object store from the ini file
    named by options.config.

    Returns (mapping, object_store, db_dialect) where db_dialect is the
    scheme portion of the database connection string.
    """
    options.config = os.path.abspath(options.config)
    # 'all' acts as a wildcard: None means "no filter" downstream.
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None
    config_parser = ConfigParser(
        dict(here=os.getcwd(),
             database_connection=
             'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
    config_parser.read(options.config)
    config_dict = {}
    for key, value in config_parser.items("app:main"):
        config_dict[key] = value
    config = galaxy.config.Configuration(**config_dict)
    object_store = build_object_store_from_config(config)
    from galaxy.model import mapping
    return (mapping.init(config.file_path, config.database_connection,
                         create_tables=False, object_store=object_store),
            object_store,
            config.database_connection.split(':')[0])
def load(self): """load configuration of analysis from the config.txt file in self.path """ # read in the config file cfile = self.configFile configParse = ConfigParser() configParse.read(cfile) # location of data file (h5, sql, etc.) storage.open(configParse.get("analysis", "data")) # what search terms do we want to use for the data? self.kwargs = dict(configParse.items("data")) for k in self.kwargs.keys(): self.kwargs[k] = self.kwargs[k].split(",") # get data matching search terms self.data = storage.getData(**self.kwargs) # determine all factories to use in analysis self.factories = [] for f in configParse.get("analysis", "factory").split(","): self.factories.append((f, registry[f])) # self.factory = registry[configParse.get("analysis","factory")] # how to group data samples self.groupby = configParse.get("analysis", "groupby").split(",") # what is the max number of curves per grouping? self.maxSize = int(configParse.get("analysis", "maxSize")) # everything is loaded self.loaded = True
def parse_xml_mapping(xml_mapping_filename):
    """Parse an ini-style XML-mapping file into nested dicts.

    Returns {section_name: {option: value}} for every section in the file.
    """
    with open(xml_mapping_filename, 'r') as f:
        config = ConfigParser()
        # Both branches of the old `if six.PY3` check called read_file(),
        # so the version test was dead code; call it unconditionally and
        # drop the six dependency.
        config.read_file(f)
        return {k: dict(config.items(k)) for k in config.sections()}
def read_systemini(self):
    """read group info from system.ini
    this is part of the connection process

    Fetches Config/system.ini over FTP, parses the GROUPS section into
    self.groups and stage sections into self.stages, then queries the
    XPS controller for per-stage velocity/acceleration and travel limits.
    """
    self.ftpconn.connect(**self.ftpargs)
    self.ftpconn.cwd(posixpath.join(self.ftphome, 'Config'))
    lines = self.ftpconn.getlines('system.ini')
    self.ftpconn.close()
    pvtgroups = []
    self.stages = OrderedDict()
    self.groups = OrderedDict()
    sconf = ConfigParser()
    sconf.readfp(StringIO('\n'.join(lines)))

    # read and populate lists of groups first
    for gtype, glist in sconf.items('GROUPS'):  # ].items():
        if len(glist) > 0:
            for gname in glist.split(','):
                gname = gname.strip()
                self.groups[gname] = OrderedDict()
                self.groups[gname]['category'] = gtype.strip()
                self.groups[gname]['positioners'] = []
                # "Multiple*" categories support PVT trajectories.
                if gtype.lower().startswith('multiple'):
                    pvtgroups.append(gname)

    for section in sconf.sections():
        if section in ('DEFAULT', 'GENERAL', 'GROUPS'):
            continue
        items = sconf.options(section)
        if section in self.groups:  # this is a Group Section!
            poslist = sconf.get(section, 'positionerinuse')
            posnames = [a.strip() for a in poslist.split(',')]
            self.groups[section]['positioners'] = posnames
        elif 'plugnumber' in items:  # this is a stage
            self.stages[section] = {
                'stagetype': sconf.get(section, 'stagename')
            }

    # With exactly one PVT-capable group, select it automatically.
    if len(pvtgroups) == 1:
        self.set_trajectory_group(pvtgroups[0])

    for sname in self.stages:
        ret = self._xps.PositionerMaximumVelocityAndAccelerationGet(
            self._sid, sname)
        try:
            self.stages[sname]['max_velo'] = ret[1]
            # NOTE(review): acceleration is divided by 3 — presumably a
            # safety margin; confirm against controller documentation.
            self.stages[sname]['max_accel'] = ret[2] / 3.0
        except:
            # NOTE(review): bare except — best-effort; failures only log.
            print("could not set max velo/accel for %s" % sname)
        ret = self._xps.PositionerUserTravelLimitsGet(self._sid, sname)
        try:
            self.stages[sname]['low_limit'] = ret[1]
            self.stages[sname]['high_limit'] = ret[2]
        except:
            print("could not set limits for %s" % sname)
    return self.groups
def handleApps(self, **kwargs):
    """Fetch the master l10nbuilds.ini over HTTP and feed each section,
    with its options, to self.handleSection."""
    l10nbuilds = urlopen(
        'https://raw.githubusercontent.com/Pike/master-ball/'
        'master/l10n-master/l10nbuilds.ini')
    cp = ConfigParser()
    # NOTE(review): readfp() is deprecated in favour of read_file();
    # kept as-is to avoid behaviour changes.
    cp.readfp(l10nbuilds)
    for section in cp.sections():
        self.stdout.write(section + '\n')
        self.handleSection(section, dict(cp.items(section)))
def get_configuration_dict(configuration=None, value_types=None):
    """
    Parse the configuration files

    Parameters
    ----------
    configuration : str or list, optional
        A configuration file or list of configuration files to parse,
        defaults to the deploy_default.conf file in the package and
        deploy.conf in the current working directory.

    value_types : dict, optional
        Dictionary containing classes to apply to specific items

    Returns
    -------
    dict
        Configuration dictionary
    """
    if not value_types:  # pragma: no cover
        value_types = config_types()
    # Fixed: `configuration is ''` compared object identity against a
    # string literal, which is unreliable (and a SyntaxWarning on
    # CPython >= 3.8); use equality instead.
    if configuration is None or configuration == '':  # pragma: no cover
        configuration = [
            # Config file that is part of the package
            # PACKAGE_DEFAULT_CONFIG,

            # Any deploy.conf files in the current directory
            'deploy.conf'
        ]

    config = ConfigParser()

    # Set the config defaults; read_string() is Python 3, the fallback
    # covers old parsers without it.
    try:
        config.read_string(config_defaults())
    except AttributeError:
        config.readfp(io.BytesIO(config_defaults()))

    logger.debug('Working with default dict: %r', config_defaults())
    config.read(configuration)

    result_dict = {}
    for section in config.sections():
        result_dict[section] = {}
        for key, val in config.items(section):
            # Expand environment/format placeholders in each value.
            result_dict[section][key] = str_format_env(val)

    config_update(result_dict)

    if 'locations' not in result_dict:
        result_dict['locations'] = {}
    result_dict['locations']['package_scripts'] = package_scripts_directory()
    if not result_dict['global'].get('virtualenv_dir', None):
        result_dict['global']['virtualenv_dir'] = \
            default_virtualenv_directory()

    cast_types(result_dict)

    return result_dict
def parse_config_file(file_path):
    """Read *file_path* and return the validated [batch_scoring] options.

    Returns an empty dict when the file has no batch_scoring section,
    since nothing in it relates to batch-scoring arguments.
    """
    config = ConfigParser()
    config.read(file_path)
    if not config.has_section('batch_scoring'):
        return {}
    return config_validator(dict(config.items('batch_scoring')))
def get_options_from_config(config_file):
    """Get configuration information from config like setup.cfg or tox.ini

    Returns the [changelog] section as a dict, or {} when absent.
    """
    # Stdlib configparser — six.moves.configparser is the same module on
    # Python 3, so the third-party shim is unnecessary.
    from configparser import ConfigParser
    conf = ConfigParser()
    conf.read([config_file])
    if conf.has_section('changelog'):
        return dict(conf.items('changelog'))
    return {}
class Config(object):
    """A ConfigParser wrapper to support defaults when calling instance
    methods, and also tied to a single section"""

    # All option lookups are scoped to this section.
    SECTION = 'scrapydartx'

    def __init__(self, values=None, extra_sources=()):
        if values is None:
            # Layer configs: packaged defaults first, then system/user/
            # local files, then caller-provided streams (later reads win).
            sources = self._getsources()
            default_config = get_data(__package__, 'default_scrapyd.conf').decode('utf8')
            self.cp = ConfigParser()
            self.cp.read_file(StringIO(default_config))
            self.cp.read(sources)
            for fp in extra_sources:
                self.cp.read_file(fp)
        else:
            # Explicit values: seed the parser defaults and create the
            # single section so lookups succeed.
            self.cp = ConfigParser(values)
            self.cp.add_section(self.SECTION)

    def _getsources(self):
        """Return candidate config file paths, lowest precedence first."""
        sources = ['/etc/scrapydartx/scrapydartx.conf', r'c:\scrapyd\scrapyd.conf']
        sources += sorted(glob.glob('/etc/scrapydartx/conf.d/*'))
        sources += ['scrapydartx.conf']
        sources += [expanduser('~/.scrapydartx.conf')]
        scrapy_cfg = closest_scrapy_cfg()
        if scrapy_cfg:
            sources.append(scrapy_cfg)
        return sources

    def _getany(self, method, option, default):
        """Call *method* for *option* in SECTION, returning *default* on a
        missing section/option (re-raising when no default was given)."""
        try:
            return method(self.SECTION, option)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise

    def get(self, option, default=None):
        return self._getany(self.cp.get, option, default)

    def getint(self, option, default=None):
        return self._getany(self.cp.getint, option, default)

    def getfloat(self, option, default=None):
        return self._getany(self.cp.getfloat, option, default)

    def getboolean(self, option, default=None):
        return self._getany(self.cp.getboolean, option, default)

    def items(self, section, default=None):
        """Return (name, value) pairs for *section*, or *default* when the
        section is missing and a default was provided."""
        try:
            return self.cp.items(section)
        except (NoSectionError, NoOptionError):
            if default is not None:
                return default
            raise
def read_config(filename):
    """Read the config file called *filename*.

    Returns a dict mapping section name -> option dict.  Sections that
    lack 'providers', or lack both 'topic' and heartbeat, are dropped
    with a warning.  Falsy string spellings of 'delete' and 'heartbeat'
    are normalised to boolean False.
    """
    cp_ = ConfigParser()
    cp_.read(filename)

    res = {}

    for section in cp_.sections():
        res[section] = dict(cp_.items(section))
        res[section].setdefault("delete", False)
        if res[section]["delete"] in ["", "False", "false", "0", "off"]:
            res[section]["delete"] = False
        res[section].setdefault("working_directory", None)
        res[section].setdefault("compression", False)
        res[section].setdefault("heartbeat", True)
        res[section].setdefault("req_timeout", DEFAULT_REQ_TIMEOUT)
        res[section].setdefault("transfer_req_timeout", 10 * DEFAULT_REQ_TIMEOUT)
        if res[section]["heartbeat"] in ["", "False", "false", "0", "off"]:
            res[section]["heartbeat"] = False

        if "providers" not in res[section]:
            LOGGER.warning("Incomplete section " + section +
                           ": add an 'providers' item.")
            LOGGER.info("Ignoring section " + section + ": incomplete.")
            del res[section]
            continue
        else:
            # Providers are whitespace-separated entries; prefix each
            # with the tcp:// transport.
            res[section]["providers"] = [
                "tcp://" + item for item in res[section]["providers"].split()
            ]

        if "destination" not in res[section]:
            LOGGER.warning("Incomplete section " + section +
                           ": add an 'destination' item.")
            LOGGER.info("Ignoring section " + section + ": incomplete.")
            del res[section]
            continue

        if "topic" in res[section]:
            # publish_port defaults to 0 (ephemeral) when missing/invalid.
            try:
                res[section]["publish_port"] = int(
                    res[section]["publish_port"])
            except (KeyError, ValueError):
                res[section]["publish_port"] = 0
        elif not res[section]["heartbeat"]:
            # We have no topics and therefor no subscriber (if you want to
            # subscribe everything, then explicit specify an empty topic).
            LOGGER.warning("Incomplete section " + section +
                           ": add an 'topic' item or enable heartbeat.")
            LOGGER.info("Ignoring section " + section + ": incomplete.")
            del res[section]
            continue

    return res
def check_db(config_parser):
    """Abort bootstrapping when the configured Tool Shed database or
    hgweb.config already contains data; exits 0 when bootstrapping may
    proceed.  Always terminates via sys.exit().
    """
    dburi = None
    if config_parser.has_option('app:main', 'database_connection'):
        dburi = config_parser.get('app:main', 'database_connection')
    elif config_parser.has_option('app:main', 'database_file'):
        db_file = config_parser.get('app:main', 'database_file')
        dburi = "sqlite:///%s?isolation_level=IMMEDIATE" % db_file
    else:
        sys.exit('The database configuration setting is missing from the tool_shed.ini file. Add this setting before attempting to bootstrap.')
    sa_session = None
    database_exists_message = 'The database configured for this Tool Shed is not new, so bootstrapping is not allowed. '
    database_exists_message += 'Create a new database that has not been migrated before attempting to bootstrap.'
    try:
        # If init succeeds against an existing, migrated database, the DB
        # is not new and bootstrapping must not proceed.
        model = tool_shed_model.init(config_parser.get('app:main', 'file_path'), dburi, engine_options={}, create_tables=False)
        sa_session = model.context.current
        sys.exit(database_exists_message)
    except ProgrammingError:
        pass
    except OperationalError:
        pass
    try:
        if sa_session is not None:
            # A migrate_version >= 2 means migrations have already run.
            result = sa_session.execute('SELECT version FROM migrate_version').first()
            if result[0] >= 2:
                sys.exit(database_exists_message)
            else:
                pass
    except ProgrammingError:
        pass
    if config_parser.has_option('app:main', 'hgweb_config_dir'):
        hgweb_config_parser = ConfigParser()
        hgweb_dir = config_parser.get('app:main', 'hgweb_config_dir')
        hgweb_config_file = os.path.join(hgweb_dir, 'hgweb.config')
        if not os.path.exists(hgweb_config_file):
            sys.exit(0)
        hgweb_config_parser.read(hgweb_config_file)
        configured_repos = hgweb_config_parser.items('paths')
        if len(configured_repos) >= 1:
            # NOTE(review): "boostrap" typo below is in the user-facing
            # message; left untouched here since this edit is doc-only.
            message = "This Tool Shed's hgweb.config file contains entries, so bootstrapping is not allowed. Delete"
            message += " the current hgweb.config file along with all associated repositories in the configured "
            message += "location before attempting to boostrap."
            sys.exit(message)
        else:
            sys.exit(0)
    else:
        sys.exit(0)
    sys.exit(0)
def main():
    """Entry point for the swift dispersion report CLI: parse options,
    load the dispersion config, and print the generated report."""
    patcher.monkey_patch()
    # Disable hub exception printing; report output must stay clean.
    hubs.get_hub().debug_exceptions = False

    conffile = '/etc/swift/dispersion.conf'

    parser = OptionParser(usage='''
Usage: %%prog [options] [conf_file]

[conf_file] defaults to %s'''.strip() % conffile)
    parser.add_option('-j', '--dump-json', action='store_true', default=False,
                      help='dump dispersion report in json format')
    parser.add_option('-d', '--debug', action='store_true', default=False,
                      help='print 404s to standard error')
    parser.add_option('-p', '--partitions', action='store_true', default=False,
                      help='print missing partitions to standard error')
    parser.add_option('--container-only', action='store_true', default=False,
                      help='Only run container report')
    parser.add_option('--object-only', action='store_true', default=False,
                      help='Only run object report')
    parser.add_option('--insecure', action='store_true', default=False,
                      help='Allow accessing insecure keystone server. '
                           'The keystone\'s certificate will not be verified.')
    parser.add_option('-P', '--policy-name', dest='policy_name',
                      help="Specify storage policy name")

    options, args = parser.parse_args()

    if args:
        conffile = args.pop(0)

    if options.debug:
        global debug
        debug = True

    c = ConfigParser()
    if not c.read(conffile):
        exit('Unable to read config file: %s' % conffile)
    conf = dict(c.items('dispersion'))
    # Command-line switches override the corresponding config options.
    if options.dump_json:
        conf['dump_json'] = 'yes'
    if options.object_only:
        conf['container_report'] = 'no'
    if options.container_only:
        conf['object_report'] = 'no'
    if options.insecure:
        conf['keystone_api_insecure'] = 'yes'
    if options.partitions:
        conf['partitions'] = 'yes'
    output = generate_report(conf, options.policy_name)

    if json_output:
        print(json.dumps(output))
def load_ini(self, ini_config):
    """
    Read the provided ini contents arguments and merge
    the data in the ini config into the config object.

    ini_config is assumed to be a string of the ini file contents.
    """
    parser = ConfigParser()
    # NOTE(review): readfp() is deprecated (removed in Python 3.12) in
    # favour of read_file(); kept as-is to avoid behaviour changes.
    parser.readfp(StringIO(ini_config))
    # Empty skeleton so every expected key exists even when the
    # corresponding ini section is missing.
    data = {
        'linters': {},
        'files': {},
        'branches': {},
        'fixers': {},
        'review': {}
    }
    if parser.has_section('files'):
        ignore = parser.get('files', 'ignore')
        data['files']['ignore'] = newline_value(ignore)
    if parser.has_section('branches'):
        ignore = parser.get('branches', 'ignore')
        data['branches']['ignore'] = comma_value(ignore)
    linters = []
    if parser.has_section('tools'):
        linters = comma_value(parser.get('tools', 'linters'))
    if parser.has_section('fixers'):
        data['fixers'] = dict(parser.items('fixers'))
    if parser.has_section('review'):
        data['review'] = dict(parser.items('review'))
    # Setup empty config sections
    for linter in linters:
        data['linters'][linter] = {}
    # Each [tool_<name>] section supplies per-linter options.
    for section in parser.sections():
        if not section.startswith('tool_'):
            continue
        # Strip off tool_
        linter = section[5:]
        data['linters'][linter] = dict(parser.items(section))
    self.update(data)
def __load_config(self):
    """Parse the ini file at self.options.config into a Galaxy
    Configuration object stored on self.config."""
    log.info('Reading config from %s' % self.options.config)
    # 'here' and a sqlite default are provided as interpolation defaults.
    config_parser = ConfigParser(dict(here=os.getcwd(),
                                      database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
    config_parser.read(self.options.config)
    config_dict = {}
    for key, value in config_parser.items('app:main'):
        config_dict[key] = value
    # Galaxy expects root_dir to point at the checkout root.
    config_dict['root_dir'] = galaxy_root
    self.config = galaxy.config.Configuration(**config_dict)
def load_config(filename):
    """Read a section-less ini file and return its (name, value) pairs.

    The file contents are wrapped in a synthetic [root] section so a
    plain `key = value` file parses.  On IOError a warning is written to
    stderr and an empty option list is returned.
    """
    section = "root"
    try:
        # Close the handle deterministically — the old open().read()
        # leaked the file object until garbage collection.
        with open(filename) as f:
            config_text = "[%s]\n%s" % (section, f.read())
    except IOError as e:
        sys.stderr.write("load_config: %s\n" % e)
        config_text = "[%s]\n" % section
    config = ConfigParser()
    # read_file() replaces the deprecated readfp().
    config.read_file(StringIO(config_text))
    return config.items(section)
def test_overwrite_dict_cfg(self):
    """overwrite_dict_to_cfg maps a str value to a single valueless
    option, a dict to key/value options, and a list to valueless
    options, one section per top-level key."""
    c = ConfigParser(allow_no_value=True)
    d = {
        "section_a": "empty_value",
        "section_b": {"key_c": "val_d", "key_d": "val_d"},
        "section_c": ["key_c", "key_d"],
    }
    ansible_common.overwrite_dict_to_cfg(c, d)
    # Python3 and Python2 convert empty values into None or ''
    # we don't really care but we need to compare correctly for unittest
    self.assertTrue(c.has_option("section_a", "empty_value"))
    self.assertEqual(sorted(c.items("section_b")),
                     [('key_c', 'val_d'), ('key_d', 'val_d')])
    self.assertTrue(c.has_option("section_c", "key_c"))
    self.assertTrue(c.has_option("section_c", "key_d"))
def config(filename='database.ini', section='postgresql'):
    """Read *section* from ini file *filename* and return its options
    as a dict; raises when the section is absent."""
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        raise Exception('Section {0} not found in the {1} file'.format(
            section, filename))
    return dict(parser.items(section))
def load(self):
    """Load dashboard UI config, keyboard shortcuts and graph-name
    schemes from settings.DASHBOARD_CONF."""
    schemes = [defaultScheme]
    parser = ConfigParser()
    parser.read(settings.DASHBOARD_CONF)

    # Prefer integer values for ui options; fall back to the raw string.
    for option, default_value in defaultUIConfig.items():
        if parser.has_option('ui', option):
            try:
                self.ui_config[option] = parser.getint('ui', option)
            except ValueError:
                self.ui_config[option] = parser.get('ui', option)
        else:
            self.ui_config[option] = default_value

    if parser.has_option('ui', 'automatic_variants'):
        self.ui_config['automatic_variants'] = parser.getboolean('ui', 'automatic_variants')
    else:
        self.ui_config['automatic_variants'] = True

    self.ui_config['keyboard_shortcuts'] = defaultKeyboardShortcuts.copy()
    if parser.has_section('keyboard-shortcuts'):
        self.ui_config['keyboard_shortcuts'].update(
            parser.items('keyboard-shortcuts')
        )

    # Every remaining section defines a graph-name scheme; its pattern's
    # fields (extracted with fieldRegex) get optional per-field labels.
    for section in parser.sections():
        if section in ('ui', 'keyboard-shortcuts'):
            continue

        scheme = parser.get(section, 'scheme')
        fields = []

        for match in fieldRegex.finditer(scheme):
            field = match.group(1)
            if parser.has_option(section, '%s.label' % field):
                label = parser.get(section, '%s.label' % field)
            else:
                label = field

            fields.append({
                'name' : field,
                'label' : label
            })

        schemes.append({
            'name' : section,
            'pattern' : scheme,
            'fields' : fields,
        })

    self.schemes = schemes
def get_sa_session(ini_file):
    """Build a SQLAlchemy session from the Galaxy ini file *ini_file*.

    Returns (session, configuration).
    """
    conf_parser = ConfigParser({'here': os.getcwd()})
    conf_parser.read(ini_file)
    kwds = dict()
    for key, value in conf_parser.items("app:main"):
        kwds[key] = value
    ini_config = config.Configuration(**kwds)
    db_con = ini_config.database_connection
    if not db_con:
        # Fall back to the default on-disk sqlite database.
        db_con = "sqlite:///%s?isolation_level=IMMEDIATE" % ini_config.database
    model = galaxy.model.mapping.init(ini_config.file_path, db_con, engine_options={}, create_tables=False)
    return model.context.current, ini_config
def _get_retentions_from_storage_schemas(self, opts):
    """Parse storage-schemas.conf and returns all retentions."""
    retentions_found = []
    config_parser = ConfigParser()
    # read() returns the list of files parsed; empty means unreadable.
    if not config_parser.read(opts.storage_schemas):
        raise SystemExit("Error: Couldn't read config file: %s" % opts.storage_schemas)
    for section in config_parser.sections():
        section_options = dict(config_parser.items(section))
        archives = [
            carbon_util.parseRetentionDef(retention_def)
            for retention_def in section_options['retentions'].split(',')
        ]
        retentions_found.append(bg_accessor.Retention.from_carbon(archives))
    return retentions_found
def init():
    """Initialise the Galaxy model mapping and object store from the ini
    file named by options.config.

    Returns (mapping, object_store).
    """
    options.config = os.path.abspath(options.config)
    # 'here' and a sqlite default are provided as interpolation defaults.
    config_parser = ConfigParser(dict(here=os.getcwd(),
                                      database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
    config_parser.read(options.config)
    config_dict = {}
    for key, value in config_parser.items("app:main"):
        config_dict[key] = value
    config = galaxy.config.Configuration(**config_dict)
    object_store = build_object_store_from_config(config)
    return (mapping.init(config.file_path, config.database_connection,
                         create_tables=False, object_store=object_store),
            object_store)
def _get_retentions_from_storage_schemas(self, opts):
    """Parse storage-schemas.conf and returns all retentions."""
    ret = []
    config_parser = ConfigParser()
    # read() returns the list of files successfully parsed; an empty
    # list means the file was missing or unreadable.
    if not config_parser.read(opts.storage_schemas):
        raise SystemExit(
            "Error: Couldn't read config file: %s" % opts.storage_schemas
        )
    for section in config_parser.sections():
        options = dict(config_parser.items(section))
        # e.g. "60s:1d,1h:30d" — one archive definition per element.
        retentions = options["retentions"].split(",")
        archives = [carbon_util.parseRetentionDef(s) for s in retentions]
        ret.append(bg_metric.Retention.from_carbon(archives))
    return ret
def read_config_cfg(filename):
    """Read the config file *filename* and replace the values in global
    variables.

    Builds and returns a schedule.Scheduler from the [default],
    [pattern], per-station and per-satellite sections.
    """
    cfg = ConfigParser()
    cfg.read(filename)

    def read_cfg_opts(section):
        """Read the option:value pairs in one section,
           converting value to int/float if applicable.
        """
        kv_dict = {}
        for k, v in cfg.items(section):
            # Fixed: the bare `except:` clauses also swallowed
            # SystemExit/KeyboardInterrupt; int()/float() on a string can
            # only raise ValueError.
            try:
                kv_dict[k] = int(v)
            except ValueError:
                try:
                    kv_dict[k] = float(v)
                except ValueError:
                    kv_dict[k] = v
        return kv_dict

    default_params = read_cfg_opts("default")
    pattern = {}
    for k, v in cfg.items("pattern"):
        pattern[k] = v
    station_list = []
    for station_id in default_params["station"].split(","):
        station_params = read_cfg_opts(station_id)
        satellites = cfg.get(station_id, "satellites").split(",")
        sat_list = []
        for sat_name in satellites:
            sat_list.append(
                schedule.Satellite(sat_name, **read_cfg_opts(sat_name)))
        new_station = schedule.Station(station_id, **station_params)
        new_station.satellites = sat_list
        station_list.append(new_station)
    scheduler = schedule.Scheduler(
        stations=station_list,
        min_pass=default_params.get("min_pass", 4),
        forward=default_params.get("forward"),
        start=default_params.get("start"),
        dump_url=default_params.get("dump_url", None),
        patterns=pattern,
        center_id=default_params.get("center_id", "unknown"))

    return scheduler
def read_config_cfg(filename):
    """Read the config file *filename* and replace the values in global
    variables.

    Builds and returns a schedule.Scheduler from the [default],
    [pattern], per-station and per-satellite sections.
    """
    cfg = ConfigParser()
    cfg.read(filename)

    def read_cfg_opts(section):
        """Read the option:value pairs in one section,
           converting value to int/float if applicable.
        """
        kv_dict = {}
        for k, v in cfg.items(section):
            # NOTE(review): the bare excepts below also swallow
            # SystemExit/KeyboardInterrupt; `except ValueError` would be
            # sufficient for int()/float() on a string.
            try:
                kv_dict[k] = int(v)
            except:
                try:
                    kv_dict[k] = float(v)
                except:
                    kv_dict[k] = v
        return kv_dict

    default_params = read_cfg_opts("default")
    pattern = {}
    for k, v in cfg.items("pattern"):
        pattern[k] = v
    station_list = []
    # The [default] 'station' option lists the station sections to load.
    for station_id in default_params["station"].split(","):
        station_params = read_cfg_opts(station_id)
        satellites = cfg.get(station_id, "satellites").split(",")
        sat_list = []
        for sat_name in satellites:
            sat_list.append(schedule.Satellite(sat_name,
                                               **read_cfg_opts(sat_name)
                                               ))
        new_station = schedule.Station(station_id, **station_params)
        new_station.satellites = sat_list
        station_list.append(new_station)
    scheduler = schedule.Scheduler(stations=station_list,
                                   min_pass=default_params.get("min_pass", 4),
                                   forward=default_params.get("forward"),
                                   start=default_params.get("start"),
                                   dump_url=default_params.get("dump_url", None),
                                   patterns=pattern,
                                   center_id=default_params.get("center_id", "unknown"))

    return scheduler
def parse_args(self, args=None, values=None):
    '''
    parse the command line arguments and config file arguments

    First parses the command line to read the configfile name (if
    specified); options found in that file's [self.section] become the
    new defaults before a second parse.
    '''
    options, args = OptionParser.parse_args(self, args, values)
    if options.configfile:
        # config file is specified now. Read the config file and set the
        # parameters as defaults
        config = ConfigParser()
        config.read(options.configfile)
        # Fixed: config.items() returns a list of (name, value) pairs;
        # **-expanding the raw list raised TypeError.  Convert to a dict.
        defaults = dict(config.items(self.section))
        self.set_defaults(**defaults)
        options, args = OptionParser.parse_args(self, args, values)

    return options, args
def getPluginSettings(themeDirectory, plugins=None):
    """Given an IResourceDirectory for a theme, return the settings for
    the given list of plugins (or all plugins, if not given) provided
    as a list of (name, plugin) pairs.

    Returns a dict of dicts, with the outer dict having plugin names as
    keys and containing plugins settings (key/value pairs) as values.
    """
    if plugins is None:
        plugins = getPlugins()

    # noinspection PyPep8Naming
    manifestContents = {}

    if themeDirectory.isFile(MANIFEST_FILENAME):
        parser = ConfigParser()
        fp = themeDirectory.openFile(MANIFEST_FILENAME)

        try:
            if six.PY2:
                parser.readfp(fp)
            else:
                # Python 3: the stream yields bytes, so decode first.
                parser.read_string(fp.read().decode())
            for section in parser.sections():
                manifestContents[section] = {}
                for name, value in parser.items(section):
                    manifestContents[section][name] = value
        finally:
            # Some stream implementations have no close(); ignore that.
            try:
                fp.close()
            except AttributeError:
                pass

    pluginSettings = {}
    for name, plugin in plugins:
        # A plugin's settings live in a "<THEME_RESOURCE_NAME>:<name>"
        # manifest section; default to an empty dict when absent.
        pluginSettings[name] = manifestContents.get(
            "%s:%s" % (THEME_RESOURCE_NAME, name), {})  # noqa

    return pluginSettings
def read_db_config(filename='/path/to/your/directory/rpi_control/config.ini', section='mysql'):
    """
    Read database configuration file and return a dictionary object
    :param filename: name of the configuration file
    :param section: section of database configuration
    :return: a dictionary of database parameters
    """
    # create parser and read ini configuration file
    parser = ConfigParser()
    parser.read(filename)

    # get section, default to mysql
    db = {}
    if parser.has_section(section):
        items = parser.items(section)
        for item in items:
            db[item[0]] = item[1]
    else:
        raise Exception('{0} not found in the {1} file'.format(section, filename))

    return db
    # Removed the trailing `print db`: it was unreachable (after return)
    # and Python-2-only syntax that breaks parsing under Python 3.
class Namespaces(object):
    r"""Helper for namespaces.

    The config file would look like:
    ```
    [carbon-relay]
    pattern = carbon\.relay\.*

    [carbon-cache]
    pattern = carbon\.agents\.*

    [carbon-aggregator]
    pattern = carbon\.aggregator\.*

    [prometheus]
    pattern = prometheus\.*
    ```
    """

    def __init__(self, filename=None):
        """Initializer."""
        # OrderedDict as the parser's dict_type keeps section order, so
        # lookup() matches patterns in file order.
        self.config = ConfigParser({}, collections.OrderedDict)
        self.patterns = collections.OrderedDict()

        if not filename:
            # No config: a single catch-all namespace named "total".
            self.patterns[re.compile(".*")] = "total"
            self.config.add_section("total")
            return

        self.config.read(filename)
        for section in self.config.sections():
            pattern = re.compile(self.config.get(section, "pattern"))
            self.patterns[pattern] = section

    def lookup(self, metric_name):
        """Return the namespace corresponding to the metric."""
        # First matching pattern wins (insertion order).
        for pattern, section in self.patterns.items():
            if pattern.match(metric_name):
                return section, self.config.items(section)
        return "none", None
def get_conf_stanzas(conf_name):
    '''Get stanzas of `conf_name`

    :param conf_name: Config file.
    :type conf_name: ``string``
    :returns: Config stanzas.
    :rtype: ``dict``

    Usage::
       >>> stanzas = get_conf_stanzas('server')
       >>> return: {'serverName': 'testServer', 'sessionTimeout': '1h', ...}
    '''

    # btool expects the bare config name without the .conf suffix.
    if conf_name.endswith('.conf'):
        conf_name = conf_name[:-5]

    # TODO: dynamically caculate SPLUNK_HOME
    btool_cli = [
        op.join(os.environ['SPLUNK_HOME'], 'bin', 'btool'), conf_name, 'list'
    ]
    p = subprocess.Popen(btool_cli,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    out, _ = p.communicate()

    if isinstance(out, bytes):
        out = out.decode()

    out = StringIO(out)
    parser = ConfigParser()
    # Preserve option-name case (default optionxform lower-cases keys).
    parser.optionxform = str
    # read_file() exists since Python 3.2; readfp() covers older runtimes.
    if sys.version_info[:2] >= (3, 2):
        parser.read_file(out)
    else:
        parser.readfp(out)

    out = {}
    for section in parser.sections():
        out[section] = {item[0]: item[1] for item in parser.items(section)}
    return out
def loadSubs(self, filename):
    """Load a substitutions file.

    The file must be in the Windows-style INI format (see the
    standard ConfigParser module docs for information on this
    format).  Each section of the file is loaded into its own
    substituter.
    """
    parser = ConfigParser()
    # Fixed: the py2-only `file()` builtin is gone in Python 3; open the
    # file with a context manager so it is closed even if parsing raises.
    with open(filename) as inFile:
        # read_file() replaces the deprecated readfp().
        parser.read_file(inFile, filename)
    for s in parser.sections():
        # Add a new WordSub instance for this section. If one already
        # exists, delete it.
        if s in self._subbers:
            del(self._subbers[s])
        self._subbers[s] = WordSub()
        # iterate over the key,value pairs and add them to the subber
        for k, v in parser.items(s):
            self._subbers[s][k] = v
def main():
    """Re-run historical 'gops_join_1' jobs and report those whose stored
    output no longer matches a fresh run of the command line.

    Reads the app config from the ini file given as argv[1], then prints a
    tab-separated report of mismatching jobs.
    """
    ini_file = sys.argv[1]
    # 'here' default supports %(here)s interpolation in the ini file.
    conf_parser = ConfigParser({'here': os.getcwd()})
    conf_parser.read(ini_file)
    configuration = {}
    for key, value in conf_parser.items("app:main"):
        configuration[key] = value
    database_connection = configuration['database_connection']
    file_path = configuration['file_path']
    app = TestApplication(database_connection=database_connection, file_path=file_path)
    jobs = {}
    try:
        # Select finished 'gops_join_1' jobs created before the cutoff date,
        # excluding command lines containing '-m 1'.
        for job in app.model.Job.filter(sa.and_(app.model.Job.table.c.create_time < '2008-12-16',
                                                app.model.Job.table.c.state == 'ok',
                                                app.model.Job.table.c.tool_id == 'gops_join_1',
                                                sa.not_(app.model.Job.table.c.command_line.like('%-m 1 %')))).all():
            print("# processing job id %s" % str(job.id))
            for jtoda in job.output_datasets:
                print("# --> processing JobToOutputDatasetAssociation id %s" % str(jtoda.id))
                hda = app.model.HistoryDatasetAssociation.get(jtoda.dataset_id)
                print("# ----> processing HistoryDatasetAssociation id %s" % str(hda.id))
                if not hda.deleted:
                    # Probably don't need this check, since the job state should suffice, but...
                    if hda.dataset.state == 'ok':
                        history = app.model.History.get(hda.history_id)
                        print("# ------> processing history id %s" % str(history.id))
                        if history.user_id:
                            cmd_line = str(job.command_line)
                            # Re-run the job with token 4 of the command line
                            # (the output path) redirected to a temp file,
                            # then diff it against the original output.
                            new_output = tempfile.NamedTemporaryFile('w')
                            new_cmd_line = " ".join(map(str, cmd_line.split()[:4])) + " " + new_output.name + " " + " ".join(map(str, cmd_line.split()[5:]))
                            job_output = cmd_line.split()[4]
                            try:
                                os.system(new_cmd_line)
                            except Exception:
                                pass
                            diff_status = os.system('diff %s %s >> /dev/null' % (new_output.name, job_output))
                            if diff_status == 0:
                                # Outputs identical: nothing to report.
                                continue
                            print("# --------> Outputs differ")
                            user = app.model.User.get(history.user_id)
                            # Record enough context to identify and contact
                            # the owner of the affected history.
                            jobs[job.id] = {}
                            jobs[job.id]['hda_id'] = hda.id
                            jobs[job.id]['hda_name'] = hda.name
                            jobs[job.id]['hda_info'] = hda.info
                            jobs[job.id]['history_id'] = history.id
                            jobs[job.id]['history_name'] = history.name
                            jobs[job.id]['history_update_time'] = history.update_time
                            jobs[job.id]['user_email'] = user.email
    except Exception as e:
        print("# caught exception: %s" % e)
    print("\n\n# Number of incorrect Jobs: %d\n\n" % (len(jobs)))
    print("#job_id\thda_id\thda_name\thda_info\thistory_id\thistory_name\thistory_update_time\tuser_email")
    for jid in jobs:
        print('%s\t%s\t"%s"\t"%s"\t%s\t"%s"\t"%s"\t%s' % (str(jid), str(jobs[jid]['hda_id']), jobs[jid]['hda_name'], jobs[jid]['hda_info'], str(jobs[jid]['history_id']), jobs[jid]['history_name'], jobs[jid]['history_update_time'], jobs[jid]['user_email']))
    sys.exit(0)
def makeconfigfile(fname,beamlist,radarname,simparams_orig):
    """This will make the config file based off of the desired input parmeters.

    Writes .pickle, .yml or .ini output depending on the extension of
    ``fname``; any other extension raises ValueError.

    Inputs
        fname - Name of the file as a string.
        beamlist - A list of beams numbers used by the AMISRS
        radarname - A string that is the name of the radar being simulated.
        simparams_orig - A set of simulation parameters in a dictionary.
    """
    fname = Path(fname).expanduser()

    curpath = Path(__file__).resolve().parent
    # default.ini supplies human-readable notes per parameter (section
    # 'simparamsnotes'), used to emit comment lines in the .ini branch.
    d_file = curpath/'default.ini'
    fext = fname.suffix

    # reduce the number of stuff needed to be saved and avoid problems with writing
    keys2save = ['IPP', 'TimeLim', 'RangeLims', 'Pulselength', 't_s', 'Pulsetype',
                 'Tint', 'Fitinter', 'NNs', 'dtype', 'ambupsamp', 'species',
                 'numpoints', 'startfile', 'FitType','beamrate', 'outangles']

    # Fill in defaults. NOTE(review): this mutates the caller's dict.
    if not 'beamrate' in simparams_orig.keys():
        simparams_orig['beamrate'] = 1
    if not 'outangles' in simparams_orig.keys():
        simparams_orig['outangles'] = beamlist
    simparams = {i:simparams_orig[i] for i in keys2save}
    if fext =='.pickle':
        pickleFile = fname.open('wb')
        pickle.dump([{'beamlist':beamlist,'radarname':radarname},simparams],pickleFile)
        pickleFile.close()
    elif fext=='.yml':
        with fname.open('w') as f:
            yaml.dump([{'beamlist':beamlist,'radarname':radarname},simparams], f)

    elif fext =='.ini':
        defaultparser = ConfigParser()
        defaultparser.read(str(d_file))
#        config = configparser()
#        config.read(fname)
        cfgfile = open(str(fname),'w')
        # allow_no_value lets us write comment-style keys (leading '; ').
        config = ConfigParser(allow_no_value = True)

        config.add_section('section 1')
        # Beam numbers serialized as a space-separated string.
        beamstring = ""
        for beam in beamlist:
            beamstring += str(beam)
            beamstring += " "
        config.set('section 1','; beamlist must be list of ints')
        config.set('section 1','beamlist',beamstring)
        config.set('section 1','; radarname can be pfisr, risr, or sondastrom')
        config.set('section 1','radarname',radarname)

        config.add_section('simparams')
        config.add_section('simparamsnames')
        defitems = [i[0] for i in defaultparser.items('simparamsnotes')]
        for param in simparams:
            if param=='Beamlist':
                continue
            if param.lower() in defitems:
                paramnote = defaultparser.get('simparamsnotes',param.lower())
            else:
                paramnote = 'Not in default parameters'
            config.set('simparams','; '+param +' '+paramnote)
            # for the output list of angles
            if param.lower()=='outangles':
                outstr = ''
                beamlistlist = simparams[param]
                if beamlistlist=='':
                    beamlistlist=beamlist
                for ilist in beamlistlist:
                    # Each entry may itself be a list/array of beam numbers.
                    if isinstance(ilist,list) or isinstance(ilist,sp.ndarray):
                        for inum in ilist:
                            outstr=outstr+str(inum)+' '
                    else:
                        outstr=outstr+str(ilist)
                    outstr=outstr+', '
                # Drop the trailing ", " separator.
                outstr=outstr[:-2]
                config.set('simparams',param,outstr)
            elif isinstance(simparams[param],list):
                # Lists serialized as space-separated strings.
                data = ""
                for a in simparams[param]:
                    data += str(a)
                    data += " "
                config.set('simparams',param,str(data))
            else:
                #TODO config.set() is obsolete, undefined behavior! use mapping protocol instead https://docs.python.org/3/library/configparser.html#mapping-protocol-access
                config.set('simparams',param,str(simparams[param]))
            config.set('simparamsnames',param,param)
        config.write(cfgfile)
        cfgfile.close()
    else:
        raise ValueError('fname needs to have an extension of .pickle or .ini')
def load(cfg_file=None, environment=None, overrides=None):
    """
    Load configuration.

    A configuration file consists of sections, led by a ``[section]`` header
    and followed by ``name: value`` entries. Lines beginning with ``'#'`` are
    ignored and may be used to provide comments.

    A configuration file can contain multiple sections. The configuration
    object is populated with values from the ``global`` section and
    additional sections based on the fully qualified domain name of the
    local host. For example, on the host ``tin.eqiad.wmnet`` the final value
    for a given setting would be the first value found in sections:
    ``tin.eqiad.wmnet``, ``eqiad.wmnet``, ``wmnet`` or ``global``. Sections
    not present in the configuration file will be ignored.

    Configuration values are loaded from a file specified by the ``-c`` or
    ``--conf`` command-line options or from the default locations with the
    following hierarchy, sorted by override priority:

    #. ``$(pwd)/scap/environments/<environment>/scap.cfg`` or
       ``$(pwd)/scap/scap.cfg`` (if no environment was specified)
    #. ``/etc/scap.cfg``

    For example, if a configuration parameter is set in
    ``$(pwd)/scap/scap.cfg`` and that same parameter is set in
    ``/etc/scap.cfg`` the value for that parameter set in
    ``$(pwd)/scap/scap.cfg`` will be used during execution.

    :param cfg_file: Alternate configuration file
    :param environment: the string path under which scap.cfg is found
    :param overrides: Dict of configuration values
    :returns: dict of configuration values
    """
    local_cfg = os.path.join(os.getcwd(), 'scap')

    parser = ConfigParser()
    if cfg_file:
        try:
            cfg_file = open(cfg_file)
        except TypeError:
            # Assume that cfg_file is already an open file
            pass

        parser.readfp(cfg_file)
    else:
        # Lowest priority first: later files win on duplicate keys.
        parser.read([
            '/etc/scap.cfg',
            os.path.join(local_cfg, 'scap.cfg'),
            utils.get_env_specific_filename(
                os.path.join(local_cfg, 'scap.cfg'),
                environment
            )
        ])

    fqdn = socket.getfqdn().split('.')

    # Sections from least to most specific: 'global', then each suffix of
    # the FQDN down to the full hostname (later sections override earlier).
    sections = ['global']
    sections += ['.'.join(fqdn[l:]) for l in range(0, len(fqdn))][::-1]

    # Seed from declared defaults; DEFAULT_CONFIG maps each key to a
    # 2-tuple whose second element is the default value.
    config = {key: value for key, (_, value) in DEFAULT_CONFIG.items()}

    for section in sections:
        if parser.has_section(section):
            # Do not interpolate items in the section.
            # Fixes crash on tin: 'int' object has no attribute 'find'
            for key, value in parser.items(section, True):
                config[key] = coerce_value(key, value)

    config = override_config(config, overrides)

    if not environment and config.get('environment', None):
        # The config itself selected an environment: reload with it applied.
        return load(cfg_file, config.get('environment'), overrides)

    config['environment'] = environment
    return config
def _read_ini(self, path): if self.ini_config_section is None: raise ValueError() parser = ConfigParser() parser.read(path) return {k: v for k, v in parser.items(self.ini_config_section)}
"""Validates the public username.""" if len(username) < 3: return "Public name must be at least 3 characters in length" if len(username) > 255: return "Public name cannot be more than 255 characters in length" if not(VALID_PUBLICNAME_RE.match(username)): return "Public name must contain only lower-case letters, numbers and '-'" return '' if __name__ == "__main__": parser = optparse.OptionParser(description='Create a user with API key.') parser.add_option('-c', dest='config', action='store', help='.ini file to retried toolshed configuration from') (args, options) = parser.parse_args() ini_file = args.config config_parser = ConfigParser({'here': os.getcwd()}) print("Reading ini file: ", ini_file) config_parser.read(ini_file) config_dict = {} for key, value in config_parser.items("app:main"): config_dict[key] = value config = tool_shed_config.Configuration(**config_dict) app = BootstrapApplication(config) user = create_user(app) if user is not None: api_key = create_api_key(app, user) print("Created new user with public username '", user.username, ". An API key was also created and associated with the user.") sys.exit(0) else: sys.exit("Problem creating a new user and an associated API key.")
def __init__(self, app, conf):
    """Configure the memcache middleware.

    Server list, serialization format and connection limit come from the
    filter ``conf``; anything missing there is looked up in
    ``<swift_dir>/memcache.conf``, with hard-coded fallbacks at the end.

    :param app: the WSGI app this middleware wraps
    :param conf: dict of filter configuration options
    """
    self.app = app
    self.memcache_servers = conf.get('memcache_servers')
    serialization_format = conf.get('memcache_serialization_support')
    try:
        # Originally, while we documented using memcache_max_connections
        # we only accepted max_connections
        max_conns = int(conf.get('memcache_max_connections',
                                 conf.get('max_connections', 0)))
    except ValueError:
        max_conns = 0
    memcache_options = {}
    # Only consult memcache.conf for options the filter conf left unset.
    if (not self.memcache_servers
            or serialization_format is None
            or max_conns <= 0):
        path = os.path.join(conf.get('swift_dir', '/etc/swift'),
                            'memcache.conf')
        memcache_conf = ConfigParser()
        if memcache_conf.read(path):
            # if memcache.conf exists we'll start with those base options
            try:
                memcache_options = dict(memcache_conf.items('memcache'))
            except NoSectionError:
                pass
            if not self.memcache_servers:
                try:
                    self.memcache_servers = \
                        memcache_conf.get('memcache', 'memcache_servers')
                except (NoSectionError, NoOptionError):
                    pass
            if serialization_format is None:
                try:
                    serialization_format = \
                        memcache_conf.get('memcache',
                                          'memcache_serialization_support')
                except (NoSectionError, NoOptionError):
                    pass
            if max_conns <= 0:
                try:
                    new_max_conns = \
                        memcache_conf.get('memcache',
                                          'memcache_max_connections')
                    max_conns = int(new_max_conns)
                except (NoSectionError, NoOptionError, ValueError):
                    pass

    # while memcache.conf options are the base for the memcache
    # middleware, if you set the same option also in the filter
    # section of the proxy config it is more specific.
    memcache_options.update(conf)
    # CONN_TIMEOUT / POOL_TIMEOUT / TRY_COUNT / IO_TIMEOUT are
    # module-level defaults defined elsewhere in this file.
    connect_timeout = float(memcache_options.get(
        'connect_timeout', CONN_TIMEOUT))
    pool_timeout = float(memcache_options.get(
        'pool_timeout', POOL_TIMEOUT))
    tries = int(memcache_options.get('tries', TRY_COUNT))
    io_timeout = float(memcache_options.get('io_timeout', IO_TIMEOUT))

    # Final fallbacks when nothing configured anywhere.
    if not self.memcache_servers:
        self.memcache_servers = '127.0.0.1:11211'
    if max_conns <= 0:
        max_conns = 2
    if serialization_format is None:
        # Default 2 disables both pickle paths (see allow_pickle /
        # allow_unpickle below).
        serialization_format = 2
    else:
        serialization_format = int(serialization_format)

    self.memcache = MemcacheRing(
        [s.strip() for s in self.memcache_servers.split(',') if s.strip()],
        connect_timeout=connect_timeout,
        pool_timeout=pool_timeout,
        tries=tries,
        io_timeout=io_timeout,
        allow_pickle=(serialization_format == 0),
        allow_unpickle=(serialization_format <= 1),
        max_conns=max_conns)
class Configuration(object):
    """INI-backed configuration container.

    Keys suffixed ``[int]`` or ``[bool]`` are converted to the matching
    Python type when a section is read back through :meth:`sanitize`.
    """

    # Subclasses may override with {key: value} and/or {section: {k: v}}.
    defaults = {}

    def __init__(self, filename=None):
        """Create the parser, seed defaults, optionally load ``filename``."""
        self._config = ConfigParser()
        self._set_defaults()
        self._state_drivers = {}
        if filename is not None:
            self.load(filename)

    def _set_defaults(self):
        """Set defaults for config"""
        self._config.add_section('main')
        for key, value in six.iteritems(self.defaults):
            if isinstance(value, dict):
                # A nested dict becomes its own section.
                self._config.add_section(key)
                for subkey, subvalue in six.iteritems(value):
                    self._config.set(key, subkey, subvalue)
            else:
                self._config.set('main', key, value)

    def load(self, filename):
        """Load the configuration by filename"""
        self._config.read(filename)

    def save(self, filename):
        """Save the configuration to a file"""
        with open(filename, 'w') as handle:
            self._config.write(handle)

    @staticmethod
    def sanitize(items):
        """Convert (key, value) pairs, honouring [int]/[bool] key suffixes."""
        options = {}
        for key, value in items:
            if key.endswith('[int]'):
                options[key[:-len('[int]')]] = int(value)
            elif key.endswith('[bool]'):
                lowered = value.lower()
                if lowered in BOOL_MAP[True]:
                    parsed = True
                elif lowered in BOOL_MAP[False]:
                    parsed = False
                else:
                    raise ValueError('Expected boolean for {}'.format(key))
                options[key[:-len('[bool]')]] = parsed
            else:
                options[key] = value
        return options

    def __getitem__(self, name):
        """Look up a whole section by name, or a single option in [main]."""
        if self._config.has_section(name):
            return self.sanitize(self._config.items(name))
        if name == 'main':
            raise ValueError('Missing main section of configuration')
        # Not a section name: treat it as an option in [main].
        return self['main'][name]

    def state_driver(self, name='ai'):
        """Get an instance of the state driver"""
        from database import state
        if name not in self._state_drivers:
            extras = self[name]
            kind = extras.pop('state-driver')
            if kind == 'redis':
                self._state_drivers[name] = state.RedisDriver(self, extras)
            elif kind == 'dict':
                self._state_drivers[name] = state.MemoryDriver(self, extras)
            else:
                raise ValueError('Unknown state driver')
        return self._state_drivers[name]