def load_config(config_file=_CONFIG_FILE):
    """Populate the module-level ``_cfg`` from *config_file*.

    :param config_file: path to the INI configuration file.
    :raises IOError: if *config_file* does not exist.
    """
    cfg_parser = ConfigParser()
    if not os.path.isfile(config_file):
        # IOError (an alias of OSError on py3) is more specific than the
        # bare Exception previously raised, and is still an Exception
        # subclass, so existing callers keep working.
        raise IOError(
            'configuration file not found: {0}'.format(config_file))
    cfg_parser.read(config_file)
    _cfg.load_config(cfg_parser)
def setUp(self):
    """Load reference signals, blanks and irradiation data for analysis
    60754-10 from a local validation config file, then compute the Ar/Ar
    age so the tests can compare it against the expected results."""
    rid = '60754-10'
    config = ConfigParser()
    # NOTE(review): hard-coded developer-specific path — this suite only
    # runs on a machine where this file exists; confirm intended.
    p = '/Users/ross/Sandbox/pychron_validation_data.cfg'
    config.read(p)
    # Each isotope option is a comma-separated "value,error" pair.
    signals = [list(map(float, x.split(','))) for x in
               [config.get('Signals-{}'.format(rid), k)
                for k in ['ar40', 'ar39', 'ar38', 'ar37', 'ar36']]]
    blanks = [list(map(float, x.split(','))) for x in
              [config.get('Blanks-{}'.format(rid), k)
               for k in ['ar40', 'ar39', 'ar38', 'ar37', 'ar36']]]
    irradinfo = [list(map(float, x.split(','))) for x in
                 [config.get('irrad-{}'.format(rid), k)
                  for k in ['k4039', 'k3839', 'ca3937', 'ca3837',
                            'ca3637', 'cl3638']]]
    j = config.get('irrad-{}'.format(rid), 'j')
    j = [float(x) for x in j.split(',')]
    # Baselines and backgrounds are zeroed out for this validation case.
    baselines = [(0, 0), (0, 0), (0, 0), (0, 0), (0, 0)]
    backgrounds = [(0, 0), (0, 0), (0, 0), (0, 0), (0, 0)]
    ar37df = config.getfloat('irrad-{}'.format(rid), 'ar37df')
    # Convert the Ar37 decay factor into an equivalent decay time.
    t = math.log(ar37df) / (constants.lambda_37.nominal_value * 365.25)
    irradinfo.append(t)
    # load results
    r = 'results-{}'.format(rid)
    self.age = config.getfloat(r, 'age')
    self.rad4039 = config.getfloat(r, 'rad4039')
    self.ca37k39 = config.getfloat(r, 'ca37k39')
    self.age_dict = calculate_arar_age(signals, baselines, blanks,
                                       backgrounds, j, irradinfo, )
def reload_constraints():
    """
    Parse SWIFT_CONF_FILE and reset module level global constraint attrs,
    populating OVERRIDE_CONSTRAINTS and EFFECTIVE_CONSTRAINTS along the way.
    """
    global SWIFT_CONSTRAINTS_LOADED, OVERRIDE_CONSTRAINTS
    SWIFT_CONSTRAINTS_LOADED = False
    OVERRIDE_CONSTRAINTS = {}
    constraints_conf = ConfigParser()
    # ConfigParser.read returns the list of files successfully parsed;
    # an empty list (missing/unreadable file) is falsy.
    if constraints_conf.read(utils.SWIFT_CONF_FILE):
        SWIFT_CONSTRAINTS_LOADED = True
        for name in DEFAULT_CONSTRAINTS:
            try:
                value = constraints_conf.get('swift-constraints', name)
            except NoOptionError:
                # Option missing: fall back to the default later.
                pass
            except NoSectionError:
                # We are never going to find the section for another option
                break
            else:
                # Integer values stay ints; anything else is treated as a
                # comma-separated list.
                try:
                    value = int(value)
                except ValueError:
                    value = utils.list_from_csv(value)
                OVERRIDE_CONSTRAINTS[name] = value
    for name, default in DEFAULT_CONSTRAINTS.items():
        value = OVERRIDE_CONSTRAINTS.get(name, default)
        EFFECTIVE_CONSTRAINTS[name] = value
        # "globals" in this context is module level globals, always.
        globals()[name.upper()] = value
def pytest_collect_file(path, parent):
    """Handle running pylint on files discovered"""
    config = parent.config
    if not config.option.pylint:
        # Plugin not enabled on the command line.
        return
    if not path.ext == ".py":
        return
    # Find pylintrc to check ignore list
    pylintrc_file = config.option.pylint_rcfile or PYLINTRC
    # No pylintrc, therefore no ignores, so return the item.
    if not pylintrc_file or not exists(pylintrc_file):
        return PyLintItem(path, parent)
    pylintrc = ConfigParser()
    pylintrc.read(pylintrc_file)
    ignore_list = []
    try:
        ignore_string = pylintrc.get('MASTER', 'ignore')
        if len(ignore_string) > 0:
            ignore_list = ignore_string.split(',')
    except (NoSectionError, NoOptionError):
        # No [MASTER] ignore option: nothing is ignored.
        pass
    # Path relative to the pytest root, with the leading separator dropped.
    rel_path = path.strpath.replace(parent.fspath.strpath, '', 1)[1:]
    if not any(basename in rel_path for basename in ignore_list):
        return PyLintItem(path, parent)
def _load_object_post_as_copy_conf(self, conf):
    """Backfill ``object_post_as_copy`` from the proxy-server app section
    of the main config when the middleware section does not set it."""
    if ('object_post_as_copy' in conf or '__file__' not in conf):
        # Option is explicitly set in middleware conf. In that case,
        # we assume operator knows what he's doing.
        # This takes preference over the one set in proxy app
        return
    cp = ConfigParser()
    if os.path.isdir(conf['__file__']):
        read_conf_dir(cp, conf['__file__'])
    else:
        cp.read(conf['__file__'])
    try:
        pipe = cp.get("pipeline:main", "pipeline")
    except (NoSectionError, NoOptionError):
        return
    # The last entry in the pipeline is the proxy app.
    proxy_name = pipe.rsplit(None, 1)[-1]
    proxy_section = "app:" + proxy_name
    try:
        conf['object_post_as_copy'] = cp.get(proxy_section,
                                             'object_post_as_copy')
    except (NoSectionError, NoOptionError):
        pass
def read_config_file(cfgfile, options):
    """Read testflo settings from *cfgfile* onto the *options* object.

    Only the ``[testflo] skip_dirs`` option is honored; it is parsed as a
    comma-separated list into ``options.skip_dirs``.

    :param cfgfile: path to the INI config file (must exist).
    :param options: object whose ``skip_dirs`` attribute may be set.
    """
    config = ConfigParser()
    # Open explicitly so a missing file still raises (as before), but use
    # a with-block: the original readfp(open(cfgfile)) leaked the handle.
    with open(cfgfile) as fh:
        if hasattr(config, 'read_file'):
            config.read_file(fh)
        else:  # Python 2 fallback; readfp is removed in Python 3.12
            config.readfp(fh)
    if config.has_option('testflo', 'skip_dirs'):
        skips = config.get('testflo', 'skip_dirs')
        # Strip whitespace and drop empty entries.
        options.skip_dirs = [s.strip() for s in skips.split(',')
                             if s.strip()]
def sync():
    """Synchronize selected properties from galaxy.ini into reports.ini,
    rewriting reports.ini in place.  File paths may be overridden by the
    first (reports) and second (galaxy) command-line arguments."""
    # Add or replace the relevant properties from galaxy.ini
    # into reports.ini
    reports_config_file = "config/reports.ini"
    if len(argv) > 1:
        reports_config_file = argv[1]
    universe_config_file = "config/galaxy.ini"
    if len(argv) > 2:
        universe_config_file = argv[2]
    parser = ConfigParser()
    parser.read(universe_config_file)
    # Read the whole reports file first, then reopen it for writing.
    with open(reports_config_file, "r") as f:
        reports_config_lines = f.readlines()
    replaced_properties = set([])
    with open(reports_config_file, "w") as f:
        # Write all properties from reports config replacing as
        # needed.
        for reports_config_line in reports_config_lines:
            (line, replaced_property) = get_synced_line(reports_config_line,
                                                        parser)
            if replaced_property:
                replaced_properties.add(replaced_property)
            f.write(line)
        # If any properties appear in universe config and not in
        # reports write these as well.
        for replacement_property in REPLACE_PROPERTIES:
            if parser.has_option(MAIN_SECTION, replacement_property) and \
                    not (replacement_property in replaced_properties):
                f.write(get_universe_line(replacement_property, parser))
def get_stackstorm_version():
    """
    Return StackStorm version including git commit revision if running a dev
    release and a file with package metadata which includes git revision is
    available.

    :rtype: ``str``
    """
    if 'dev' in __version__:
        version = __version__
        if not os.path.isfile(PACKAGE_METADATA_FILE_PATH):
            # No metadata file: report the plain dev version.
            return version
        config = ConfigParser()
        try:
            config.read(PACKAGE_METADATA_FILE_PATH)
        except Exception:
            # Metadata file unreadable/corrupt: fall back to plain version.
            return version
        try:
            git_revision = config.get('server', 'git_sha')
        except Exception:
            return version
        version = '%s (%s)' % (version, git_revision)
    else:
        version = __version__
    return version
def get_config(options):
    """Locate, permission-check and parse the s3 config file.

    Resolution order: --config option, then the S3CFG environment
    variable, then ~/.pegasus/s3cfg, then legacy ~/.s3cfg.
    Raises Exception when no file is found or its permissions allow
    group/other access.
    """
    S3CFG = os.getenv("S3CFG", None)
    if options.config:
        # Command-line overrides everything
        cfg = options.config
    elif S3CFG is not None:
        # Environment variable overrides defaults
        cfg = S3CFG
    else:
        # New default
        new_default = os.path.expanduser("~/.pegasus/s3cfg")
        if os.path.isfile(new_default):
            cfg = new_default
        else:
            # If the new default doesn't exist, try the old default
            cfg = os.path.expanduser("~/.s3cfg")
    if not os.path.isfile(cfg):
        raise Exception("Config file not found")
    debug("Found config file: %s" % cfg)
    # Make sure nobody else can read the file
    mode = os.stat(cfg).st_mode
    if mode & (stat.S_IRWXG | stat.S_IRWXO):
        raise Exception("Permissions of config file %s are too liberal" % cfg)
    config = ConfigParser(DEFAULT_CONFIG)
    config.read(cfg)
    return config
def handleSection(self, section, items):
    """Find Active runs for *section* whose locale is not in the section's
    locale list and interactively offer to delete them.  When the list is
    'all', the locale set is fetched from the tree's l10n.ini over HTTP."""
    locales = items['locales']
    if locales == 'all':
        inipath = '/'.join((
            items['repo'], items['mozilla'],
            'raw-file', 'default',
            items['l10n.ini']
        ))
        ini = ConfigParser()
        ini.readfp(urlopen(inipath))
        # Resolve the all-locales file relative to the l10n.ini location.
        allpath = urljoin(
            urljoin(inipath, ini.get('general', 'depth')),
            ini.get('general', 'all'))
        locales = urlopen(allpath).read()
        locales = locales.split()
    # Obsolete entries: active runs whose locale is not configured.
    obs = (Active.objects
           .filter(run__tree__code=section)
           .exclude(run__locale__code__in=locales)
           .order_by('run__locale__code'))
    obslocs = ' '.join(obs.values_list('run__locale__code', flat=True))
    if not obslocs:
        self.stdout.write(' OK\n')
        return
    # Empty answer defaults to yes.
    s = input('Remove %s? [Y/n] ' % obslocs)
    if s.lower() == 'y' or s == '':
        obs.delete()
def read_mcf(mcf):
    """Return the merged section dict of an MCF file.

    Files referenced via a section's ``base_mcf`` key are included
    recursively (base files are read first, so later files override).

    :param mcf: path to the top-level MCF file.
    :returns: dict of sections as stored by ConfigParser.
    """
    mcf_list = []

    def makelist(mcf2):
        """recursive function for MCF by reference inclusion"""
        c = ConfigParser()
        LOGGER.debug('reading {}'.format(mcf2))
        with codecs.open(mcf2, encoding='utf-8') as fh:
            c.readfp(fh)
        mcf_dict = c.__dict__['_sections']
        for section in mcf_dict.keys():
            if 'base_mcf' in mcf_dict[section]:
                base_mcf_path = get_abspath(mcf,
                                            mcf_dict[section]['base_mcf'])
                makelist(base_mcf_path)  # recurse into the base file first
                mcf_list.append(mcf2)
            else:  # leaf
                mcf_list.append(mcf2)

    makelist(mcf)

    c = ConfigParser()
    for mcf_file in mcf_list:
        # BUG FIX: log the file actually being read (previously this
        # always logged the top-level 'mcf' path).
        LOGGER.debug('reading {}'.format(mcf_file))
        with codecs.open(mcf_file, encoding='utf-8') as fh:
            c.readfp(fh)
    mcf_dict = c.__dict__['_sections']
    return mcf_dict
def update_from(self, file_path):
    """
    Reads and loads user customised settings from the given file path.

    :param file_path: Absolute path to user settings file conf.cfg.
    """
    assert isinstance(file_path, six.text_type)
    cfg_parser = ConfigParser(inline_comment_prefixes=("#",))
    cfg_parser.read(file_path)
    ptpycfg = cfg_parser["ptpython"]
    # Unbound converter methods, tried in order; the first that parses
    # the raw string wins.
    converters = [ConfigParser.getboolean, ConfigParser.getint,
                  ConfigParser.getfloat]
    for key in ptpycfg:
        converted = False
        if key not in self.user_defined:
            # Only settings provided in initial defaults dict can get
            # customised with user defined values from conf.cfg file.
            continue
        for func in converters:
            try:
                value = func(cfg_parser, "ptpython", key)
            except ValueError:
                continue
            else:
                self.user_defined[key] = value
                converted = True
                break
        if not converted:
            # No converter matched: keep the raw string value.
            self.user_defined[key] = ptpycfg.get(key, "")
def _populate_config_from_old_location(self, conf):
    """Backfill rate-limit/get-time settings from the proxy-server app
    section of the main config when the middleware section sets none of
    them."""
    if ('rate_limit_after_segment' in conf or
            'rate_limit_segments_per_sec' in conf or
            'max_get_time' in conf or
            '__file__' not in conf):
        # Explicit middleware settings (or no config path) win.
        return
    cp = ConfigParser()
    if os.path.isdir(conf['__file__']):
        read_conf_dir(cp, conf['__file__'])
    else:
        cp.read(conf['__file__'])
    try:
        pipe = cp.get("pipeline:main", "pipeline")
    except (NoSectionError, NoOptionError):
        return
    # The last entry in the pipeline is the proxy app.
    proxy_name = pipe.rsplit(None, 1)[-1]
    proxy_section = "app:" + proxy_name
    for setting in ('rate_limit_after_segment',
                    'rate_limit_segments_per_sec',
                    'max_get_time'):
        try:
            conf[setting] = cp.get(proxy_section, setting)
        except (NoSectionError, NoOptionError):
            pass
def restore_rois(self, roifile):
    """restore ROI setting from ROI.dat file"""
    cp = ConfigParser()
    cp.read(roifile)
    rois = []
    self.mcas[0].clear_rois()
    prefix = self.mcas[0]._prefix
    if prefix.endswith('.'):
        prefix = prefix[:-1]
    iroi = 0
    for a in cp.options('rois'):
        if a.lower().startswith('roi'):
            # Each ROI option is "name|lo hi".
            name, dat = cp.get('rois', a).split('|')
            lims = [int(i) for i in dat.split()]
            lo, hi = lims[0], lims[1]
            # print('ROI ', name, lo, hi)
            roi = ROI(prefix=prefix, roi=iroi)
            roi.LO = lo
            roi.HI = hi
            roi.NM = name.strip()
            rois.append(roi)
            iroi += 1
    # Let channel-access callbacks settle before pushing the ROI list.
    poll(0.050, 1.0)
    self.mcas[0].set_rois(rois)
    # Copy the first MCA's calibration to all the others.
    cal0 = self.mcas[0].get_calib()
    for mca in self.mcas[1:]:
        mca.set_rois(rois, calib=cal0)
def reader(filename=None):
    """Return a ConfigParser loaded from *filename*.

    :param filename: path to the INI file; defaults to the module-level
        ``ini_file`` when None.
    """
    if filename is None:
        filename = ini_file
    cfg = ConfigParser()
    # BUG FIX: log the file actually read; previously this always logged
    # the module default ini_file even when an explicit path was passed.
    log.debug("Reading configuration from {} ...".format(filename))
    cfg.read(filename)
    return cfg
def _get_attach_points(self, info, size_request):
    """Return (x, y) badge attach point as fractions of the icon size,
    from GTK icon info or, failing that, from a sibling .icon file."""
    has_attach_points_, attach_points = info.get_attach_points()
    attach_x = attach_y = 0
    if attach_points:
        # this works only for Gtk < 3.14
        # https://developer.gnome.org/gtk3/stable/GtkIconTheme.html
        # #gtk-icon-info-get-attach-points
        attach_x = float(attach_points[0].x) / size_request
        attach_y = float(attach_points[0].y) / size_request
    elif info.get_filename():
        # try read from the .icon file
        icon_filename = info.get_filename().replace('.svg', '.icon')
        if icon_filename != info.get_filename() and \
                os.path.exists(icon_filename):
            try:
                with open(icon_filename) as config_file:
                    cp = ConfigParser()
                    cp.readfp(config_file)
                    attach_points_str = cp.get('Icon Data', 'AttachPoints')
                    attach_points = attach_points_str.split(',')
                    # .icon files store per-mille coordinates.
                    attach_x = float(attach_points[0].strip()) / 1000
                    attach_y = float(attach_points[1].strip()) / 1000
            except Exception as e:
                logging.exception('Exception reading icon info: %s', e)
    return attach_x, attach_y
def build_cli_examples(_):
    """Build the CLI example RST pages listed in examples.ini, running the
    associated shell commands, and write an examples.rst toctree index."""
    logger = logging.getLogger('cli-examples')
    clidir = os.path.join(SPHINX_DIR, 'cli')
    exini = os.path.join(clidir, 'examples.ini')
    exdir = os.path.join(clidir, 'examples')
    if not os.path.isdir(exdir):
        os.makedirs(exdir)
    config = ConfigParser()
    config.read(exini)
    rsts = []
    for sect in config.sections():
        rst, cmd = _build_cli_example(config, sect, exdir, logger)
        if cmd:
            # Example needs regenerating: run its shell command.
            logger.info('[cli] running example {0!r}'.format(sect))
            logger.debug('[cli] $ {0}'.format(cmd))
            subprocess.check_call(cmd, shell=True)
            logger.debug('[cli] wrote {0}'.format(cmd.split()[-1]))
        rsts.append(rst)
    with open(os.path.join(exdir, 'examples.rst'), 'w') as f:
        f.write('.. toctree::\n :glob:\n\n')
        for rst in rsts:
            # Paths in the toctree are relative to SPHINX_DIR.
            f.write(' {0}\n'.format(rst[len(SPHINX_DIR):]))
def __new__(cls):
    """Return the cached ConfigParser, re-reading the config files only
    when the current click context has changed since the last call."""
    ctx, config = cls.ctx_and_config
    current_ctx = click.get_current_context(silent=True)
    if current_ctx != ctx:
        # New context: rebuild the parser and refresh the cache.
        config = ConfigParser()
        config.read(cls.filenames)
        cls.ctx_and_config = (current_ctx, config)
    return config
def test_list_with_no_entries(self):
    """An empty list value should be written as an empty string."""
    name = self.make_empty_temp_file()
    to_config_file(name, "section.name", {"ports": []})
    self.assertTrue(os.path.isfile(name))
    config = ConfigParser()
    config.read(name)
    self.assertEqual("", config.get("section.name", "ports"))
def test_list_with_one_entry(self):
    """A single-element list round-trips through the config file."""
    name = self.make_empty_temp_file()
    to_config_file(name, "section.name", {"ports": ["port1"]})
    self.assertTrue(os.path.isfile(name))
    config = ConfigParser()
    config.read(name)
    self.assertListEqual(["port1"],
                         config.get("section.name", "ports").split(","))
def handleApps(self, **kwargs):
    """Fetch the master l10nbuilds.ini over HTTP and process each of its
    sections via handleSection."""
    l10nbuilds = urlopen(
        'https://raw.githubusercontent.com/Pike/master-ball/'
        'master/l10n-master/l10nbuilds.ini')
    cp = ConfigParser()
    cp.readfp(l10nbuilds)
    for section in cp.sections():
        self.stdout.write(section + '\n')
        self.handleSection(section, dict(cp.items(section)))
def parse_config_file(file_path):
    """Extract batch-scoring arguments from the INI file at *file_path*.

    Returns an empty dict when the file has no [batch_scoring] section;
    otherwise the section's options are validated and returned.
    """
    parser = ConfigParser()
    parser.read(file_path)
    if 'batch_scoring' not in parser.sections():
        # We are return empty dict, because there is nothing in this file
        # that related to arguments to batch scoring.
        return {}
    return config_validator(dict(parser.items('batch_scoring')))
def _parseINI(text):
    """Parse INI *text* into a ConfigParser, working on both Python 2
    (readfp) and Python 3 (read_file)."""
    from six.moves.configparser import ConfigParser
    from six.moves import cStringIO
    parser = ConfigParser()
    try:
        parser.read_file(cStringIO(text))
    except AttributeError:  # Python 2
        parser.readfp(cStringIO(text))
    return parser
def as_ini(self):
    """Serialize the context's properties as an INI string under the
    [DEFAULT] section."""
    context = self.context
    parser = ConfigParser()
    stream = cStringIO()
    for k, v in context.propertyItems():
        parser.set('DEFAULT', k, str(v))
    parser.write(stream)
    return stream.getvalue()
def open_buildout_configfile(filepath="buildout.cfg", write_on_exit=False):
    """Context-manager generator yielding a ConfigParser loaded from
    *filepath*; when *write_on_exit* is true, the (possibly modified)
    parser is written back to the file on exit."""
    parser = ConfigParser()
    parser.read(filepath)
    try:
        yield parser
    finally:
        if not write_on_exit:
            return
        with open(filepath, 'w') as fd:
            parser.write(fd)
def test_empty_dict(self):
    """An empty dict still creates the section, with all valid keys."""
    name = self.make_empty_temp_file()
    to_config_file(name, "section.name", {})
    self.assertTrue(os.path.isfile(name))
    config = ConfigParser()
    config.read(name)
    self.assertTrue(config.has_section("section.name"))
    self.assertSetEqual(_VALID_KEYS, set(config.options("section.name")))
def __init__(self, *args, **kwargs):
    """Initialize the parser with a sorted dict type so options are
    emitted in a stable order (old-style base class call on Python 2)."""
    if sys.version_info[0] == 2:
        if sys.version_info[:2] >= (2, 6):
            # SafeConfigParser(dict_type=) supported in 2.6+
            kwargs["dict_type"] = SortedDict
        # ConfigParser is an old-style class on py2: no super().
        ConfigParser.__init__(self, *args, **kwargs)
    else:
        kwargs["dict_type"] = SortedDict
        super(SortedConfigParser, self).__init__(*args, **kwargs)
    # Track option names already emitted.
    self.seen = set()
def _get_config(self):
    """Return a ConfigParser loaded from the spectrometer config path,
    falling back to spectrometer_dir/config.cfg when lookup fails."""
    config = ConfigParser()
    try:
        p = get_spectrometer_config_path()
    except IOError:
        p = os.path.join(paths.spectrometer_dir, 'config.cfg')
    config.read(p)
    return config
def main():
    """Entry point: parse the config file option, build the rfpkg client,
    configure logging and dispatch the selected subcommand."""
    # Setup an argparser and parse the known commands to get the config file
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('-C', '--config', help='Specify a config file to use',
                        default='/etc/rpkg/rfpkg.conf')
    (args, other) = parser.parse_known_args()
    # Make sure we have a sane config file
    if not os.path.exists(args.config) and \
            not other[-1] in ['--help', '-h', 'help']:
        sys.stderr.write('Invalid config file %s\n' % args.config)
        sys.exit(1)
    # Setup a configuration object and read config file data
    config = ConfigParser()
    config.read(args.config)
    client = rfpkg.cli.rfpkgClient(config)
    client.do_imports(site='rfpkg')
    client.parse_cmdline()
    if not client.args.path:
        try:
            client.args.path = pyrpkg.utils.getcwd()
        # NOTE(review): bare except — intentionally best-effort, but it
        # also swallows KeyboardInterrupt; consider `except Exception`.
        except:
            print('Could not get current path, have you deleted it?')
            sys.exit(1)
    # setup the logger -- This logger will take things of INFO or DEBUG and
    # log it to stdout. Anything above that (WARN, ERROR, CRITICAL) will go
    # to stderr. Normal operation will show anything INFO and above.
    # Quiet hides INFO, while Verbose exposes DEBUG. In all cases WARN or
    # higher are exposed (via stderr).
    log = pyrpkg.log
    client.setupLogging(log)
    if client.args.v:
        log.setLevel(logging.DEBUG)
    elif client.args.q:
        log.setLevel(logging.WARNING)
    else:
        log.setLevel(logging.INFO)
    # Run the necessary command
    try:
        sys.exit(client.args.command())
    except KeyboardInterrupt:
        pass
    except Exception as e:
        log.error('Could not execute %s: %s' %
                  (client.args.command.__name__, e))
        if client.args.v:
            raise
        sys.exit(1)
def log_in(client):
    """Authorizes ImgurClient to use user account"""
    config = ConfigParser()
    config.read('auth.ini')
    access_token = config.get('credentials', 'access_token')
    refresh_token = config.get('credentials', 'refresh_token')
    if len(access_token) > 0 and len(refresh_token) > 0:
        # Cached tokens available: reuse them without prompting.
        client.set_user_auth(access_token, refresh_token)
        return client
    # No cached credentials: run the interactive PIN flow in a browser.
    authorization_url = client.get_auth_url('pin')
    webbrowser.open(authorization_url)
    pin = input('Please input your pin\n>\t')
    credentials = client.authorize(pin)  # grant_type default is 'pin'
    access_token = credentials['access_token']
    refresh_token = credentials['refresh_token']
    # Persist the new tokens for the next run.
    config.set('credentials', 'access_token', access_token)
    config.set('credentials', 'refresh_token', refresh_token)
    save_config(config)
    client.set_user_auth(access_token, refresh_token)
    return client
def read_test_ini(file_dir=FILE_DIR, section="FacebookAuth"):
    """Load *section* of test.ini under *file_dir* into a dict.

    Raises EnvironmentError when the file or the section is missing.
    """
    ini_file_path = os.path.join(file_dir, "test.ini")
    if not os.path.isfile(ini_file_path):
        raise EnvironmentError(
            "File test.ini not existing in path '{0}'".format(FILE_DIR))
    parser = ConfigParser()
    parser.read(ini_file_path)
    if section not in parser.sections():
        raise EnvironmentError(
            "Section '{0}' not in test.ini".format(section))
    # Map every option in the section to its raw string value.
    return {option: parser.get(section, option)
            for option in parser.options(section)}
def stayloggedin(self):
    """ handles timeout constraints of the link before exiting """
    config = ConfigParser()
    config.read(self.SessionFile)
    # Record when the last command was issued so the session stays fresh.
    config['DEFAULT']['lastcommandtime'] = repr(time())
    with open(self.SessionFile, 'w') as configfile:
        config.write(configfile)
    self.link._do_delay()
    logging.debug('Staying logged in')
    return
def GetConfigFromFile(cls, filename):
    """Build a config instance from the [tests] section of *filename*.

    NOTE(review): validation relies on `assert`, which is stripped when
    Python runs with -O.
    """
    cp = ConfigParser()
    # cp.read returns the list of files parsed; filename must be in it.
    assert filename in cp.read([filename])
    config = cls()
    servers = cls.parse_multiline_options(cp.get("tests", "servers"))
    # Exactly two servers with three fields each are expected.
    assert len(servers) == 2 and len(servers[0]) == 3 and len(
        servers[1]) == 3, "incorrect servers configuration"
    config.server1 = ClientInfo(*servers[0])
    config.server2 = ClientInfo(*servers[1])
    config.notify_email = cp.get("tests", "notify_email")
    config.alt_user = cp.get("tests", "alt_user")
    return config
def _makeInstance(self, id, portal_type, subdir, import_context):
    """Construct content object *id* of *portal_type* in the context and
    populate its title/description from the subdir's .properties file.

    Returns the new object, or None when the type is invalid.
    """
    context = self.context
    subdir = '%s/%s' % (subdir, id)
    properties = self.read_data_file(import_context, '.properties', subdir)
    tool = getUtility(ITypesTool)
    try:
        tool.constructContent(portal_type, context, id)
    except ValueError:
        # invalid type
        return None
    content = context._getOb(id)
    if properties is not None:
        if '[DEFAULT]' not in properties:
            # Properties are not INI-shaped: try the DAV-aware adapter.
            try:
                adp = FolderishDAVAwareFileAdapter
                adp(content).import_(import_context, subdir)
                return content
            except (AttributeError, MethodNotAllowed):
                # Fall through to old implementation below
                pass
        lines = properties.splitlines()
        stream = StringIO('\n'.join(lines))
        parser = ConfigParser(defaults={
            'title': '',
            'description': 'NONE'
        })
        try:
            parser.read_file(stream)
        except AttributeError:  # Python 2
            parser.readfp(stream)
        title = parser.get('DEFAULT', 'title')
        description = parser.get('DEFAULT', 'description')
        content.setTitle(title)
        content.setDescription(description)
    return content
def parse_config(config_file=DEFAULT_CONFIG_FILE):
    """Return pip-save settings as a dict, falling back to DEFAULT_OPTIONS
    (with requirement_dev mirroring requirement) when the file is absent."""
    if not os.path.exists(config_file):
        config_dict = dict(DEFAULT_OPTIONS)
        config_dict['requirement_dev'] = config_dict['requirement']
        return config_dict
    config_dict = {}
    config = ConfigParser(DEFAULT_OPTIONS)
    config.read(config_file)
    config_dict['requirement'] = config.get('pip-save', 'requirement')
    config_dict['use_compatible'] = config.getboolean('pip-save',
                                                      'use_compatible')
    config_dict['requirement_dev'] = config.get('pip-save',
                                                'requirement_dev')
    return config_dict
def read_config():
    """Parse test.cfg into {'general': {...}, 'products': {name: {...}}}.

    Every section other than [general] is treated as a product section.
    """
    parser = ConfigParser()
    parser.read("test.cfg")
    # This only allows one product per whatever for now
    products = {}
    for section in parser.sections():
        if section == "general":
            continue
        products[section] = dict(parser.items(section))
    return {"general": dict(parser.items("general")),
            "products": products}
def import_library(libfilepointer):
    """Imports a units library, replacing any existing definitions."""
    global _UNIT_LIB
    global _UNIT_CACHE
    _UNIT_CACHE = {}
    _UNIT_LIB = ConfigParser()
    # Keep option names case-sensitive (default optionxform lowercases).
    _UNIT_LIB.optionxform = _do_nothing
    _UNIT_LIB.readfp(libfilepointer)
    required_base_types = ['length', 'mass', 'time', 'temperature', 'angle']
    _UNIT_LIB.base_names = list()
    # used by is_angle() and other base type checking
    _UNIT_LIB.base_types = dict()
    _UNIT_LIB.unit_table = dict()
    _UNIT_LIB.prefixes = dict()
    _UNIT_LIB.help = list()
    for prefix, factor in _UNIT_LIB.items('prefixes'):
        # Each prefix value is "factor[,comment]".
        factor, comma, comment = factor.partition(',')
        _UNIT_LIB.prefixes[prefix] = float(factor)
    base_list = [0] * len(_UNIT_LIB.items('base_units'))
    for i, (unit_type, name) in enumerate(_UNIT_LIB.items('base_units')):
        _UNIT_LIB.base_types[unit_type] = i
        powers = list(base_list)
        powers[i] = 1
        # print '%20s'%unit_type, powers
        # cant use add_unit because no base units exist yet
        _new_unit(name, 1, powers)
        _UNIT_LIB.base_names.append(name)
    # test for required base types
    missing = [
        utype for utype in required_base_types
        if not utype in _UNIT_LIB.base_types
    ]
    if missing:
        raise ValueError('Not all required base type were present in the'
                         ' config file. missing: %s, at least %s required'
                         % (missing, required_base_types))
    # Explicit unitless 'unit'.
    _new_unit('unitless', 1, list(base_list))
    _update_library(_UNIT_LIB)
    return _UNIT_LIB
def parse_configuration_file(self, modelFileName):
    """Load *modelFileName* (INI format), exposing every section as a
    same-named dict attribute on self (e.g. self.globalOptions)."""
    config = ConfigParser()
    # Keep option names case-sensitive.
    config.optionxform = str
    config.read(modelFileName)
    # all sections provided in the configuration/ini file
    self.allSections = config.sections()
    # read all sections
    for sec in self.allSections:
        vars(self)[sec] = {}  # example: to instantiate self.globalOptions
        options = config.options(sec)  # example: logFileDir
        for opt in options:
            val = config.get(sec, opt)  # value defined in every option
            # example: self.globalOptions['logFileDir'] = val
            self.__getattribute__(sec)[opt] = val
def getPluginSettings(themeDirectory, plugins=None):
    """Given an IResourceDirectory for a theme, return the settings for the
    given list of plugins (or all plugins, if not given) provided as a list
    of (name, plugin) pairs.

    Returns a dict of dicts, with the outer dict having plugin names as
    keys and containing plugins settings (key/value pairs) as values.
    """
    if plugins is None:
        plugins = getPlugins()
    # noinspection PyPep8Naming
    manifestContents = {}
    if themeDirectory.isFile(MANIFEST_FILENAME):
        parser = ConfigParser()
        fp = themeDirectory.openFile(MANIFEST_FILENAME)
        try:
            if six.PY2:
                parser.readfp(fp)
            else:
                # py3 ConfigParser works on text; the resource file is bytes.
                parser.read_string(fp.read().decode())
            for section in parser.sections():
                manifestContents[section] = {}
                for name, value in parser.items(section):
                    manifestContents[section][name] = value
        finally:
            try:
                fp.close()
            except AttributeError:
                pass
    pluginSettings = {}
    for name, plugin in plugins:
        # Plugin settings live in a "theme:<plugin-name>" section.
        pluginSettings[name] = manifestContents.get(
            "%s:%s" % (THEME_RESOURCE_NAME, name), {})  # noqa
    return pluginSettings
def config_section_to_dict(config_file, section):
    """
    Read a config file's section as a dict

    :param str config_file: filename of config file
    :param str section: section to pull data from
    :return: dict of key value pairs
    :rtype: dict
    """
    parser = ConfigParser()
    parser.read(config_file)
    section_dict = {}
    for option in parser.options(section):
        section_dict[option] = parser.get(section, option)
    return section_dict
def get_configured_plugins():
    """Retrieves a list of all plugins that the user has configured for
    availability within Vigilance.
    This list will prefer configurations from three sources in this order:
    1. The VIGILANCE_PLUGINS environment variable.
    2. A .vigilance file within the current working directory.
    3. A setup.cfg file within the current working directory.
    @returns A list of plugin specifier strings.
    @see load_suites
    """
    plugins = []
    parser = ConfigParser()
    parser.add_section('vigilance')
    # Later reads override earlier ones, so .vigilance beats setup.cfg.
    _read_config_file('setup.cfg', parser)
    _read_config_file('.vigilance', parser)
    vEnv = os.getenv('VIGILANCE_PLUGINS', None)
    if vEnv:
        # Environment variable takes highest precedence.
        parser.set('vigilance', 'plugins', vEnv)
    if parser.has_option('vigilance', 'plugins'):
        plugins = parser.get('vigilance', 'plugins').split(',')
    return plugins
def __init__(self, config=None, ipaconf=paths.IPA_DEFAULT_CONF):
    """Initialize KEM keys, filling host/realm/ldap_uri from the IPA
    default config file when available; explicit config wins for ldap_uri."""
    super(IPAKEMKeys, self).__init__(config)
    conf = ConfigParser()
    self.host = None
    self.realm = None
    self.ldap_uri = config.get('ldap_uri', None)
    # conf.read returns the list of parsed files; empty means no ipaconf.
    if conf.read(ipaconf):
        self.host = conf.get('global', 'host')
        self.realm = conf.get('global', 'realm')
        if self.ldap_uri is None:
            # raw=True: the URI may contain '%' characters.
            self.ldap_uri = conf.get('global', 'ldap_uri', raw=True)
    self._server_keys = None
def read_shares(self):
    """get invalid user from samba share conf"""
    if not os.path.isdir(ShareConfiguration.SHARES_UDM_DIR):
        return
    for filename in os.listdir(ShareConfiguration.SHARES_UDM_DIR):
        filename = os.path.join(ShareConfiguration.SHARES_UDM_DIR, filename)
        cfg = ConfigParser()
        cfg.read(filename)
        try:
            # The share name is the file's first (and only expected) section.
            share = Share(cfg.sections()[0])
        except IndexError:
            # File with no sections: skip it.
            continue
        if cfg.has_option(share.name, Restrictions.INVALID_USERS):
            share.invalid_users = shlex.split(
                cfg.get(share.name, Restrictions.INVALID_USERS))
        if cfg.has_option(share.name, Restrictions.HOSTS_DENY):
            share.hosts_deny = shlex.split(
                cfg.get(share.name, Restrictions.HOSTS_DENY))
        self._shares[share.name] = share
def load(self): cfp = ConfigParser() # p = os.path.join(paths.spectrometer_dir, 'config.cfg') p = get_spectrometer_config_path() cfp.read(p) gs = [] for section in cfp.sections(): g = SpectrometerParametersGroup(name=section) ps = [] for pp in cfp.options(section): v = cfp.getfloat(section, pp) ps.append(Parameter(name=pp, value=v)) g.parameters = ps gs.append(g) self.groups = gs
def update_library(filename):
    """
    Update units in current library from `filename`, which must contain a
    ``units`` section.

    filename: string or file
        Source of units configuration data.
    """
    if isinstance(filename, basestring):
        # 'rU' mode was removed in Python 3.11; universal newlines are the
        # default in text mode, so plain 'r' is equivalent.
        inp = open(filename, 'r')
    else:
        inp = filename
    try:
        cfg = ConfigParser()
        # Keep option names case-sensitive.
        cfg.optionxform = _do_nothing
        cfg.readfp(inp)
        _update_library(cfg)
    finally:
        # Close the stream even when it was passed in by the caller
        # (preserves the original behavior).
        inp.close()
def _load(self):
    """Read every option file as a single synthetic [top] INI section and
    convert each key/value pair via _l10n2rec."""
    self.files = self.options
    config = ConfigParser()
    for filepath in self.files:
        with codecs.open(filepath, 'r', encoding='utf-8') as stream:
            # Prepend a section header so plain key=value files parse.
            fakefile = StringIO("[top]\n" + stream.read())
            config.readfp(fakefile)
    return [
        self._l10n2rec(key, config.get('top', key))
        for key in config.options('top')
    ]
def config(filename='database.ini', section='postgresql'):
    """Return the options of *section* in *filename* as a dict.

    Raises Exception when the section is missing from the file.
    """
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        raise Exception('Section {0} not found in the {1} file'.format(
            section, filename))
    return {key: value for key, value in parser.items(section)}
def restore_rois(self, roifile):
    """restore ROI setting from ROI.dat file"""
    cp = ConfigParser()
    cp.read(roifile)
    roidat = []
    for a in cp.options('rois'):
        if a.lower().startswith('roi'):
            # Each ROI option is "name|lo hi".
            name, dat = cp.get('rois', a).split('|')
            lims = [int(i) for i in dat.split()]
            lo, hi = lims[0], lims[1]
            roidat.append((name.strip(), lo, hi))
    # Apply the same ROI list to every MCA.
    for mca in self.mcas:
        mca.set_rois(roidat)
def pytest_sessionstart(session):
    """Storing pylint settings on the session"""
    session.pylint_files = set()
    session.pylint_messages = {}
    session.pylint_config = None
    session.pylintrc_file = None
    session.pylint_ignore = []
    session.pylint_ignore_patterns = []
    session.pylint_msg_template = None
    config = session.config
    # Find pylintrc to check ignore list
    pylintrc_file = config.option.pylint_rcfile or PYLINTRC
    if pylintrc_file and not exists(pylintrc_file):
        # The directory of pytest.ini got a chance
        pylintrc_file = join(dirname(str(config.inifile)), pylintrc_file)
    if pylintrc_file and exists(pylintrc_file):
        session.pylintrc_file = pylintrc_file
        session.pylint_config = ConfigParser()
        session.pylint_config.read(pylintrc_file)
        try:
            ignore_string = session.pylint_config.get('MASTER', 'ignore')
            if ignore_string:
                session.pylint_ignore = ignore_string.split(',')
        except (NoSectionError, NoOptionError):
            pass
        try:
            session.pylint_ignore_patterns = session.pylint_config.get(
                'MASTER', 'ignore-patterns')
        except (NoSectionError, NoOptionError):
            pass
        try:
            session.pylint_msg_template = session.pylint_config.get(
                'REPORTS', 'msg-template'
            )
        except (NoSectionError, NoOptionError):
            pass
def zope_shell():
    """Open the ZODB named in a paste.ini and either run a script against
    it (extra argv) or drop into an interactive shell with the application
    root pre-bound."""
    if len(sys.argv) < 2:
        sys.stderr.write('Usage: %s paste.ini\n' % sys.argv[0])
        sys.exit(1)
    paste_ini = sys.argv[1]
    logging.config.fileConfig(
        paste_ini, {
            '__file__': paste_ini,
            'here': os.path.abspath(os.path.dirname(paste_ini))
        })
    config = ConfigParser()
    config.read(paste_ini)
    # XXX How to get to zope.conf is the only-application specific part.
    db = zope.app.wsgi.config(config.get('application:cms', 'zope_conf'))
    # Adapted from zc.zope3recipes.debugzope.debug()
    globs = {
        '__name__': '__main__',
        # Not really worth using zope.app.publication.ZopePublication.root_name
        'root': db.open().root()['Application'],
        'zeit': sys.modules['zeit'],
        'zope': sys.modules['zope'],
        'transaction': sys.modules['transaction'],
    }
    if len(sys.argv) > 2:
        # Script mode: shift argv so the script sees its own arguments.
        sys.argv[:] = sys.argv[2:]
        globs['__file__'] = sys.argv[0]
        exec(compile(open(sys.argv[0], "rb").read(), sys.argv[0], 'exec'),
             globs)
        sys.exit()
    else:
        zope.component.hooks.setSite(globs['root'])
        import code
        # Modeled after pyramid.scripts.pshell
        code.interact(local=globs, banner="""\
Python %s on %s
Type "help" for more information.

Environment:
  root  ZODB application root folder (already set as ZCA site)

Modules that were pre-imported for convenience:
  zope, zeit, transaction
""" % (sys.version, sys.platform))
def options(self, section, prefix=None, expand_vars=True, expand_user=True):
    """ Returns all options of a *section* in a list. When *prefix* is set,
    only options starting with that prefix are considered. Environment
    variable expansion is performed on every returned option name,
    depending on whether *expand_vars* and *expand_user* are *True*. """
    options = []
    for option in ConfigParser.options(self, section):
        if prefix and not option.startswith(prefix):
            continue
        if expand_vars:
            # Expand $VAR / ${VAR} references in the option name.
            option = os.path.expandvars(option)
        if expand_user:
            # Expand leading ~ / ~user in the option name.
            option = os.path.expanduser(option)
        options.append(option)
    return options
def __to_configparser(mcf_object):
    """normalize mcf input into ConfigParser object"""
    cp_obj = None
    if isinstance(mcf_object, ConfigParser):
        LOGGER.debug('mcf object is already a ConfigParser object')
        cp_obj = mcf_object
    elif '[metadata]' in mcf_object:
        # An inline MCF document always carries a [metadata] section.
        LOGGER.debug('mcf object is a string')
        s = StringIO(mcf_object)
        c = ConfigParser()
        c.readfp(s)
        cp_obj = c
    else:
        LOGGER.debug('mcf object is likely a filepath')
        c = ConfigParser()
        with codecs.open(mcf_object, encoding='utf-8') as fh:
            c.readfp(fh)
        cp_obj = c
    return cp_obj
def test_on_start_no_ssl(self, SSLDomain):
    """With plain amqp:// URLs, on_start must connect without an SSL
    domain and never construct SSLDomain."""
    confdata = six.StringIO("""[broker]
urls = amqp://broker1.example.com:5672 amqp://broker2.example.com:5672
topic_prefix = koji
connect_timeout = 10
send_timeout = 60
""")
    if six.PY2:
        conf = SafeConfigParser()
        conf.readfp(confdata)
    else:
        conf = ConfigParser()
        conf.read_file(confdata)
    handler = protonmsg.TimeoutHandler('amqp://broker1.example.com:5672',
                                       [], conf)
    event = MagicMock()
    handler.on_start(event)
    event.container.connect.assert_called_once_with(
        url='amqp://broker1.example.com:5672', reconnect=False,
        ssl_domain=None)
    self.assertEqual(SSLDomain.call_count, 0)
def logout(self, cutConnection=True, callback=None):
    """
    Log out from AniDB UDP API
    """
    config = ConfigParser()
    config.read(self.SessionFile)
    if config['DEFAULT']['loggedin'] == 'yes':
        # Reuse the stored session key for the logout command.
        self.link.session = config.get('DEFAULT', 'sessionkey')
        result = self.handle(LogoutCommand(), callback)
        if cutConnection:
            self.cut()
        # Persist the logged-out state back to the session file.
        config['DEFAULT']['loggedin'] = 'no'
        with open(self.SessionFile, 'w') as configfile:
            config.write(configfile)
        logging.debug('Logging out')
        return result
    logging.debug('Not logging out')
    return
def parse_nm_config(cfg):
    """Parse NetworkManager configuration text into a ConfigParser.

    Returns the populated parser, or None when the text cannot be parsed.
    """
    parser = ConfigParser()
    try:
        reader = getattr(parser, 'read_string', None)
        if reader is not None:
            # Python 3
            reader(cfg)
        else:
            # Python 2
            from cStringIO import StringIO
            parser.readfp(StringIO(cfg))
    except (ParsingError, TypeError) as e:
        api.current_logger().warning(
            'Error parsing NetworkManager configuration: {}'.format(e))
        return None
    return parser
def handle(self, *args, **options):
    # type: (*Any, **Any) -> None
    """Sync the API key from ~/.zuliprc into the matching local user."""
    config_path = os.path.join(os.environ["HOME"], ".zuliprc")
    if not os.path.exists(config_path):
        raise RuntimeError("No ~/.zuliprc found")

    config = ConfigParser()
    with open(config_path, 'r') as f:
        # Apparently, six.moves.configparser.ConfigParser is not
        # consistent between Python 2 and 3!
        if hasattr(config, 'read_file'):
            config.read_file(f, config_path)
        else:
            config.readfp(f, config_path)
    email = config.get("api", "email")
    api_key = config.get("api", "key")

    try:
        user_profile = get_user(email, get_realm("zulip"))
        user_profile.api_key = api_key
        user_profile.save(update_fields=["api_key"])
    except UserProfile.DoesNotExist:
        print("User %s does not exist; not syncing API key" % (email, ))
def test_export_site_with_exportable_simple_items_unicode_latin1(self):
    # Exercise the structure exporter against INI-aware items whose
    # title/description contain latin-1-encodable non-ASCII text; the
    # site charset is iso-8859-1, so property values should come back
    # encoded in latin-1.
    self._setUpAdapters()
    ITEM_IDS = ('foo', 'bar', 'baz')
    site = _makeFolder('site', site_folder=True)
    site._setProperty('default_charset', 'iso-8859-1', 'string')
    site.title = 'AAA'
    site.description = 'DESCRIPTION'
    ITEMS_TITLE = u'Actualit\xe9'
    ITEMS_DESCRIPTION = u'Actualit\xe9 r\xe9centes'
    # populate the site with INI-aware items carrying unicode metadata
    for id in ITEM_IDS:
        site._setObject(id, _makeINIAware(id))
        item = getattr(site, id)
        item.setTitle(ITEMS_TITLE)
        item.setDescription(ITEMS_DESCRIPTION)
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)
    # expected writes: .objects listing, .properties, then one .ini per item
    self.assertEqual(len(context._wrote), 2 + len(ITEM_IDS))
    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    objects = [x for x in reader(StringIO(text))]
    self.assertEqual(len(objects), 3)
    for index in range(len(ITEM_IDS)):
        self.assertEqual(objects[index][0], ITEM_IDS[index])
        self.assertEqual(objects[index][1], TEST_INI_AWARE)
        # per-item .ini files start at slot 2 (after .objects and .properties)
        filename, text, content_type = context._wrote[index + 2]
        self.assertEqual(filename, 'structure/%s.ini' % ITEM_IDS[index])
        object = site._getOb(ITEM_IDS[index])
        self.assertEqual(text.strip(), object.as_ini().strip())
        self.assertEqual(content_type, 'text/plain')
    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    parser = ConfigParser()
    parser.readfp(StringIO(text))
    # property values are written encoded in the site's charset (latin-1)
    self.assertEqual(parser.get('DEFAULT', 'title'),
                     ITEMS_TITLE.encode('latin1'))
    self.assertEqual(parser.get('DEFAULT', 'description'),
                     ITEMS_DESCRIPTION.encode('latin1'))
def parse_config(args):
    """Parse configuration file for stats service."""
    config = ConfigParser()
    config.read(args.config_file)
    # map syslog-style severity names onto stdlib logging levels
    severity_to_level = {
        "SYS_EMERG": logging.CRITICAL,
        "SYS_ALERT": logging.CRITICAL,
        "SYS_CRIT": logging.CRITICAL,
        "SYS_ERR": logging.ERROR,
        "SYS_WARN": logging.WARNING,
        "SYS_NOTICE": logging.INFO,
        "SYS_INFO": logging.INFO,
        "SYS_DEBUG": logging.DEBUG,
    }
    return {
        "log_file": config.get("DEFAULT", "log_file"),
        "log_level": severity_to_level[config.get("DEFAULT", "log_level")],
        "stats_server": config.get("DEFAULT", "stats_server"),
        "state": config.get("DEFAULT", "state"),
    }
def reload_storage_policies():
    """
    Reload POLICIES from ``swift.conf``.
    """
    global _POLICIES
    # Python 3.2+ disallows duplicate sections/options by default;
    # strict=False preserves the older (Python 2) parsing behavior.
    parser_kwargs = {} if six.PY2 else {'strict': False}
    policy_conf = ConfigParser(**parser_kwargs)
    policy_conf.read(utils.SWIFT_CONF_FILE)
    try:
        _POLICIES = parse_storage_policies(policy_conf)
    except PolicyError as e:
        raise SystemExit('ERROR: Invalid Storage Policy Configuration '
                         'in %s (%s)' % (utils.SWIFT_CONF_FILE, e))
def read_config_cfg(filename):
    """Read the config file *filename* and replace the values in global
    variables.

    Builds Station/Satellite objects from the per-station sections and
    returns a configured Scheduler.

    Fix: the bare ``except:`` clauses around the int/float coercion caught
    everything (including KeyboardInterrupt/SystemExit); only ValueError
    is raised by a failed int()/float() conversion of a string.
    """
    cfg = ConfigParser()
    cfg.read(filename)

    def read_cfg_opts(section):
        """Read the option:value pairs in one section, converting value
        to int/float if applicable.
        """
        kv_dict = {}
        for k, v in cfg.items(section):
            # try int first, then float, finally keep the raw string
            try:
                kv_dict[k] = int(v)
            except ValueError:
                try:
                    kv_dict[k] = float(v)
                except ValueError:
                    kv_dict[k] = v
        return kv_dict

    default_params = read_cfg_opts("default")
    # filename patterns are kept as raw strings (no numeric coercion)
    pattern = dict(cfg.items("pattern"))

    station_list = []
    for station_id in default_params["station"].split(","):
        station_params = read_cfg_opts(station_id)
        satellites = cfg.get(station_id, "satellites").split(",")
        sat_list = [schedule.Satellite(sat_name, **read_cfg_opts(sat_name))
                    for sat_name in satellites]
        new_station = schedule.Station(station_id, **station_params)
        new_station.satellites = sat_list
        station_list.append(new_station)

    scheduler = schedule.Scheduler(
        stations=station_list,
        min_pass=default_params.get("min_pass", 4),
        forward=default_params.get("forward"),
        start=default_params.get("start"),
        dump_url=default_params.get("dump_url", None),
        patterns=pattern,
        center_id=default_params.get("center_id", "unknown"))
    return scheduler