def remote_profiles(self):
    """Return the list of remote profile paths on the device.

    Pulls the device's profiles.ini into a local temporary file, parses
    it, and resolves relative profile paths against the ini's directory.

    Returns:
        list of str: remote (posix) paths of the profiles.

    Raises:
        IOError: if the remote profiles.ini does not exist.
    """
    remote_ini = self.app_ctx.remote_profiles_ini
    if not self.device.is_file(remote_ini):
        raise IOError("Remote file '%s' not found" % remote_ini)

    # Context manager guarantees the temporary file is removed
    # deterministically instead of waiting for garbage collection.
    with tempfile.NamedTemporaryFile() as local_ini:
        self.device.pull(remote_ini, local_ini.name)

        cfg = ConfigParser()
        cfg.read(local_ini.name)

    profiles = []
    for section in cfg.sections():
        if cfg.has_option(section, "Path"):
            # IsRelative=1 means Path is relative to the ini's directory.
            if cfg.has_option(section, "IsRelative") and cfg.getint(
                section, "IsRelative"
            ):
                profiles.append(
                    posixpath.join(
                        posixpath.dirname(remote_ini), cfg.get(section, "Path")
                    )
                )
            else:
                profiles.append(cfg.get(section, "Path"))
    return profiles
def test_export_empty_site(self):
    """An empty site exports exactly .objects (empty CSV) and .properties."""
    self._setUpAdapters()
    site = _makeFolder('site', site_folder=True)
    site.title = 'test_export_empty_site'
    site.description = 'Testing export of an empty site.'
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)

    self.assertEqual(len(context._wrote), 2)

    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    objects = [x for x in reader(StringIO(text))]
    self.assertEqual(len(objects), 0)

    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    parser = ConfigParser()
    # read_file replaces readfp, which was removed in Python 3.12.
    parser.read_file(StringIO(text))
    self.assertEqual(parser.get('DEFAULT', 'Title'), site.title)
    self.assertEqual(parser.get('DEFAULT', 'Description'), site.description)
def loadConfigs(self):
    """Entry point to load the l10n.ini file this Parser refers to.

    This implementation uses synchronous loads, subclasses might overload
    this behaviour. If you do, make sure to pass a file-like object
    to onLoadConfig.

    Returns the ConfigParser holding the parsed l10n.ini.
    """
    cp = ConfigParser(self.defaults)
    cp.read(self.inipath)
    depth = self.getDepth(cp)
    # base directory for all paths referenced by this ini
    self.base = mozpath.join(mozpath.dirname(self.inipath), depth)
    # create child loaders for any other l10n.ini files to be included
    try:
        for title, path in cp.items('includes'):
            # skip default items (items() also yields the parser defaults)
            if title in self.defaults:
                continue
            # add child config parser
            self.addChild(title, path, cp)
    except NoSectionError:
        # no [includes] section: nothing to recurse into
        pass
    # try to load the "dirs" defined in the "compare" section
    try:
        self.dirs.extend(cp.get('compare', 'dirs').split())
    except (NoOptionError, NoSectionError):
        pass
    # try to set "all_path" and "all_url"
    try:
        self.all_path = mozpath.join(self.base, cp.get('general', 'all'))
    except (NoOptionError, NoSectionError):
        self.all_path = None
    return cp
def load_ini(self, ini_config):
    """
    Read the provided ini contents arguments and merge
    the data in the ini config into the config object.

    ini_config is assumed to be a string of the ini file contents.
    """
    parser = ConfigParser()
    # read_file replaces readfp, which was removed in Python 3.12.
    parser.read_file(StringIO(ini_config))
    data = {
        'linters': {},
        'files': {},
        'branches': {},
    }
    if parser.has_section('files'):
        ignore = parser.get('files', 'ignore')
        data['files']['ignore'] = newline_value(ignore)
    if parser.has_section('branches'):
        ignore = parser.get('branches', 'ignore')
        data['branches']['ignore'] = comma_value(ignore)
    linters = []
    if parser.has_section('tools'):
        linters = comma_value(parser.get('tools', 'linters'))
    # Setup empty config sections for each declared linter
    for linter in linters:
        data['linters'][linter] = {}
    # tool_<name> sections carry per-linter options
    for section in parser.sections():
        if not section.startswith('tool_'):
            continue
        # Strip off tool_
        linter = section[5:]
        data['linters'][linter] = dict(parser.items(section))
    self.update(data)
def log_in(client):
    """Authorizes ImgurClient to use user account.

    Reuses tokens stored in auth.ini when available; otherwise runs the
    interactive PIN-based OAuth flow and persists the new tokens.
    Returns the authorized client.
    """
    config = ConfigParser()
    config.read('auth.ini')
    access_token = config.get('credentials', 'access_token')
    refresh_token = config.get('credentials', 'refresh_token')
    # Reuse stored tokens when both are present.
    if len(access_token) > 0 and len(refresh_token) > 0:
        client.set_user_auth(access_token, refresh_token)
        return client
    # No stored tokens: interactive PIN flow in the user's browser.
    authorization_url = client.get_auth_url('pin')
    webbrowser.open(authorization_url)
    pin = input('Please input your pin\n>\t')
    credentials = client.authorize(pin)  # grant_type default is 'pin'
    access_token = credentials['access_token']
    refresh_token = credentials['refresh_token']
    # Persist the fresh tokens for the next run.
    config.set('credentials', 'access_token', access_token)
    config.set('credentials', 'refresh_token', refresh_token)
    save_config(config)
    client.set_user_auth(access_token, refresh_token)
    return client
def _load_object_post_as_copy_conf(self, conf):
    # Copy `object_post_as_copy` from the proxy-server app section of the
    # main config into this middleware's conf, unless already set here.
    if ('object_post_as_copy' in conf or '__file__' not in conf):
        # Option is explicitly set in middleware conf. In that case,
        # we assume operator knows what he's doing.
        # This takes preference over the one set in proxy app
        return

    cp = ConfigParser()
    if os.path.isdir(conf['__file__']):
        read_conf_dir(cp, conf['__file__'])
    else:
        cp.read(conf['__file__'])

    try:
        pipe = cp.get("pipeline:main", "pipeline")
    except (NoSectionError, NoOptionError):
        return

    # The proxy app is the last entry in the pipeline.
    proxy_name = pipe.rsplit(None, 1)[-1]
    proxy_section = "app:" + proxy_name
    try:
        conf['object_post_as_copy'] = cp.get(proxy_section,
                                             'object_post_as_copy')
    except (NoSectionError, NoOptionError):
        # best-effort: leave conf unchanged if the proxy app doesn't set it
        pass
def _populate_config_from_old_location(self, conf):
    # Backfill rate-limit/get-time settings from the proxy-server app
    # section of the main config, unless any is already set on this
    # middleware's conf.
    if ('rate_limit_after_segment' in conf or
            'rate_limit_segments_per_sec' in conf or
            'max_get_time' in conf or
            '__file__' not in conf):
        return

    cp = ConfigParser()
    if os.path.isdir(conf['__file__']):
        read_conf_dir(cp, conf['__file__'])
    else:
        cp.read(conf['__file__'])

    try:
        pipe = cp.get("pipeline:main", "pipeline")
    except (NoSectionError, NoOptionError):
        return

    # The proxy app is the last entry in the pipeline.
    proxy_name = pipe.rsplit(None, 1)[-1]
    proxy_section = "app:" + proxy_name
    for setting in ('rate_limit_after_segment',
                    'rate_limit_segments_per_sec',
                    'max_get_time'):
        try:
            conf[setting] = cp.get(proxy_section, setting)
        except (NoSectionError, NoOptionError):
            # best-effort per setting
            pass
def get_libinfo():
    """Read NNabla library metadata from setup.cfg (generated by CMake).

    On Windows, also locates the import library (.lib) under the CMake
    binary dir; the last one found wins, matching the old behaviour.

    Returns:
        LibInfo: name/file/path plus export library (empty when absent).

    Raises:
        ValueError: if setup.cfg is missing.
    """
    from six.moves.configparser import ConfigParser

    # Parse setup.cfg
    path_cfg = os.path.join(os.path.dirname(__file__), "setup.cfg")
    if not os.path.isfile(path_cfg):
        raise ValueError(
            "`setup.cfg` does not exist. Read installation document and install using CMake.")
    cfgp = ConfigParser()
    cfgp.read(path_cfg)

    # Read NNabla lib info
    export_lib = ''
    if sys.platform == 'win32':
        binary_dir = cfgp.get("cmake", "binary_dir")
        for root, dirs, files in os.walk(os.path.join(binary_dir, 'bin')):
            for file in files:
                if os.path.splitext(file)[1] == '.lib':
                    export_lib = os.path.join(root, file)

    # Build LibInfo once, after the search. Previously `lib` was only
    # assigned inside the win32 file loop, so it was left unbound
    # (NameError) when no .lib file was found.
    lib = LibInfo(cfgp.get("cmake", "target_file_name"),
                  cfgp.get("cmake", "target_file"),
                  cfgp.get("cmake", "target_name"),
                  export_lib)
    print("Library name:", lib.name)
    print("Library file name:", lib.file_name)
    print("Library file:", lib.path)
    print("Export Library", lib.export_lib)
    return lib
def __init__(self, url=None):
    # URL used for the VPN check. When not given explicitly, it is built
    # from the OpenMotics config file: the vpn_check_url template is
    # filled in with this installation's uuid.
    self._url = url
    if self._url is None:
        config = ConfigParser()
        config.read(constants.get_config_file())
        self._url = config.get('OpenMotics', 'vpn_check_url') % config.get(
            'OpenMotics', 'uuid')
def handleSection(self, section, items):
    """Prune Active rows for locales no longer shipped for this tree.

    When the configured locale list is 'all', the actual list is fetched
    from the repo's l10n.ini (following general.depth/general.all).
    Deletion is confirmed interactively.
    """
    locales = items['locales']
    if locales == 'all':
        # Fetch the l10n.ini straight from the hg web interface.
        inipath = '/'.join((
            items['repo'], items['mozilla'],
            'raw-file', 'default',
            items['l10n.ini']
        ))
        ini = ConfigParser()
        # NOTE(review): readfp was removed in Python 3.12; read_file is
        # the replacement — confirm supported Python versions.
        ini.readfp(urlopen(inipath))
        # Resolve the all-locales file relative to the ini location.
        allpath = urljoin(
            urljoin(inipath, ini.get('general', 'depth')),
            ini.get('general', 'all'))
        locales = urlopen(allpath).read()
        locales = locales.split()
    # Active runs for this tree whose locale is NOT in the shipped list.
    obs = (Active.objects
           .filter(run__tree__code=section)
           .exclude(run__locale__code__in=locales)
           .order_by('run__locale__code'))
    obslocs = ' '.join(obs.values_list('run__locale__code', flat=True))
    if not obslocs:
        self.stdout.write(' OK\n')
        return
    # Empty answer defaults to "yes".
    s = input('Remove %s? [Y/n] ' % obslocs)
    if s.lower() == 'y' or s == '':
        obs.delete()
def setUp(self):
    # Analysis id used to select the matching sections of the
    # validation-data file.
    rid = '60754-10'
    config = ConfigParser()
    # NOTE(review): hard-coded developer path — this test only runs on
    # that machine; consider a fixture path or env var.
    p = '/Users/ross/Sandbox/pychron_validation_data.cfg'
    config.read(p)

    # Each option is a comma-separated "value,error" pair per isotope.
    signals = [list(map(float, x.split(','))) for x in
               [config.get('Signals-{}'.format(rid), k)
                for k in ['ar40', 'ar39', 'ar38', 'ar37', 'ar36']]]
    blanks = [list(map(float, x.split(','))) for x in
              [config.get('Blanks-{}'.format(rid), k)
               for k in ['ar40', 'ar39', 'ar38', 'ar37', 'ar36']]]
    irradinfo = [list(map(float, x.split(','))) for x in
                 [config.get('irrad-{}'.format(rid), k)
                  for k in ['k4039', 'k3839', 'ca3937', 'ca3837',
                            'ca3637', 'cl3638']]]

    j = config.get('irrad-{}'.format(rid), 'j')
    j = [float(x) for x in j.split(',')]
    # This fixture applies no baseline/background corrections.
    baselines = [(0, 0), (0, 0), (0, 0), (0, 0), (0, 0)]
    backgrounds = [(0, 0), (0, 0), (0, 0), (0, 0), (0, 0)]

    ar37df = config.getfloat('irrad-{}'.format(rid), 'ar37df')
    # Convert the Ar37 decay factor into an elapsed time (years) using
    # the lambda_37 decay constant.
    t = math.log(ar37df) / (constants.lambda_37.nominal_value * 365.25)
    irradinfo.append(t)

    # load results
    r = 'results-{}'.format(rid)
    self.age = config.getfloat(r, 'age')
    self.rad4039 = config.getfloat(r, 'rad4039')
    self.ca37k39 = config.getfloat(r, 'ca37k39')

    self.age_dict = calculate_arar_age(signals, baselines, blanks,
                                       backgrounds, j, irradinfo,
                                       )
def load(self): """load configuration of analysis from the config.txt file in self.path """ # read in the config file cfile = self.configFile configParse = ConfigParser() configParse.read(cfile) # location of data file (h5, sql, etc.) storage.open(configParse.get("analysis", "data")) # what search terms do we want to use for the data? self.kwargs = dict(configParse.items("data")) for k in self.kwargs.keys(): self.kwargs[k] = self.kwargs[k].split(",") # get data matching search terms self.data = storage.getData(**self.kwargs) # determine all factories to use in analysis self.factories = [] for f in configParse.get("analysis", "factory").split(","): self.factories.append((f, registry[f])) # self.factory = registry[configParse.get("analysis","factory")] # how to group data samples self.groupby = configParse.get("analysis", "groupby").split(",") # what is the max number of curves per grouping? self.maxSize = int(configParse.get("analysis", "maxSize")) # everything is loaded self.loaded = True
def test_export_site_with_non_exportable_simple_items(self):
    """Non-exportable items appear in .objects but produce no extra files."""
    self._setUpAdapters()
    ITEM_IDS = ('foo', 'bar', 'baz')
    site = _makeFolder('site', site_folder=True)
    site.title = 'AAA'
    site.description = 'DESCRIPTION'
    for id in ITEM_IDS:
        site._setObject(id, _makeItem(id))
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)

    self.assertEqual(len(context._wrote), 2)

    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    objects = [x for x in reader(StringIO(text))]
    self.assertEqual(len(objects), 3)
    for index in range(len(ITEM_IDS)):
        self.assertEqual(objects[index][0], ITEM_IDS[index])
        self.assertEqual(objects[index][1], TEST_CONTENT)

    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    parser = ConfigParser()
    # read_file replaces readfp, which was removed in Python 3.12.
    parser.read_file(StringIO(text))
    self.assertEqual(parser.get('DEFAULT', 'title'), 'AAA')
    self.assertEqual(parser.get('DEFAULT', 'description'), 'DESCRIPTION')
def get_libinfo(): from six.moves.configparser import ConfigParser # Parse setup.cfg path_cfg = join(dirname(__file__), "setup.cfg") if not isfile(path_cfg): raise ValueError( "`setup.cfg` does not exist. Read installation document and install using CMake.") cfgp = ConfigParser() cfgp.read(path_cfg) # Read cpu lib info cpu_lib = LibInfo(None, cfgp.get("cmake", "cpu_target_file"), cfgp.get("cmake", "cpu_target_name")) print("CPU Library name:", cpu_lib.name) print("CPU Library file:", cpu_lib.path) # Read cuda lib info cuda_lib = LibInfo(cfgp.get("cmake", "cuda_target_file_name"), cfgp.get("cmake", "cuda_target_file"), cfgp.get("cmake", "cuda_target_name")) print("CUDA Library name:", cuda_lib.name) print("CUDA Library file name:", cuda_lib.file_name) print("CUDA Library file:", cuda_lib.path) if 'INCLUDE_CUDA_CUDNN_LIB_IN_WHL' in os.environ and os.environ['INCLUDE_CUDA_CUDNN_LIB_IN_WHL'] == 'True': print("CUDA/cuDNN libraries will include into wheel package.") libs = [cfgp.get("cmake", "cuda_toolkit_root_dir"), os.path.dirname(cfgp.get("cmake", "cudnn_include_dir"))] else: libs = None return cpu_lib, cuda_lib, libs
def get_libinfo():
    """Collect CPU and CUDA library info recorded in setup.cfg by CMake.

    Returns a (cpu_lib, cuda_lib) pair of LibInfo records.
    Raises ValueError when setup.cfg is missing.
    """
    from six.moves.configparser import ConfigParser

    # Locate and parse the CMake-generated setup.cfg next to this file.
    cfg_path = join(dirname(__file__), "setup.cfg")
    if not isfile(cfg_path):
        raise ValueError(
            "`setup.cfg` does not exist. Read installation document and install using CMake.")
    parser = ConfigParser()
    parser.read(cfg_path)

    def cmake_value(key):
        # All values of interest live in the [cmake] section.
        return parser.get("cmake", key)

    cpu_lib = LibInfo(None,
                      cmake_value("cpu_target_file"),
                      cmake_value("cpu_target_name"))
    print("CPU Library name:", cpu_lib.name)
    print("CPU Library file:", cpu_lib.path)

    cuda_lib = LibInfo(cmake_value("cuda_target_file_name"),
                       cmake_value("cuda_target_file"),
                       cmake_value("cuda_target_name"))
    print("CUDA Library name:", cuda_lib.name)
    print("CUDA Library file name:", cuda_lib.file_name)
    print("CUDA Library file:", cuda_lib.path)

    return cpu_lib, cuda_lib
def read_systemini(self):
    """read group info from system.ini
    this is part of the connection process

    Fetches Config/system.ini over FTP, builds self.groups and
    self.stages, selects the trajectory group when exactly one
    "Multiple*" group exists, and queries per-stage limits from the XPS.
    Returns self.groups.
    """
    self.ftpconn.connect(**self.ftpargs)
    self.ftpconn.cwd(posixpath.join(self.ftphome, 'Config'))
    lines = self.ftpconn.getlines('system.ini')
    self.ftpconn.close()

    pvtgroups = []
    self.stages = OrderedDict()
    self.groups = OrderedDict()

    sconf = ConfigParser()
    # NOTE(review): readfp was removed in Python 3.12; read_file is the
    # replacement — confirm supported Python versions.
    sconf.readfp(StringIO('\n'.join(lines)))

    # read and populate lists of groups first
    for gtype, glist in sconf.items('GROUPS'):  # ].items():
        if len(glist) > 0:
            for gname in glist.split(','):
                gname = gname.strip()
                self.groups[gname] = OrderedDict()
                self.groups[gname]['category'] = gtype.strip()
                self.groups[gname]['positioners'] = []
                # "Multiple*" categories support PVT trajectories
                if gtype.lower().startswith('multiple'):
                    pvtgroups.append(gname)

    for section in sconf.sections():
        if section in ('DEFAULT', 'GENERAL', 'GROUPS'):
            continue
        items = sconf.options(section)
        if section in self.groups:  # this is a Group Section!
            poslist = sconf.get(section, 'positionerinuse')
            posnames = [a.strip() for a in poslist.split(',')]
            self.groups[section]['positioners'] = posnames
        elif 'plugnumber' in items:  # this is a stage
            self.stages[section] = {
                'stagetype': sconf.get(section, 'stagename')
            }

    # With exactly one PVT-capable group, make it the trajectory group.
    if len(pvtgroups) == 1:
        self.set_trajectory_group(pvtgroups[0])

    for sname in self.stages:
        ret = self._xps.PositionerMaximumVelocityAndAccelerationGet(
            self._sid, sname)
        try:
            self.stages[sname]['max_velo'] = ret[1]
            # NOTE(review): the /3.0 derates the reported max
            # acceleration — rationale not visible here; confirm.
            self.stages[sname]['max_accel'] = ret[2] / 3.0
        except:
            # NOTE(review): bare except hides real errors; best-effort
            # population of stage limits, kept as-is.
            print("could not set max velo/accel for %s" % sname)
        ret = self._xps.PositionerUserTravelLimitsGet(self._sid, sname)
        try:
            self.stages[sname]['low_limit'] = ret[1]
            self.stages[sname]['high_limit'] = ret[2]
        except:
            print("could not set limits for %s" % sname)

    return self.groups
def _read_pypirc(self):
    """Reads the .pypirc file.

    Supports both the modern [distutils]/index-servers layout and the
    legacy [server-login] layout. Returns a dict with server/username/
    password/repository/realm, or an empty dict when nothing matches.
    """
    rc = self._get_rc_file()
    if os.path.exists(rc):
        self.announce('Using PyPI login from %s' % rc)
        repository = self.repository or self.DEFAULT_REPOSITORY

        config = ConfigParser()
        config.read(rc)
        sections = config.sections()
        if 'distutils' in sections:
            # let's get the list of servers
            index_servers = config.get('distutils', 'index-servers')
            _servers = [
                server.strip() for server in index_servers.split('\n')
                if server.strip() != ''
            ]
            if _servers == []:
                # nothing set, let's try to get the default pypi
                if 'pypi' in sections:
                    _servers = ['pypi']
                else:
                    # the file is not properly defined, returning
                    # an empty dict
                    return {}
            for server in _servers:
                current = {'server': server}
                current['username'] = config.get(server, 'username')

                # optional params
                for key, default in (('repository', self.DEFAULT_REPOSITORY),
                                     ('realm', self.DEFAULT_REALM),
                                     ('password', None)):
                    if config.has_option(server, key):
                        current[key] = config.get(server, key)
                    else:
                        current[key] = default
                # first server whose name or repository URL matches wins
                if (current['server'] == repository or
                        current['repository'] == repository):
                    return current
        elif 'server-login' in sections:
            # old format
            server = 'server-login'
            if config.has_option(server, 'repository'):
                repository = config.get(server, 'repository')
            else:
                repository = self.DEFAULT_REPOSITORY
            return {
                'username': config.get(server, 'username'),
                'password': config.get(server, 'password'),
                'repository': repository,
                'server': server,
                'realm': self.DEFAULT_REALM
            }

    return {}
def __init__(self, config=None, ipaconf=paths.IPA_DEFAULT_CONF):
    """Initialize KEM key handling from IPA's default.conf."""
    super(IPAKEMKeys, self).__init__(config)
    conf = ConfigParser()
    conf.read(ipaconf)
    self.host = conf.get('global', 'host')
    self.realm = conf.get('global', 'realm')
    # prefer an explicitly configured ldap_uri over the one in default.conf
    self.ldap_uri = config.get('ldap_uri', None)
    if self.ldap_uri is None:
        # NOTE(review): on Python 3, ConfigParser.get()'s third argument
        # is keyword-only (raw); passing None positionally raises
        # TypeError. Likely intended fallback=None (or raw=True, as in
        # the sibling variant of this constructor) — confirm target
        # Python version.
        self.ldap_uri = conf.get('global', 'ldap_uri', None)
    self._server_keys = None
def read_config_file(cfgfile, options):
    """Apply [testflo] settings from *cfgfile* onto *options*.

    Sets options.skip_dirs (list of str) and options.num_procs (int)
    when the corresponding options are present; leaves other attributes
    untouched.
    """
    config = ConfigParser()
    # read_file replaces readfp, which was removed in Python 3.12; the
    # context manager also guarantees the file handle is closed.
    with open(cfgfile) as fp:
        config.read_file(fp)

    if config.has_option('testflo', 'skip_dirs'):
        skips = config.get('testflo', 'skip_dirs')
        options.skip_dirs = [s.strip() for s in skips.split(',') if s.strip()]

    if config.has_option('testflo', 'num_procs'):
        options.num_procs = int(config.get('testflo', 'num_procs'))
def __init__(self, config=None, ipaconf=paths.IPA_DEFAULT_CONF):
    """Initialize KEM key handling from IPA's default.conf."""
    super(IPAKEMKeys, self).__init__(config)
    conf = ConfigParser()
    conf.read(ipaconf)
    self.host = conf.get('global', 'host')
    self.realm = conf.get('global', 'realm')
    # prefer an explicitly configured ldap_uri over the one in default.conf
    self.ldap_uri = config.get('ldap_uri', None)
    if self.ldap_uri is None:
        # raw=True skips value interpolation.
        # NOTE(review): this still raises NoOptionError when ldap_uri is
        # absent from [global] (it is not a fallback) — confirm intended.
        self.ldap_uri = conf.get('global', 'ldap_uri', raw=True)
    self._server_keys = None
def make_oauth_client(base_url):
    """ Build an oauth.Client with which callers can query Allura.

    Consumer and access tokens are cached in ~/.allurarc per base_url;
    when no access token is cached, runs the interactive PIN-based
    3-legged OAuth flow and saves the result.
    """
    config_file = os.path.join(os.environ['HOME'], '.allurarc')
    cp = ConfigParser()
    cp.read(config_file)

    REQUEST_TOKEN_URL = base_url + '/rest/oauth/request_token'
    AUTHORIZE_URL = base_url + '/rest/oauth/authorize'
    ACCESS_TOKEN_URL = base_url + '/rest/oauth/access_token'
    # prompt for (or read back) the application's consumer credentials
    oauth_key = option(cp, base_url, 'oauth_key',
                       'Forge API OAuth Key (%s/auth/oauth/): ' % base_url)
    oauth_secret = option(cp, base_url, 'oauth_secret',
                          'Forge API Oauth Secret: ')
    consumer = oauth.Consumer(oauth_key, oauth_secret)

    try:
        # reuse tokens cached from a previous run
        oauth_token = cp.get(base_url, 'oauth_token')
        oauth_token_secret = cp.get(base_url, 'oauth_token_secret')
    except NoOptionError:
        # no cached tokens: run the interactive 3-legged OAuth flow
        client = oauth.Client(consumer)
        resp, content = client.request(REQUEST_TOKEN_URL, 'GET')
        # NOTE(review): assert vanishes under `python -O`; consider an
        # explicit check.
        assert resp['status'] == '200', resp
        request_token = dict(six.moves.urllib.parse.parse_qsl(content))
        pin_url = "%s?oauth_token=%s" % (
            AUTHORIZE_URL, request_token['oauth_token'])
        if getattr(webbrowser.get(), 'name', '') == 'links':  # sandboxes
            print(("Go to %s" % pin_url))
        else:
            webbrowser.open(pin_url)
        oauth_verifier = input('What is the PIN? ')
        token = oauth.Token(
            request_token['oauth_token'],
            request_token['oauth_token_secret'])
        token.set_verifier(oauth_verifier)
        client = oauth.Client(consumer, token)
        resp, content = client.request(ACCESS_TOKEN_URL, "GET")
        access_token = dict(six.moves.urllib.parse.parse_qsl(content))
        oauth_token = access_token['oauth_token']
        oauth_token_secret = access_token['oauth_token_secret']

        cp.set(base_url, 'oauth_token', oauth_token)
        cp.set(base_url, 'oauth_token_secret', oauth_token_secret)
        # save oauth token for later use
        cp.write(open(config_file, 'w'))
        print('Saving oauth tokens in {} for later re-use'.format(config_file))
        print()

    access_token = oauth.Token(oauth_token, oauth_token_secret)
    oauth_client = oauth.Client(consumer, access_token)
    return oauth_client
def test_export_site_with_subfolders(self):
    """Subfolders export .objects/.properties each, plus the site's own."""
    self._setUpAdapters()
    FOLDER_IDS = ('foo', 'bar', 'baz')
    site = _makeFolder('site', site_folder=True)
    site.title = 'AAA'
    site.description = 'DESCRIPTION'
    for id in FOLDER_IDS:
        folder = _makeFolder(id)
        folder.title = 'Title: %s' % id
        folder.description = 'xyzzy'
        site._setObject(id, folder)
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)

    # two files for the site itself plus two per subfolder
    self.assertEqual(len(context._wrote), 2 + (2 * len(FOLDER_IDS)))

    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    objects = [x for x in reader(StringIO(text))]
    self.assertEqual(len(objects), 3)
    for index in range(len(FOLDER_IDS)):
        id = FOLDER_IDS[index]
        self.assertEqual(objects[index][0], id)
        self.assertEqual(objects[index][1], TEST_FOLDER)

        filename, text, content_type = context._wrote[2 + (2 * index)]
        self.assertEqual(filename, '/'.join(('structure', id, '.objects')))
        self.assertEqual(content_type, 'text/comma-separated-values')
        subobjects = [x for x in reader(StringIO(text))]
        self.assertEqual(len(subobjects), 0)

        filename, text, content_type = context._wrote[2 + (2 * index) + 1]
        self.assertEqual(filename, '/'.join(
            ('structure', id, '.properties')))
        self.assertEqual(content_type, 'text/plain')
        parser = ConfigParser()
        # read_file replaces readfp, which was removed in Python 3.12.
        parser.read_file(StringIO(text))
        self.assertEqual(parser.get('DEFAULT', 'Title'), 'Title: %s' % id)

    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    parser = ConfigParser()
    parser.read_file(StringIO(text))
    self.assertEqual(parser.get('DEFAULT', 'title'), 'AAA')
    self.assertEqual(parser.get('DEFAULT', 'description'), 'DESCRIPTION')
def __init__(self):
    """Load model paths from the ini file named by the CONFIG env var.

    Exits the process when CONFIG is not set.
    """
    config_file = os.environ.get('CONFIG')
    if config_file is None:
        print('Environment variable "CONFIG" not defined.')
        sys.exit(0)

    parser = ConfigParser()
    parser.read(config_file)

    # model file locations, both under the [MODEL] section
    self.ABBREVIATION_MODEL = parser.get('MODEL', 'ABBREVIATION_MODEL')
    self.WORD2VEC_MODEL = parser.get('MODEL', 'WORD2VEC_MODEL')
def __init__(self, config=None):
    # Tornado IO loop used for scheduling callbacks.
    self.ioloop = IOLoop.current()
    self.node_id = None
    self.status = EXECUTOR_STATUS_OFFLINE
    if config is None:
        config = AgentConfig()
    self.config = config
    self.tags = config.get('tags')
    self.checktask_callback = None
    # limits how many tasks may run concurrently
    self.task_slots = TaskSlotContainer(config.getint('slots', 1))

    # Build the server base URL; add scheme and port when the configured
    # value is a bare host name.
    server_base = config.get('server')
    if urlparse(server_base).scheme == '':
        if config.getint('server_https_port'):
            server_https_port = config.getint('server_https_port')
            server_base = 'https://%s:%d' % (server_base, server_https_port)
        else:
            server_base = 'http://%s:%d' % (server_base,
                                            config.getint('server_port'))
    self.service_base = server_base

    # optional TLS client authentication material
    client_cert = config.get('client_cert') or None
    client_key = config.get('client_key') or None

    self.keep_job_files = config.getboolean('debug', False)
    LOGGER.debug('keep_job_files %s', self.keep_job_files)

    httpclient_defaults = {
        'request_timeout': config.getfloat('request_timeout', 60)
    }
    if client_cert:
        httpclient_defaults['client_cert'] = client_cert
    if client_key:
        httpclient_defaults['client_key'] = client_key
    # Pin the server certificate when a custom CA ships with the agent.
    if os.path.exists('keys/ca.crt'):
        self.custom_ssl_cert = True
        httpclient_defaults['ca_certs'] = 'keys/ca.crt'
        httpclient_defaults['validate_cert'] = True
    LOGGER.debug(httpclient_defaults)

    # Node identity/secret, present once the agent has registered.
    node_key = None
    secret_key = None
    if os.path.exists('conf/node.conf'):
        parser = ConfigParser()
        parser.read('conf/node.conf')
        node_key = parser.get('agent', 'node_key')
        secret_key = parser.get('agent', 'secret_key')

    self.httpclient = NodeAsyncHTTPClient(self.service_base, key=node_key,
                                          secret_key=secret_key,
                                          defaults=httpclient_defaults)
    self.runner_factory = RunnerFactory(config)
def main():
    """Build and write the omega-scans Condor DAG from a config.ini."""
    #############################################################################
    #
    # ARGUMENT PARSING
    #
    #############################################################################

    parser = OptionParser(usage)
    parser.add_option(
        "-e", "--example", default=False, dest="example", action="store_true",
        help="Create example config.ini and an example sourcefile")
    (opts, args) = parser.parse_args()

    # --example: just write template files and exit
    if opts.example:
        with open("omega_config.ini", "w") as f:
            f.write(ExampleConfig)
        with open("omegascanslist.txt", "w") as f:
            f.write(ExampleSourceFile)
        print(
            "Example files \"omega_config.ini\" and \"omegascanslist.txt\" are created"
        )
        sys.exit(0)

    if len(args) != 1:
        parser.print_help()
        sys.exit("ERROR: Must provide one config.ini")

    cp = ConfigParser()
    # keep option names case-sensitive (default lowercases them)
    cp.optionxform = str
    # NOTE(review): readfp was removed in Python 3.12 (read_file is the
    # replacement); the handle is also never closed.
    cp.readfp(open(args[0]))

    dag = OmegaScansDAG(cp)

    dag.write_sub_files()
    dag.write_dag()
    dag.write_script()

    # fix the sub and sh files
    # This is required because pipeline.py does not yet have the ability to add
    # a specific argument before all other arguments and options ('scan' in this case)
    fix_subfile(dag.submitFile)
    fix_scriptfile(cp.get('paths', 'basedir'), dag.get_dag_file(),
                   cp.get('omegapipe', 'executable'))

    print('Successfully created DAG file.')
    fulldagpath = os.path.join(cp.get('paths', 'basedir'), dag.get_dag_file())
    print('Now run condor_submit_dag %s\n' % (fulldagpath))
def get_anon_client():
    """Build an anonymous ImgurClient from credentials in auth.ini.

    The client carries application credentials only and is not linked
    to any user account.
    """
    parser = ConfigParser()
    parser.read('auth.ini')

    # removed for compatibility with python 2.7
    # client_id = config.get('credentials', 'CLIENT_ID', fallback=None)
    # client_secret = config.get('credentials', 'CLIENT_SECRET', fallback=None)
    app_id = parser.get('credentials', 'client_id')
    app_secret = parser.get('credentials', 'client_secret')

    return ImgurClient(app_id, app_secret)
def GetConfigFromFile(cls, filename):
    """Build a test configuration object from the ini file *filename*.

    Expects a [tests] section with servers (two triples), notify_email
    and alt_user options.
    """
    cp = ConfigParser()
    # NOTE(review): assert statements are stripped under `python -O`;
    # these validations would then silently disappear — consider raising
    # explicit exceptions instead.
    assert filename in cp.read([filename])
    config = cls()
    servers = cls.parse_multiline_options(cp.get("tests", "servers"))
    assert len(servers) == 2 and len(servers[0]) == 3 and len(
        servers[1]) == 3, "incorrect servers configuration"
    config.server1 = ClientInfo(*servers[0])
    config.server2 = ClientInfo(*servers[1])
    config.notify_email = cp.get("tests", "notify_email")
    config.alt_user = cp.get("tests", "alt_user")
    return config
def from_file(cls, filename, logger):
    """Build a check instance from the ini file *filename*.

    Raises CheckLoadError when the file cannot be read or lacks the
    required [check] ok/warning options.
    """
    config = ConfigParser(_check_defaults)
    if not config.read([filename]):
        raise CheckLoadError('Failed reading file', filename)

    section = 'check'
    try:
        ok_criteria = config.get(section, 'ok')
        warning_criteria = config.get(section, 'warning')
    except Exception as exc:
        # log the underlying parser error before re-raising as load failure
        logger.exception(exc)
        raise CheckLoadError('Failed loading file', filename)

    return cls(ok_criteria, warning_criteria, filename, logger)
def read_config_file(cfgfile, options):
    """Apply [testflo] settings from *cfgfile* onto *options*.

    Sets options.skip_dirs (list of str), options.num_procs (int) and
    options.noreport (bool) when the corresponding options are present.
    """
    config = ConfigParser()
    # read_file replaces readfp, which was removed in Python 3.12; the
    # context manager also guarantees the file handle is closed.
    with open(cfgfile) as fp:
        config.read_file(fp)

    if config.has_option('testflo', 'skip_dirs'):
        skips = config.get('testflo', 'skip_dirs')
        options.skip_dirs = [s.strip() for s in skips.split(',') if s.strip()]

    if config.has_option('testflo', 'num_procs'):
        options.num_procs = int(config.get('testflo', 'num_procs'))

    if config.has_option('testflo', 'noreport'):
        # bool(config.get(...)) was True for ANY non-empty string,
        # including "false"/"0"; getboolean parses the value properly.
        options.noreport = config.getboolean('testflo', 'noreport')
def from_file(cls, filename, logger, runtime_mode=False):
    """Build a check instance from the ini file *filename*.

    Raises CheckLoadError when the file cannot be read or lacks the
    required [check] ok/warning options. runtime_mode is forwarded to
    the constructor.
    """
    config = ConfigParser(_check_defaults)
    if not config.read([filename]):
        raise CheckLoadError('Failed reading file', filename)
    _section = 'check'
    try:
        _ok_criteria = config.get(_section, 'ok')
        _warning_criteria = config.get(_section, 'warning')
    except Exception as exc:
        # log the underlying parser error before re-raising as load failure
        logger.exception(exc)
        raise CheckLoadError('Failed loading file', filename)
    return cls(_ok_criteria, _warning_criteria, filename, logger,
               runtime_mode)
def read_printers(self): """get invalid/valid users from cups and samba config""" # read CUPS configuration if os.path.isfile(ShareConfiguration.CUPS_CONF): reg_cups = re.compile(r'\s*<Printer\s+([^>]+)>') with open("/etc/cups/printers.conf") as fd: for line in fd.readlines(): m_cups = reg_cups.match(line) if m_cups: prt = Printer(m_cups.group(1).strip()) self._printers[prt.name] = prt # samba if not os.path.exists(ShareConfiguration.PRINTERS_UDM_DIR): return for filename in os.listdir(ShareConfiguration.PRINTERS_UDM_DIR): cfg = ConfigParser() cfg.read( os.path.join(ShareConfiguration.PRINTERS_UDM_DIR, filename)) try: prt_name = cfg.sections()[0] except IndexError: continue prt = None if prt_name in self._printers: prt = self._printers[prt_name] else: if cfg.has_option(prt_name, 'printer name'): cups_name = cfg.get(prt_name, 'printer name') if cups_name in self._printers: prt = self._printers[cups_name] prt.smbname = prt_name if prt is None: continue if cfg.has_option(prt_name, Restrictions.INVALID_USERS): prt.invalid_users = shlex.split( cfg.get(prt_name, Restrictions.INVALID_USERS)) if cfg.has_option(prt_name, Restrictions.VALID_USERS): prt.valid_users = shlex.split( cfg.get(prt_name, Restrictions.VALID_USERS)) if cfg.has_option(prt_name, Restrictions.HOSTS_DENY): prt.hosts_deny = shlex.split( cfg.get(prt_name, Restrictions.HOSTS_DENY))
def setUp(self):
    # Analysis id used to select the matching sections of the
    # validation-data file.
    rid = '60754-10'
    config = ConfigParser()
    # NOTE(review): hard-coded developer path — this test only runs on
    # that machine; consider a fixture path or env var.
    p = '/Users/ross/Sandbox/pychron_validation_data.cfg'
    config.read(p)

    # Each option is a comma-separated "value,error" pair per isotope.
    signals = [
        list(map(float, x.split(','))) for x in [
            config.get('Signals-{}'.format(rid), k)
            for k in ['ar40', 'ar39', 'ar38', 'ar37', 'ar36']
        ]
    ]
    blanks = [
        list(map(float, x.split(','))) for x in [
            config.get('Blanks-{}'.format(rid), k)
            for k in ['ar40', 'ar39', 'ar38', 'ar37', 'ar36']
        ]
    ]
    irradinfo = [
        list(map(float, x.split(','))) for x in [
            config.get('irrad-{}'.format(rid), k)
            for k in ['k4039', 'k3839', 'ca3937', 'ca3837', 'ca3637',
                      'cl3638']
        ]
    ]

    j = config.get('irrad-{}'.format(rid), 'j')
    j = [float(x) for x in j.split(',')]
    # This fixture applies no baseline/background corrections.
    baselines = [(0, 0), (0, 0), (0, 0), (0, 0), (0, 0)]
    backgrounds = [(0, 0), (0, 0), (0, 0), (0, 0), (0, 0)]

    ar37df = config.getfloat('irrad-{}'.format(rid), 'ar37df')
    # Convert the Ar37 decay factor into an elapsed time (years) using
    # the lambda_37 decay constant.
    t = math.log(ar37df) / (constants.lambda_37.nominal_value * 365.25)
    irradinfo.append(t)

    # load results
    r = 'results-{}'.format(rid)
    self.age = config.getfloat(r, 'age')
    self.rad4039 = config.getfloat(r, 'rad4039')
    self.ca37k39 = config.getfloat(r, 'ca37k39')

    self.age_dict = calculate_arar_age(
        signals,
        baselines,
        blanks,
        backgrounds,
        j,
        irradinfo,
    )
def load_theme(struct, path, colors, default_colors):
    """Populate *colors* from the theme file at *path*.

    A key present in both [syntax] and [interface] takes its value from
    [syntax]. Any key missing from the theme is filled in from
    *default_colors*. ``struct`` is accepted for interface compatibility
    but not used here.
    """
    theme = ConfigParser()
    with open(path, 'r') as f:
        # read_file replaces readfp, which was removed in Python 3.12.
        theme.read_file(f)
    for k, v in chain(theme.items('syntax'), theme.items('interface')):
        # [syntax] wins when a key appears in both sections
        if theme.has_option('syntax', k):
            colors[k] = theme.get('syntax', k)
        else:
            colors[k] = theme.get('interface', k)

    # Check against default theme to see if all values are defined
    # (plain .items() replaces six.iteritems — identical behavior).
    for k, v in default_colors.items():
        if k not in colors:
            colors[k] = v
def load(self):
    """Load dashboard UI settings and graph-scheme definitions from
    settings.DASHBOARD_CONF."""
    schemes = [defaultScheme]
    parser = ConfigParser()
    parser.read(settings.DASHBOARD_CONF)

    # [ui] options: try integer first, fall back to the raw string.
    for option, default_value in defaultUIConfig.items():
        if parser.has_option('ui', option):
            try:
                self.ui_config[option] = parser.getint('ui', option)
            except ValueError:
                self.ui_config[option] = parser.get('ui', option)
        else:
            self.ui_config[option] = default_value

    if parser.has_option('ui', 'automatic_variants'):
        self.ui_config['automatic_variants'] = parser.getboolean('ui', 'automatic_variants')
    else:
        self.ui_config['automatic_variants'] = True

    # keyboard shortcuts: defaults overlaid with configured overrides
    self.ui_config['keyboard_shortcuts'] = defaultKeyboardShortcuts.copy()
    if parser.has_section('keyboard-shortcuts'):
        self.ui_config['keyboard_shortcuts'].update(
            parser.items('keyboard-shortcuts')
        )

    # every remaining section defines a graph naming scheme
    for section in parser.sections():
        if section in ('ui', 'keyboard-shortcuts'):
            continue

        scheme = parser.get(section, 'scheme')
        fields = []
        # extract <field> placeholders from the scheme pattern
        for match in fieldRegex.finditer(scheme):
            field = match.group(1)
            # optional "<field>.label" option overrides the display name
            if parser.has_option(section, '%s.label' % field):
                label = parser.get(section, '%s.label' % field)
            else:
                label = field

            fields.append({
                'name' : field,
                'label' : label
            })

        schemes.append({
            'name' : section,
            'pattern' : scheme,
            'fields' : fields,
        })

    self.schemes = schemes
def restore_rois(self, roifile):
    """restore ROI setting from ROI.dat file

    Reads the [rois] section, applies the ROIs to the first MCA, then
    copies them (with the first MCA's calibration) to all other MCAs.
    """
    cp = ConfigParser()
    cp.read(roifile)
    rois = []
    self.mcas[0].clear_rois()
    prefix = self.mcas[0]._prefix
    if prefix.endswith('.'):
        prefix = prefix[:-1]
    iroi = 0
    # each option looks like: roiN = NAME | LO HI
    for a in cp.options('rois'):
        if a.lower().startswith('roi'):
            name, dat = cp.get('rois', a).split('|')
            lims = [int(i) for i in dat.split()]
            lo, hi = lims[0], lims[1]
            # print('ROI ', name, lo, hi)
            roi = ROI(prefix=prefix, roi=iroi)
            roi.LO = lo
            roi.HI = hi
            roi.NM = name.strip()
            rois.append(roi)
            iroi += 1

    # brief wait — presumably lets the channel-access writes settle
    # before applying the ROIs; confirm poll() semantics.
    poll(0.050, 1.0)
    self.mcas[0].set_rois(rois)
    # propagate the first MCA's ROIs and calibration to the rest
    cal0 = self.mcas[0].get_calib()
    for mca in self.mcas[1:]:
        mca.set_rois(rois, calib=cal0)
def get(self, section, key, default=""):
    """Like ConfigParser.get, but return *default* instead of raising
    when the section or option is missing."""
    try:
        return ConfigParser.get(self, section, key)
    except (NoOptionError, NoSectionError):
        # missing section and missing option are treated alike
        return default
def getfloat(self, section, option, default=None):
    """Fetch *option* as a float; return *default* when the section or
    option is missing, or when the stored value is None."""
    try:
        raw = ConfigParser.get(self, section, option)
    except (NoSectionError, NoOptionError):
        return default
    # a None value can occur with allow_no_value parsers
    return float(raw) if raw is not None else default
def _get_attach_points(self, info, size_request):
    """Return the icon's attach point as (x, y) fractions of size_request.

    Falls back to a sibling .icon file when the theme reports no attach
    points; defaults to (0, 0).
    """
    has_attach_points_, attach_points = info.get_attach_points()

    attach_x = attach_y = 0
    if attach_points:
        # this works only for Gtk < 3.14
        # https://developer.gnome.org/gtk3/stable/GtkIconTheme.html
        # #gtk-icon-info-get-attach-points
        attach_x = float(attach_points[0].x) / size_request
        attach_y = float(attach_points[0].y) / size_request
    elif info.get_filename():
        # try read from the .icon file
        icon_filename = info.get_filename().replace('.svg', '.icon')
        if icon_filename != info.get_filename() and \
                os.path.exists(icon_filename):
            try:
                with open(icon_filename) as config_file:
                    cp = ConfigParser()
                    # NOTE(review): readfp was removed in Python 3.12;
                    # read_file is the replacement.
                    cp.readfp(config_file)
                    attach_points_str = cp.get('Icon Data', 'AttachPoints')
                    attach_points = attach_points_str.split(',')
                    # .icon files store the coordinates as per-mille
                    attach_x = float(attach_points[0].strip()) / 1000
                    attach_y = float(attach_points[1].strip()) / 1000
            except Exception as e:
                logging.exception('Exception reading icon info: %s', e)

    return attach_x, attach_y
def get_stackstorm_version():
    """
    Return StackStorm version including git commit revision if running a dev
    release and a file with package metadata which includes git revision is
    available.

    :rtype: ``str``
    """
    version = __version__
    # Only dev builds carry a git-revision suffix.
    if 'dev' not in version:
        return version
    if not os.path.isfile(PACKAGE_METADATA_FILE_PATH):
        return version
    metadata = ConfigParser()
    try:
        metadata.read(PACKAGE_METADATA_FILE_PATH)
        git_revision = metadata.get('server', 'git_sha')
    except Exception:
        # Best effort: any failure means we fall back to the plain version.
        return version
    return '%s (%s)' % (version, git_revision)
def get_config(p):
    """Read a config file.

    :param p: path (or `Path`) of the INI file to read.
    :return: dict of ('section.option', value) pairs.
    """
    cfg = {}
    parser = ConfigParser()
    # Close the file handle deterministically (it was previously left open
    # for the garbage collector to reclaim).
    with Path(p).open(encoding='utf8') as fp:
        if hasattr(parser, 'read_file'):
            parser.read_file(fp)
        else:  # pragma: no cover
            assert PY2  # The `read_file` method is not available on ConfigParser in py2.7!
            parser.readfp(fp)
    for section in parser.sections():
        # Options are typed via a name suffix: *_int, *_boolean, *_float,
        # *_list; anything else is returned as a plain string.
        getters = {
            'int': partial(parser.getint, section),
            'boolean': partial(parser.getboolean, section),
            'float': partial(parser.getfloat, section),
            'list': lambda option: parser.get(section, option).split(),
        }
        default = partial(parser.get, section)
        for option in parser.options(section):
            type_ = option.rpartition('_')[2] if '_' in option else None
            value = getters.get(type_, default)(option)
            cfg['{0}.{1}'.format(section, option)] = value
    return cfg
def get_config(p):
    """Read a config file.

    :param p: path (or `Path`) of the INI file to read.
    :return: dict of ('section.option', value) pairs.
    """
    if not isinstance(p, Path):
        p = Path(p)
    cfg = {}
    parser = ConfigParser()
    # `readfp` is deprecated and removed in Python 3.12; use `read_file`
    # when available, and close the handle deterministically instead of
    # leaking it.
    with p.open(encoding='utf8') as fp:
        if hasattr(parser, 'read_file'):
            parser.read_file(fp)
        else:  # pragma: no cover - Python 2 only
            parser.readfp(fp)
    for section in parser.sections():
        # Options are typed via a name suffix: *_int, *_boolean, *_float,
        # *_list; anything else is returned as a plain string.
        getters = {
            'int': partial(parser.getint, section),
            'boolean': partial(parser.getboolean, section),
            'float': partial(parser.getfloat, section),
            'list': lambda option: parser.get(section, option).split(),
        }
        default = partial(parser.get, section)
        for option in parser.options(section):
            type_ = option.rpartition('_')[2] if '_' in option else None
            value = getters.get(type_, default)(option)
            cfg['{0}.{1}'.format(section, option)] = value
    return cfg
def loadKeysConfig(path=None):
    """Load keys config file.

    If path is ``None``, a file named :any:`DEFAULT_KEYS_FILE` will be looked
    for in the config directory.

    :param path: path of the keyboard configuration file
    """
    if path is None:
        path = getConfigFilePath(DEFAULT_KEYS_FILE)

    cfg = ConfigParser()
    cfg.optionxform = str  # keep option (action) names case-sensitive
    cfg.read([path])

    # An optional "<scope>:" prefix on the key string selects the Qt
    # shortcut context; per-widget is the default.
    scope_map = {
        'widget': Qt.WidgetShortcut,
        'window': Qt.WindowShortcut,
        'children': Qt.WidgetWithChildrenShortcut,
        'application': Qt.ApplicationShortcut,
    }
    for category in cfg.sections():
        for actionName in cfg.options(category):
            keystr = cfg.get(category, actionName)
            scope, sep, remainder = keystr.partition(':')
            if sep and scope in scope_map:
                context = scope_map[scope]
                keystr = remainder
            else:
                context = Qt.WidgetShortcut
            registerActionShortcut(category, actionName,
                                   QKeySequence(keystr), context)
def reload_constraints():
    """
    Parse SWIFT_CONF_FILE and reset module level global constraint attrs,
    populating OVERRIDE_CONSTRAINTS AND EFFECTIVE_CONSTRAINTS along the way.
    """
    global SWIFT_CONSTRAINTS_LOADED, OVERRIDE_CONSTRAINTS
    SWIFT_CONSTRAINTS_LOADED = False
    OVERRIDE_CONSTRAINTS = {}
    constraints_conf = ConfigParser()
    # ConfigParser.read() returns the list of files successfully parsed,
    # so a missing/unreadable conf file leaves the defaults in force.
    if constraints_conf.read(utils.SWIFT_CONF_FILE):
        SWIFT_CONSTRAINTS_LOADED = True
        for name in DEFAULT_CONSTRAINTS:
            try:
                value = constraints_conf.get('swift-constraints', name)
            except NoOptionError:
                pass
            except NoSectionError:
                # We are never going to find the section for another option
                break
            else:
                try:
                    # Integer-looking values become ints; anything else is
                    # treated as a comma-separated list.
                    value = int(value)
                except ValueError:
                    value = utils.list_from_csv(value)
                OVERRIDE_CONSTRAINTS[name] = value
    # Effective value = override when present, built-in default otherwise;
    # each is also exported as a module-level UPPER_CASE attribute.
    for name, default in DEFAULT_CONSTRAINTS.items():
        value = OVERRIDE_CONSTRAINTS.get(name, default)
        EFFECTIVE_CONSTRAINTS[name] = value
        # "globals" in this context is module level globals, always.
        globals()[name.upper()] = value
def reload_constraints():
    """
    Parse SWIFT_CONF_FILE and reset module level global constraint attrs,
    populating OVERRIDE_CONSTRAINTS AND EFFECTIVE_CONSTRAINTS along the way.
    """
    global SWIFT_CONSTRAINTS_LOADED, OVERRIDE_CONSTRAINTS
    SWIFT_CONSTRAINTS_LOADED = False
    OVERRIDE_CONSTRAINTS = {}
    constraints_conf = ConfigParser()
    # read() returns the list of successfully parsed files; if the conf
    # file is absent the built-in defaults remain in effect.
    if constraints_conf.read(utils.SWIFT_CONF_FILE):
        SWIFT_CONSTRAINTS_LOADED = True
        for name in DEFAULT_CONSTRAINTS:
            try:
                value = constraints_conf.get('swift-constraints', name)
            except NoOptionError:
                pass
            except NoSectionError:
                # We are never going to find the section for another option
                break
            else:
                try:
                    # Values parse as int when possible, otherwise as a
                    # comma-separated list.
                    value = int(value)
                except ValueError:
                    value = utils.list_from_csv(value)
                OVERRIDE_CONSTRAINTS[name] = value
    # Publish the effective value for every constraint, both in the
    # EFFECTIVE_CONSTRAINTS dict and as a module-level UPPER_CASE name.
    for name, default in DEFAULT_CONSTRAINTS.items():
        value = OVERRIDE_CONSTRAINTS.get(name, default)
        EFFECTIVE_CONSTRAINTS[name] = value
        # "globals" in this context is module level globals, always.
        globals()[name.upper()] = value
def pytest_collect_file(path, parent):
    """Handle running pylint on files discovered"""
    settings = parent.config.option
    if not settings.pylint:
        return None
    if path.ext != ".py":
        return None
    # Consult pylintrc (if any) for its ignore list.
    rcfile = settings.pylint_rcfile or PYLINTRC
    if not rcfile or not exists(rcfile):
        # No pylintrc, therefore no ignores, so return the item.
        return PyLintItem(path, parent)
    parser = ConfigParser()
    parser.read(rcfile)
    ignored = []
    try:
        raw_ignore = parser.get('MASTER', 'ignore')
    except (NoSectionError, NoOptionError):
        raw_ignore = ''
    if len(raw_ignore) > 0:
        ignored = raw_ignore.split(',')
    rel_path = path.strpath.replace(parent.fspath.strpath, '', 1)[1:]
    if any(fragment in rel_path for fragment in ignored):
        return None
    return PyLintItem(path, parent)
def parse_config(config_file=DEFAULT_CONFIG_FILE):
    """Return pip-save settings as a dict, falling back to the defaults."""
    if not os.path.exists(config_file):
        # No config file: use the defaults, with the dev requirements file
        # mirroring the regular one.
        defaults = dict(DEFAULT_OPTIONS)
        defaults['requirement_dev'] = defaults['requirement']
        return defaults
    parser = ConfigParser(DEFAULT_OPTIONS)
    parser.read(config_file)
    section = 'pip-save'
    return {
        'requirement': parser.get(section, 'requirement'),
        'use_compatible': parser.getboolean(section, 'use_compatible'),
        'requirement_dev': parser.get(section, 'requirement_dev'),
    }
def test_list_with_one_entry(self):
    """A one-element list round-trips as a single CSV value."""
    path = self.make_empty_temp_file()
    to_config_file(path, "section.name", {"ports": ["port1"]})
    self.assertTrue(os.path.isfile(path))
    parser = ConfigParser()
    parser.read(path)
    stored = parser.get("section.name", "ports")
    self.assertListEqual(["port1"], stored.split(","))
def test_list_with_no_entries(self):
    """An empty list is written out as an empty string value."""
    path = self.make_empty_temp_file()
    to_config_file(path, "section.name", {"ports": []})
    self.assertTrue(os.path.isfile(path))
    parser = ConfigParser()
    parser.read(path)
    self.assertEqual("", parser.get("section.name", "ports"))
def serve(args):
    """Start the app server, merging CLI arguments with config-file values."""
    cfg = ConfigParser({'port': '8080', 'db': None})
    cfg.read([CONFIG_FILE])
    # CLI port wins; otherwise use the configured (or default 8080) port.
    port = int(args.port or int(cfg.get('app', 'port')))
    # CDN: explicit CLI flag > config option > enabled by default.
    if args.cdn is not None:
        cdn = args.cdn
    elif cfg.has_option('app', 'cdn'):
        cdn = cfg.getboolean('app', 'cdn')
    else:
        cdn = True
    server.serve(
        db=cfg.get('app', 'db'),
        port=port,
        verbose=args.verbose,
        view_path=getattr(args, 'view_path', None),
        controller_path=getattr(args, 'controller_path', None),
        cdn=cdn,
    )
def _parse_linfo(self, linfo_file): cp = ConfigParser() try: cp.readfp(linfo_file) section = 'Activity' if cp.has_option(section, 'name'): self._name = cp.get(section, 'name') if cp.has_option(section, 'summary'): self._summary = cp.get(section, 'summary') if cp.has_option(section, 'tags'): tag_list = cp.get(section, 'tags').strip(';') self._tags = [tag.strip() for tag in tag_list.split(';')] except ParsingError as e: logging.exception('Exception reading linfo file: %s', e)
class INIReader(object):
    """ConfigParser wrapper able to cast value when reading INI options."""

    # Helper casters
    cast_boolean = casts.Boolean()
    cast_dict = casts.Dict()
    cast_list = casts.List()
    cast_logging_level = casts.LoggingLevel()
    cast_tuple = casts.Tuple()
    cast_webdriver_desired_capabilities = casts.WebdriverDesiredCapabilities()

    def __init__(self, path):
        """Parse the INI file at *path* into the wrapped ConfigParser."""
        self.config_parser = ConfigParser()
        with open(path) as handler:
            # BUG FIX: the file was previously read twice — an unconditional
            # `readfp(handler)` ran before the version check, so the branch
            # below parsed an already-exhausted stream.
            if sys.version_info[0] < 3:
                self.config_parser.readfp(handler)
            else:
                # ConfigParser.readfp is deprecated on Python3, read_file
                # replaces it
                self.config_parser.read_file(handler)

    def get(self, section, option, default=None, cast=None):
        """Read an option from a section of a INI file.

        The default value will return if the look up option is not available.
        The value will be cast using a callable if specified otherwise a
        string will be returned.

        :param section: Section to look for.
        :param option: Option to look for.
        :param default: The value that should be used if the option is not
            defined.
        :param cast: If provided the value will be cast using the cast
            provided.
        """
        try:
            value = self.config_parser.get(section, option)
            if cast is not None:
                # Built-in types dispatch to the project casters; any other
                # callable is applied directly.
                if cast is bool:
                    value = self.cast_boolean(value)
                elif cast is dict:
                    value = self.cast_dict(value)
                elif cast is list:
                    value = self.cast_list(value)
                elif cast is tuple:
                    value = self.cast_tuple(value)
                else:
                    value = cast(value)
        except (NoSectionError, NoOptionError):
            value = default
        return value

    def has_section(self, section):
        """Check if section is available."""
        return self.config_parser.has_section(section)
def main():
    """Command-line entry point: build an omega-scans Condor DAG from a config.ini."""
    #############################################################################
    #
    # ARGUMENT PARSING
    #
    #############################################################################
    parser = OptionParser(usage)
    parser.add_option("-e", "--example", default=False, dest="example",
                      action="store_true",
                      help="Create example config.ini and an example sourcefile")
    (opts, args) = parser.parse_args()

    if opts.example:
        with open("omega_config.ini", "w") as f:
            f.write(ExampleConfig)
        with open("omegascanslist.txt", "w") as f:
            f.write(ExampleSourceFile)
        print("Example files \"omega_config.ini\" and \"omegascanslist.txt\" are created")
        sys.exit(0)

    if len(args) != 1:
        parser.print_help()
        sys.exit("ERROR: Must provide one config.ini")

    cp = ConfigParser()
    cp.optionxform = str  # keep option names case-sensitive
    # Close the config file deterministically (it was previously leaked via
    # `cp.readfp(open(...))`) and avoid `readfp`, removed in Python 3.12.
    with open(args[0]) as configfile:
        if hasattr(cp, 'read_file'):
            cp.read_file(configfile)
        else:
            cp.readfp(configfile)

    dag = OmegaScansDAG(cp)

    dag.write_sub_files()
    dag.write_dag()
    dag.write_script()

    # fix the sub and sh files
    # This is required because pipeline.py does not yet have the ability to add
    # a specific argument before all other arguments and options ('scan' in this case)
    fix_subfile(dag.submitFile)
    fix_scriptfile(cp.get('paths', 'basedir'), dag.get_dag_file(),
                   cp.get('omegapipe', 'executable'))

    print('Successfully created DAG file.')
    fulldagpath = os.path.join(cp.get('paths', 'basedir'), dag.get_dag_file())
    print('Now run condor_submit_dag %s\n' % (fulldagpath))
def _makeInstance(self, id, portal_type, subdir, import_context):
    """Construct a content object of *portal_type* named *id* and initialize
    it from the ``.properties`` data found under *subdir*.

    Returns the created object, or None when the type is invalid.
    NOTE(review): presumably CMF/GenericSetup machinery — the adapter and
    tool semantics are defined elsewhere; verify against the import framework.
    """
    context = self.context
    subdir = '%s/%s' % (subdir, id)
    # Raw text of the .properties file for this object, or None if absent.
    properties = self.read_data_file(import_context, '.properties', subdir)
    tool = getUtility(ITypesTool)

    try:
        tool.constructContent(portal_type, context, id)
    except ValueError:
        # invalid type
        return None

    content = context._getOb(id)

    if properties is not None:
        if '[DEFAULT]' not in properties:
            # Properties without a [DEFAULT] section: try the DAV-aware
            # adapter first; on failure fall back to the INI parsing below.
            try:
                adp = FolderishDAVAwareFileAdapter
                adp(content).import_(import_context, subdir)
                return content
            except (AttributeError, MethodNotAllowed):
                # Fall through to old implementation below
                pass
        lines = properties.splitlines()
        stream = StringIO('\n'.join(lines))
        parser = ConfigParser(defaults={'title': '', 'description': 'NONE'})
        try:
            parser.read_file(stream)
        except AttributeError:  # Python 2
            parser.readfp(stream)
        title = parser.get('DEFAULT', 'title')
        description = parser.get('DEFAULT', 'description')
        content.setTitle(title)
        content.setDescription(description)

    return content