def setUp(self):
    """Build an isolated Zeyple under a temp dir: config, GPG home, log.

    The relay points at a dummy host/port and outgoing mail is mocked, so
    no network traffic happens during tests.
    """
    self.tmpdir = tempfile.mkdtemp()
    self.conffile = os.path.join(self.tmpdir, 'zeyple.conf')
    self.homedir = os.path.join(self.tmpdir, 'gpg')
    self.logfile = os.path.join(self.tmpdir, 'zeyple.log')

    config = ConfigParser()

    config.add_section('zeyple')
    config.set('zeyple', 'log_file', self.logfile)
    config.set('zeyple', 'add_header', 'true')

    config.add_section('gpg')
    config.set('gpg', 'home', self.homedir)

    config.add_section('relay')
    config.set('relay', 'host', 'example.net')
    config.set('relay', 'port', '2525')

    with open(self.conffile, 'w') as fp:
        config.write(fp)

    os.mkdir(self.homedir, 0o700)
    # Fix: the previous open('/dev/null') was opened read-only (useless as
    # a stderr target) and the handle was never closed.  Open writable and
    # close it deterministically.
    with open(os.devnull, 'w') as devnull:
        subprocess.check_call(
            ['gpg', '--homedir', self.homedir, '--import', KEYS_FNAME],
            stderr=devnull,
        )

    self.zeyple = zeyple.Zeyple(self.conffile)
    self.zeyple._send_message = Mock()  # don't try to send emails
def _load_ec_as_default_policy(proxy_conf_file, swift_conf_file, **kwargs):
    """
    Override swift.conf [storage-policy:0] section to use a 2+1 EC policy.

    :param proxy_conf_file: Source proxy conf filename
    :param swift_conf_file: Source swift conf filename
    :returns: Tuple of paths to the proxy conf file and swift conf file to
              use
    """
    _debug('Setting configuration for default EC policy')

    conf = ConfigParser()
    conf.read(swift_conf_file)
    # drop every policy section that shipped with swift.conf-sample
    for old_section in [s for s in conf.sections()
                        if s.startswith('storage-policy')]:
        conf.remove_section(old_section)
    # install a single default 2+1 erasure-coding policy as policy 0
    section = 'storage-policy:0'
    conf.add_section(section)
    for option, value in (
            ('name', 'ec-test'),
            ('policy_type', 'erasure_coding'),
            ('ec_type', 'liberasurecode_rs_vand'),
            ('ec_num_data_fragments', 2),
            ('ec_num_parity_fragments', 1),
            ('ec_object_segment_size', 1048576),
            ('default', True)):
        conf.set(section, option, str(value))
    with open(swift_conf_file, 'w') as fp:
        conf.write(fp)
    return proxy_conf_file, swift_conf_file
def add_repo(
    self, name, uri, repo_type='rpm-md', prio=None, dist=None, components=None
):
    """
    Write a yum .repo file for the given repository and register it.

    :param string name: repository base file name
    :param string uri: repository URI
    :param repo_type: repostory type name
    :param int prio: yum repostory priority
    :param dist: unused
    :param components: unused
    """
    repo_base_name = name + '.repo'
    repo_file = self.shared_yum_dir['reposd-dir'] + '/' + repo_base_name
    self.repo_names.append(repo_base_name)
    if os.path.exists(uri):
        # yum requires local paths to take the file: type
        uri = 'file://' + uri
    parser = ConfigParser()
    parser.add_section(name)
    parser.set(name, 'name', name)
    parser.set(name, 'baseurl', uri)
    if prio:
        parser.set(name, 'priority', format(prio))
    with open(repo_file, 'w') as handle:
        parser.write(handle)
def partition_defaultboot(partition):
    """Set (or clear) the NOOBS default-boot partition in noobs.conf.

    Selecting the recovery partition clears default_partition_to_boot and
    sticky_boot; any other partition becomes the sticky default.  The
    settings partition is always unmounted again, even on error.
    """
    # Fix: the original wrapped this in `except: raise`, a bare-except
    # no-op; try/finally alone guarantees the unmount.
    try:
        settings_path = partition_mount(
            config.DATA['system']['settings_partition'], 'rw')
        conf_path = os.path.join(settings_path, 'noobs.conf')
        noobs_conf = ConfigParser()
        noobs_conf.read(conf_path)
        section = 'General'
        if not noobs_conf.has_section(section):
            noobs_conf.add_section(section)
        if partition == config.DATA['system']['recovery_partition']:
            noobs_conf.remove_option(section, 'default_partition_to_boot')
            noobs_conf.remove_option(section, 'sticky_boot')
        else:
            noobs_conf.set(section, 'default_partition_to_boot',
                           str(partition_number(partition)))
            noobs_conf.set(section, 'sticky_boot',
                           str(partition_number(partition)))
        output = StringIO()
        noobs_conf.write(output)
        write_file(conf_path, output.getvalue())
    finally:
        partition_umount(config.DATA['system']['settings_partition'])
def add_repo(
    self, name, uri, repo_type='rpm-md', prio=None, dist=None, components=None
):
    """
    Create a dnf .repo file for the repository and record its name.

    :param string name: repository base file name
    :param string uri: repository URI
    :param repo_type: repostory type name
    :param int prio: dnf repostory priority
    :param dist: unused
    :param components: unused
    """
    repo_file = self.shared_dnf_dir['reposd-dir'] + '/' + name + '.repo'
    self.repo_names.append(name + '.repo')
    if os.path.exists(uri):
        # dnf requires local paths to take the file: type
        uri = 'file://' + uri
    repo_config = ConfigParser()
    repo_config.add_section(name)
    for key, value in [('name', name), ('baseurl', uri)]:
        repo_config.set(name, key, value)
    if prio:
        repo_config.set(name, 'priority', format(prio))
    with open(repo_file, 'w') as repo:
        repo_config.write(repo)
def _save(self, nv):
    """Persist a new CDD operating voltage to the spectrometer config file.

    :param nv: new operating voltage (numeric)
    """
    p = get_spectrometer_config_path()
    config = ConfigParser()
    config.read(p)
    config.set('CDDParameters', 'OperatingVoltage', nv)
    # Fix: config.write(open(p, 'w')) leaked the file handle; use a
    # context manager (consistent with dump() elsewhere in this file).
    with open(p, 'w') as wfile:
        config.write(wfile)
    self.info('saving new operating voltage {:0.1f} to {}'.format(nv, p))
def as_ini(self):
    """Serialize the context's properties as INI text in the DEFAULT section."""
    parser = ConfigParser()
    buffer = cStringIO()
    for key, value in self.context.propertyItems():
        parser.set('DEFAULT', key, str(value))
    parser.write(buffer)
    return buffer.getvalue()
def open_buildout_configfile(filepath="buildout.cfg", write_on_exit=False):
    """Yield a ConfigParser for *filepath*, optionally writing it back on exit.

    The parsed file is yielded once; when the generator resumes, the parser
    is written back to *filepath* only if *write_on_exit* is true.
    """
    parser = ConfigParser()
    parser.read(filepath)
    try:
        yield parser
    finally:
        if write_on_exit:
            with open(filepath, 'w') as fd:
                parser.write(fd)
def make_oauth_client(base_url):
    """
    Build an oauth.Client with which callers can query Allura.

    Credentials are read from ~/.allurarc; if no request tokens are cached
    there yet, the interactive three-legged OAuth dance is performed and
    the resulting tokens are saved back for re-use.
    """
    config_file = os.path.join(os.environ['HOME'], '.allurarc')
    cp = ConfigParser()
    cp.read(config_file)

    REQUEST_TOKEN_URL = base_url + '/rest/oauth/request_token'
    AUTHORIZE_URL = base_url + '/rest/oauth/authorize'
    ACCESS_TOKEN_URL = base_url + '/rest/oauth/access_token'
    oauth_key = option(cp, base_url, 'oauth_key',
                       'Forge API OAuth Key (%s/auth/oauth/): ' % base_url)
    oauth_secret = option(cp, base_url, 'oauth_secret',
                          'Forge API Oauth Secret: ')
    consumer = oauth.Consumer(oauth_key, oauth_secret)

    try:
        oauth_token = cp.get(base_url, 'oauth_token')
        oauth_token_secret = cp.get(base_url, 'oauth_token_secret')
    except NoOptionError:
        # No cached tokens: obtain a request token, ask the user to
        # authorize it, then trade it for an access token.
        client = oauth.Client(consumer)
        resp, content = client.request(REQUEST_TOKEN_URL, 'GET')
        assert resp['status'] == '200', resp

        request_token = dict(six.moves.urllib.parse.parse_qsl(content))
        pin_url = "%s?oauth_token=%s" % (
            AUTHORIZE_URL, request_token['oauth_token'])
        if getattr(webbrowser.get(), 'name', '') == 'links':
            # sandboxes
            print(("Go to %s" % pin_url))
        else:
            webbrowser.open(pin_url)
        oauth_verifier = input('What is the PIN? ')

        token = oauth.Token(
            request_token['oauth_token'],
            request_token['oauth_token_secret'])
        token.set_verifier(oauth_verifier)
        client = oauth.Client(consumer, token)
        resp, content = client.request(ACCESS_TOKEN_URL, "GET")
        access_token = dict(six.moves.urllib.parse.parse_qsl(content))
        oauth_token = access_token['oauth_token']
        oauth_token_secret = access_token['oauth_token_secret']

        cp.set(base_url, 'oauth_token', oauth_token)
        cp.set(base_url, 'oauth_token_secret', oauth_token_secret)

        # save oauth token for later use; fix: the previous
        # cp.write(open(config_file, 'w')) leaked the file handle
        with open(config_file, 'w') as fp:
            cp.write(fp)
        print('Saving oauth tokens in {} for later re-use'.format(config_file))
        print()

    access_token = oauth.Token(oauth_token, oauth_token_secret)
    oauth_client = oauth.Client(consumer, access_token)
    return oauth_client
def save_config(config):
    """Save configuration.

    :param config: Data to be written to the configuration file.
    :type config: dict
    """
    config_parser = ConfigParser()
    config_parser.add_section("sublime")

    if len(config) == 0:
        click.echo(
            'Error: no options provided. Try "sublime setup -h" for help.')
        click.get_current_context().exit(-1)

    # If either value was not specified, load the existing values saved
    # to ensure we don't overwrite their values to null here
    saved_config = load_config()
    if 'api_key' not in config or not config['api_key']:
        config['api_key'] = saved_config['api_key']
    if 'save_dir' not in config or not config['save_dir']:
        config['save_dir'] = saved_config['save_dir']
    if 'permission' not in config or not config['permission']:
        config['permission'] = saved_config['permission']

    if config["save_dir"] and not os.path.isdir(config["save_dir"]):
        click.echo("Error: save directory is not a valid directory")
        click.get_current_context().exit(-1)

    config_parser.set("sublime", "api_key", config["api_key"])
    config_parser.set("sublime", "save_dir", config["save_dir"])
    config_parser.set("sublime", "permission", config["permission"])

    config_parser_existing = ConfigParser()
    if os.path.isfile(CONFIG_FILE):
        # LOGGER.debug("Reading configuration file: %s...", CONFIG_FILE, path=CONFIG_FILE)
        with open(CONFIG_FILE) as config_file:
            config_parser_existing.readfp(config_file)

        # if an emailrep key exists, ensure we don't overwrite it
        # Fix: narrowed the bare `except:` (which also swallowed
        # SystemExit/KeyboardInterrupt); missing section/option is still
        # treated as best-effort.
        try:
            emailrep_key = config_parser_existing.get("emailrep", "key")
            if emailrep_key:
                config_parser.add_section("emailrep")
                config_parser.set("emailrep", "key", emailrep_key)
        except Exception:
            pass

    config_dir = os.path.dirname(CONFIG_FILE)
    if not os.path.isdir(config_dir):
        os.makedirs(config_dir)

    with open(CONFIG_FILE, "w") as config_file:
        config_parser.write(config_file)
def dump(self):
    """Write all (group, parameter, value) triples back to the spectrometer
    config file and return its path."""
    path = get_spectrometer_config_path()
    parser = ConfigParser()
    parser.read(path)
    for group, param, value in self.itervalues():
        parser.set(group, param, value)
    with open(path, 'w') as handle:
        parser.write(handle)
    return path
def stayloggedin(self):
    """ handles timeout constraints of the link before exiting """
    session = ConfigParser()
    session.read(self.SessionFile)
    # record the time of this command so the session is kept alive
    session['DEFAULT']['lastcommandtime'] = repr(time())
    with open(self.SessionFile, 'w') as handle:
        session.write(handle)
    self.link._do_delay()
    logging.debug('Staying logged in')
    return
def _modify_wpr_file(template, outfile, version):
    """Write a Wing project file based on *template*, pinning the current
    interpreter and sys.path on macOS."""
    config = ConfigParser()
    config.read(template)
    if sys.platform == 'darwin':
        pyexec = text_type(dict({None: ('custom', sys.executable)}))
        pypath = text_type(dict({None: ('custom', os.pathsep.join(sys.path))}))
        config.set('user attributes', 'proj.pyexec', pyexec)
        config.set('user attributes', 'proj.pypath', pypath)

    with open(outfile, 'w') as fp:
        # Wing project files start with a magic header before the INI body
        fp.write('#!wing\n#!version=%s\n' % version)
        config.write(fp)
class OSCAPConfig(object):
    """Thin persistence wrapper around the openscap INI config file.

    Every property assignment is written straight to disk.
    """
    _config_file = SCAP_BASEDIR + "/config"
    _section = "openscap"
    _profile = "profile"
    _datastream = "datastream"
    _configured = "configured"

    def __init__(self):
        self._cp = ConfigParser()
        self._cp.read(self._config_file)
        if not self._cp.has_section(self._section):
            self._cp.add_section(self._section)

    def _set_value(self, key, val):
        # persist immediately so every property assignment hits disk
        self._cp.set(self._section, key, val)
        self._save()

    def _get_value(self, key, default=None):
        if self._cp.has_option(self._section, key):
            return self._cp.get(self._section, key)
        return default

    def _save(self):
        with open(self._config_file, "w") as handle:
            self._cp.write(handle)

    @property
    def profile(self):
        return self._get_value(self._profile)

    @profile.setter
    def profile(self, value):
        return self._set_value(self._profile, value)

    @property
    def datastream(self):
        return self._get_value(self._datastream)

    @datastream.setter
    def datastream(self, value):
        return self._set_value(self._datastream, value)

    @property
    def registered(self):
        # registered once both a profile and a datastream are recorded
        return bool(self.profile and self.datastream)

    @property
    def configured(self):
        return self._get_value(self._configured)

    @configured.setter
    def configured(self, value):
        return self._set_value(self._configured, value)
def _load_domain_remap_staticweb(proxy_conf_file, swift_conf_file, **kwargs):
    """
    Load domain_remap and staticweb into proxy server pipeline.

    :param proxy_conf_file: Source proxy conf filename
    :param swift_conf_file: Source swift conf filename
    :returns: Tuple of paths to the proxy conf file and swift conf file to
              use
    :raises InProcessException: raised if proxy conf contents are invalid
    """
    _debug('Setting configuration for domain_remap')

    # add a domain_remap storage_domain to the test configuration
    storage_domain = 'example.net'
    global config
    config['storage_domain'] = storage_domain

    # The global conf dict cannot be used to modify the pipeline.
    # The pipeline loader requires the pipeline to be set in the local_conf.
    # If pipeline is set in the global conf dict (which in turn populates the
    # DEFAULTS options) then it prevents pipeline being loaded into the local
    # conf during wsgi load_app.
    # Therefore we must modify the [pipeline:main] section.
    conf = ConfigParser()
    conf.read(proxy_conf_file)
    try:
        section = 'pipeline:main'
        old_pipeline = conf.get(section, 'pipeline')
        pipeline = old_pipeline
        for needle, replacement in (
                (" tempauth ", " tempauth staticweb "),
                (" listing_formats ", " domain_remap listing_formats ")):
            pipeline = pipeline.replace(needle, replacement)
        if pipeline == old_pipeline:
            raise InProcessException(
                "Failed to insert domain_remap and staticweb into pipeline: %s"
                % old_pipeline)
        conf.set(section, 'pipeline', pipeline)
        # set storage_domain in domain_remap middleware to match test config
        section = 'filter:domain_remap'
        conf.set(section, 'storage_domain', storage_domain)
    except NoSectionError as err:
        msg = 'Error problem with proxy conf file %s: %s' % \
            (proxy_conf_file, err)
        raise InProcessException(msg)

    test_conf_file = os.path.join(_testdir, 'proxy-server.conf')
    with open(test_conf_file, 'w') as fp:
        conf.write(fp)
    return test_conf_file, swift_conf_file
def add_repo(
    self, name, uri, repo_type='rpm-md', prio=None, dist=None,
    components=None, user=None, secret=None, credentials_file=None,
    repo_gpgcheck=None, pkg_gpgcheck=None
):
    """
    Write a yum .repo file for the repository and register it.

    :param string name: repository base file name
    :param string uri: repository URI
    :param repo_type: repostory type name
    :param int prio: yum repostory priority
    :param dist: unused
    :param components: unused
    :param user: unused
    :param secret: unused
    :param credentials_file: unused
    :param bool repo_gpgcheck: enable repository signature validation
    :param bool pkg_gpgcheck: enable package signature validation
    """
    repo_file = self.shared_yum_dir['reposd-dir'] + '/' + name + '.repo'
    self.repo_names.append(name + '.repo')
    if os.path.exists(uri):
        # yum requires local paths to take the file: type
        uri = 'file://' + uri
    # assemble options first, then emit them in one pass
    options = [('name', name), ('baseurl', uri), ('enabled', '1')]
    if prio:
        options.append(('priority', format(prio)))
    if repo_gpgcheck is not None:
        options.append(('repo_gpgcheck', '1' if repo_gpgcheck else '0'))
    if pkg_gpgcheck is not None:
        options.append(('gpgcheck', '1' if pkg_gpgcheck else '0'))
    repo_config = ConfigParser()
    repo_config.add_section(name)
    for key, value in options:
        repo_config.set(name, key, value)
    with open(repo_file, 'w') as repo:
        repo_config.write(RepositoryYumSpaceRemover(repo))
def _load_domain_remap_staticweb(proxy_conf_file, swift_conf_file, **kwargs):
    """
    Load domain_remap and staticweb into proxy server pipeline.

    :param proxy_conf_file: Source proxy conf filename
    :param swift_conf_file: Source swift conf filename
    :returns: Tuple of paths to the proxy conf file and swift conf file to
              use
    :raises InProcessException: raised if proxy conf contents are invalid
    """
    _debug('Setting configuration for domain_remap')

    # add a domain_remap storage_domain to the test configuration
    storage_domain = 'example.net'
    global config
    config['storage_domain'] = storage_domain

    # The global conf dict cannot be used to modify the pipeline.
    # The pipeline loader requires the pipeline to be set in the local_conf.
    # If pipeline is set in the global conf dict (which in turn populates the
    # DEFAULTS options) then it prevents pipeline being loaded into the local
    # conf during wsgi load_app.
    # Therefore we must modify the [pipeline:main] section.
    conf = ConfigParser()
    conf.read(proxy_conf_file)
    try:
        section = 'pipeline:main'
        original_pipeline = conf.get(section, 'pipeline')
        updated_pipeline = original_pipeline.replace(
            " tempauth ", " domain_remap tempauth staticweb ")
        if updated_pipeline == original_pipeline:
            raise InProcessException(
                "Failed to insert domain_remap and staticweb into pipeline: %s"
                % original_pipeline)
        conf.set(section, 'pipeline', updated_pipeline)
        # set storage_domain in domain_remap middleware to match test config
        section = 'filter:domain_remap'
        conf.set(section, 'storage_domain', storage_domain)
    except NoSectionError as err:
        msg = 'Error problem with proxy conf file %s: %s' % \
            (proxy_conf_file, err)
        raise InProcessException(msg)

    test_conf_file = os.path.join(_testdir, 'proxy-server.conf')
    with open(test_conf_file, 'w') as fp:
        conf.write(fp)
    return test_conf_file, swift_conf_file
def setUp(self):
    """
    Reset all environment and start all servers.
    """
    super(TestDarkDataDeletion, self).setUp()

    # Timestamped scratch dir under /tmp holding the per-test
    # object-server configs we generate below.
    self.conf_dest = \
        os.path.join('/tmp/',
                     datetime.now().strftime('swift-%Y-%m-%d_%H-%M-%S-%f'))
    os.mkdir(self.conf_dest)

    object_server_dir = os.path.join(self.conf_dest, 'object-server')
    os.mkdir(object_server_dir)

    # Rewrite each object-auditor conf so the dark_data watcher runs.
    for conf_file in Server('object-auditor').conf_files():
        config = readconf(conf_file)
        if 'object-auditor' not in config:
            continue  # *somebody* should be set up to run the auditor
        config['object-auditor'].update(
            {'watchers': 'swift#dark_data'})
        # Note that this setdefault business may mean the watcher doesn't
        # pick up DEFAULT values, but that (probably?) won't matter.
        # We set grace_age to 0 so that tests don't have to deal with time.
        config.setdefault(CONF_SECTION, {}).update({
            'action': self.action,
            'grace_age': "0"})

        # Re-serialize only the two sections the auditor needs.
        parser = ConfigParser()
        for section in ('object-auditor', CONF_SECTION):
            parser.add_section(section)
            for option, value in config[section].items():
                parser.set(section, option, value)

        file_name = os.path.basename(conf_file)
        if file_name.endswith('.d'):
            # Work around conf.d setups (like you might see with VSAIO)
            file_name = file_name[:-2]
        with open(os.path.join(object_server_dir, file_name), 'w') as fp:
            parser.write(fp)

    # Fresh container/object names per test run; BrainSplitter drives
    # the split-brain scenarios against them.
    self.container_name = 'container-%s' % uuid.uuid4()
    self.object_name = 'object-%s' % uuid.uuid4()
    self.brain = BrainSplitter(self.url, self.token, self.container_name,
                               self.object_name, 'object',
                               policy=self.policy)
def pretty_format_ini(argv=None):
    # type: (typing.Optional[typing.List[typing.Text]]) -> int
    """Check the given INI files for canonical formatting; return 1 if any
    file is not pretty-formatted (optionally rewriting it with --autofix)."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--autofix",
        action="store_true",
        dest="autofix",
        help="Automatically fixes encountered not-pretty-formatted files",
    )
    parser.add_argument("filenames", nargs="*", help="Filenames to fix")
    args = parser.parse_args(argv)

    status = 0

    for ini_file in set(args.filenames):
        with open(ini_file) as input_file:
            string_content = input_file.read()

        config_parser = ConfigParser()
        try:
            if PY3:  # pragma: no cover # py3+ only
                config_parser.read_string(string_content)
            else:  # pragma: no cover # py27 only
                config_parser.readfp(StringIO(str(string_content)))
        except Error:
            print("Input File {} is not a valid INI file".format(ini_file))
            return 1

        canonical = StringIO()
        config_parser.write(canonical)
        pretty_content_str = remove_trailing_whitespaces_and_set_new_line_ending(
            canonical.getvalue(),
        )

        if string_content != pretty_content_str:
            print("File {} is not pretty-formatted".format(ini_file))
            if args.autofix:
                print("Fixing file {}".format(ini_file))
                with io.open(ini_file, "w", encoding="UTF-8") as output_file:
                    output_file.write(text_type(pretty_content_str))
            status = 1

    return status
def _install_desktop_file(self, destdir, prefix, activity_path):
    """Generate and install a freedesktop .desktop entry for the bundle,
    including translated Name/Comment entries from each locale."""
    desktop = ConfigParser()
    section = 'Desktop Entry'
    desktop.add_section(section)
    desktop.optionxform = str  # Allow CamelCase entries

    # Use activity.info for the non-translated name/summary
    info = ConfigParser()
    info_path = os.path.join(destdir, os.path.relpath(activity_path, '/'),
                             'activity', 'activity.info')
    info.read(info_path)
    desktop.set(section, 'Name', info.get('Activity', 'name'))
    if info.has_option('Activity', 'summary'):
        desktop.set(section, 'Comment', info.get('Activity', 'summary'))

    linfo_glob = os.path.join(activity_path, 'locale', '*', 'activity.linfo')
    for linfo_path in sorted(glob(linfo_glob)):
        locale = linfo_path.split(os.path.sep)[-2]
        linfo = ConfigParser()
        linfo.read(linfo_path)
        if linfo.has_option('Activity', 'name'):
            desktop.set(section, 'Name[{}]'.format(locale),
                        linfo.get('Activity', 'name'))
        if linfo.has_option('Activity', 'summary'):
            desktop.set(section, 'Comment[{}]'.format(locale),
                        linfo.get('Activity', 'summary'))

    desktop.set(section, 'Terminal', 'false')
    desktop.set(section, 'Type', 'Application')
    desktop.set(section, 'Categories', 'Education;')
    desktop.set(
        section, 'Icon',
        os.path.join(activity_path, 'activity',
                     self.config.bundle.get_icon_filename()))
    desktop.set(section, 'Exec', self.config.bundle.get_command())
    desktop.set(section, 'Path', activity_path)  # Path == CWD for running

    name = '{}.activity.desktop'.format(self.config.bundle_id)
    path = os.path.join(destdir, os.path.relpath(prefix, '/'), 'share',
                        'applications', name)
    if not os.path.isdir(os.path.dirname(path)):
        os.makedirs(os.path.dirname(path))
    with open(path, 'w') as f:
        desktop.write(f)
    print('Install %s' % (path))
def pretty_format_ini(argv=None):
    """Return 1 when any given INI file is not canonically formatted,
    rewriting offenders in place when --autofix is passed."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--autofix',
        action='store_true',
        dest='autofix',
        help='Automatically fixes encountered not-pretty-formatted files',
    )
    arg_parser.add_argument('filenames', nargs='*', help='Filenames to fix')
    args = arg_parser.parse_args(argv)

    status = 0

    for ini_file in set(args.filenames):
        with open(ini_file) as handle:
            original = ''.join(handle.readlines())

        parser = ConfigParser()
        try:
            if PY3:  # pragma: no cover # py3+ only
                parser.read_string(original)
            else:  # pragma: no cover # py27 only
                parser.readfp(StringIO(original))

            formatted = StringIO()
            parser.write(formatted)
            pretty = remove_trailing_whitespaces_and_set_new_line_ending(
                formatted.getvalue(),
            )

            if original != pretty:
                print('File {} is not pretty-formatted'.format(ini_file))

                if args.autofix:
                    print('Fixing file {}'.format(ini_file))
                    with io.open(ini_file, 'w', encoding='UTF-8') as handle:
                        handle.write(text_type(pretty))

                status = 1
        except Error:
            print('Input File {} is not a valid INI file'.format(ini_file))
            return 1

    return status
def login(conf):
    """Store an iscore API token in the user's flagrc file, prompting for
    it when not given on the command line."""
    token = conf.cli_args.token
    if not token:
        token = getpass.getpass('Token: ')

    flagrc_path = os.path.join(os.path.expanduser(conf.cli_args.config))
    print("Writing token to {}".format(flagrc_path))

    flagrc = ConfigParser()
    flagrc.read(flagrc_path)
    if not flagrc.has_section('iscore'):
        flagrc.add_section('iscore')
    flagrc.set('iscore', 'api_token', token)
    with open(flagrc_path, 'w') as fp:
        flagrc.write(fp)
def save_config(config):
    """Save configuration.

    :param config: Data to be written to the configuration file.
    :type config: dict
    """
    parser = ConfigParser()
    parser.add_section("greynoise")
    parser.set("greynoise", "api_key", config["api_key"])

    # create the config directory on first save
    config_dir = os.path.dirname(CONFIG_FILE)
    if not os.path.isdir(config_dir):
        os.makedirs(config_dir)

    with open(CONFIG_FILE, "w") as config_file:
        parser.write(config_file)
def run(self, args, **kwargs):
    """Authenticate against st2, cache the auth token, and persist the
    credentials (optionally including the password) to the CLI config."""
    if not args.password:
        args.password = getpass.getpass()
    instance = self.resource(ttl=args.ttl) if args.ttl else self.resource()

    cli = BaseCLIApp()

    # Determine path to config file
    try:
        config_file = cli._get_config_file_path(args)
    except ValueError:
        # config file not found in args or in env, defaulting
        config_file = config_parser.ST2_CONFIG_PATH

    # Retrieve token
    manager = self.manager.create(
        instance, auth=(args.username, args.password), **kwargs)
    cli._cache_auth_token(token_obj=manager)

    # Update existing configuration with new credentials
    config = ConfigParser()
    config.read(config_file)

    # Modify config (and optionally populate with password)
    if not config.has_section('credentials'):
        config.add_section('credentials')
    config.set('credentials', 'username', args.username)
    if args.write_password:
        config.set('credentials', 'password', args.password)
    else:
        # Remove any existing password from config
        config.remove_option('credentials', 'password')

    config_existed = os.path.exists(config_file)
    with open(config_file, 'w') as fh:
        config.write(fh)
    # If we created the config file, correct the permissions
    if not config_existed:
        os.chmod(config_file, 0o660)

    return manager
def logout(self, cutConnection=True, callback=None):
    """ Log out from AniDB UDP API """
    config = ConfigParser()
    config.read(self.SessionFile)
    if config['DEFAULT']['loggedin'] != 'yes':
        # nothing to do: no active session on record
        logging.debug('Not logging out')
        return

    self.link.session = config.get('DEFAULT', 'sessionkey')
    result = self.handle(LogoutCommand(), callback)
    if cutConnection:
        self.cut()
    # mark the session closed before returning
    config['DEFAULT']['loggedin'] = 'no'
    with open(self.SessionFile, 'w') as configfile:
        config.write(configfile)
    logging.debug('Logging out')
    return result
def _get_config_object(config_path, use_cashed_config=True):
    '''
    Returns a ConfigParser for the config file at the given path. If no file
    exists, an empty config file is created.

    :param config_path:
    :param use_cashed_config: If set to True, will return the previously
        created ConfigParser file (if previously created). If set to False,
        will re-read the config file from disk. If a ConfigParser was
        previously created, it will not be replaced!
    :return:
    '''
    cached = _CONFIG_OBJECTS.get(config_path)
    if cached is not None and use_cashed_config:
        return cached

    config = ConfigParser()
    if os.path.exists(config_path):
        config.read(config_path)
    else:
        # first use: materialize an empty config file on disk
        with open(config_path, 'w') as f:
            config.write(f)
    if use_cashed_config:
        _CONFIG_OBJECTS[config_path] = config
    return config
def _get_config_object(config_path, use_cashed_config=True):
    '''
    Returns a ConfigParser for the config file at the given path. If no file
    exists, an empty config file is created.

    :param config_path:
    :param use_cashed_config: If set to True, will return the previously
        created ConfigParser file (if previously created). If set to False,
        will re-read the config file from disk. If a ConfigParser was
        previously created, it will not be replaced!
    :return:
    '''
    if config_path in _CONFIG_OBJECTS and use_cashed_config:
        return _CONFIG_OBJECTS[config_path]

    config = ConfigParser()
    if not os.path.exists(config_path):
        # create an empty config file on first use
        with open(config_path, 'w') as fh:
            config.write(fh)
    else:
        config.read(config_path)
    if use_cashed_config:
        _CONFIG_OBJECTS[config_path] = config
    return config
def _in_process_setup_swift_conf(swift_conf_src, testdir): # override swift.conf contents for in-process functional test runs conf = ConfigParser() conf.read(swift_conf_src) try: section = 'swift-hash' conf.set(section, 'swift_hash_path_suffix', 'inprocfunctests') conf.set(section, 'swift_hash_path_prefix', 'inprocfunctests') section = 'swift-constraints' max_file_size = (8 * 1024 * 1024) + 2 # 8 MB + 2 conf.set(section, 'max_file_size', str(max_file_size)) except NoSectionError: msg = 'Conf file %s is missing section %s' % (swift_conf_src, section) raise InProcessException(msg) test_conf_file = os.path.join(testdir, 'swift.conf') with open(test_conf_file, 'w') as fp: conf.write(fp) return test_conf_file
def _in_process_setup_swift_conf(swift_conf_src, testdir): # override swift.conf contents for in-process functional test runs conf = ConfigParser() conf.read(swift_conf_src) try: section = "swift-hash" conf.set(section, "swift_hash_path_suffix", "inprocfunctests") conf.set(section, "swift_hash_path_prefix", "inprocfunctests") section = "swift-constraints" max_file_size = (8 * 1024 * 1024) + 2 # 8 MB + 2 conf.set(section, "max_file_size", max_file_size) except NoSectionError: msg = "Conf file %s is missing section %s" % (swift_conf_src, section) raise InProcessException(msg) test_conf_file = os.path.join(testdir, "swift.conf") with open(test_conf_file, "w") as fp: conf.write(fp) return test_conf_file
def _in_process_setup_swift_conf(swift_conf_src, testdir): # override swift.conf contents for in-process functional test runs conf = ConfigParser() conf.read(swift_conf_src) try: section = 'swift-hash' conf.set(section, 'swift_hash_path_suffix', 'inprocfunctests') conf.set(section, 'swift_hash_path_prefix', 'inprocfunctests') section = 'swift-constraints' max_file_size = (8 * 1024 * 1024) + 2 # 8 MB + 2 conf.set(section, 'max_file_size', max_file_size) except NoSectionError: msg = 'Conf file %s is missing section %s' % (swift_conf_src, section) raise InProcessException(msg) test_conf_file = os.path.join(testdir, 'swift.conf') with open(test_conf_file, 'w') as fp: conf.write(fp) return test_conf_file
def _update_fstab(newroot):
    # Point the new layer's fstab "/" entry at the new LV and make sure
    # every tracked mount target uses the 'discard' option; targets that
    # have no fstab entry get 'discard' added to their systemd .mount unit
    # instead.
    newfstab = Fstab("%s/etc/fstab" % newroot)
    if not newfstab.exists():
        log.info("The new layer contains no fstab, skipping.")
        return
    log.debug("Checking new fstab: %s" % newfstab)
    log.info("Updating fstab of new layer")
    rootentry = newfstab.by_target("/")
    # NOTE(review): new_lv is not defined in this function — presumably a
    # module-level or closure variable; confirm it is in scope here.
    rootentry.source = new_lv.path
    newfstab.update(rootentry)
    # Ensure that discard is used
    # This can also be done in anaconda once it is fixed
    targets = list(constants.volume_paths().keys()) + ["/"]
    for tgt in targets:
        try:
            e = newfstab.by_target(tgt)
            if "discard" not in e.options:
                e.options += ["discard"]
                newfstab.update(e)
        except KeyError:
            # Created with imgbased.volume?
            log.debug("{} not found in /etc/fstab. "
                      "not created by Anaconda".format(tgt))
            from six.moves.configparser import ConfigParser
            c = ConfigParser()
            # keep option-name case: systemd unit keys are case-sensitive
            c.optionxform = str
            # derive the systemd mount-unit name from the target path,
            # e.g. /var/log -> var-log.mount
            sub = re.sub(r'^/', '', tgt)
            sub = re.sub(r'/', '-', sub)
            fname = "{}/etc/systemd/system/{}.mount".format(newroot, sub)
            c.read(fname)
            if 'discard' not in c.get('Mount', 'Options'):
                c.set('Mount', 'Options',
                      ','.join([c.get('Mount', 'Options'), 'discard']))
                with open(fname, 'w') as mountfile:
                    c.write(mountfile)
def process_mistral_config(config_path):
    """
    Remove sensitive data (credentials) from the Mistral config.

    :param config_path: Full absolute path to the mistral config inside /tmp.
    :type config_path: ``str``
    """
    # Fix: `assert` is stripped under `python -O`, silently disabling this
    # safety guard; raise explicitly instead (same exception type).
    if not config_path.startswith('/tmp'):
        raise AssertionError('config_path must reside inside /tmp')

    if not os.path.isfile(config_path):
        return

    config = ConfigParser()
    config.read(config_path)
    for section, options in MISTRAL_CONF_OPTIONS_TO_REMOVE.items():
        for option in options:
            if config.has_option(section, option):
                config.set(section, option, REMOVED_VALUE_NAME)

    with open(config_path, 'w') as fp:
        config.write(fp)
def _load_encryption(proxy_conf_file, swift_conf_file, **kwargs):
    """
    Load encryption configuration and override proxy-server.conf contents.

    :param proxy_conf_file: Source proxy conf filename
    :param swift_conf_file: Source swift conf filename
    :returns: Tuple of paths to the proxy conf file and swift conf file to
              use
    :raises InProcessException: raised if proxy conf contents are invalid
    """
    import base64  # local import: only needed for the root secret below

    _debug('Setting configuration for encryption')

    # The global conf dict cannot be used to modify the pipeline.
    # The pipeline loader requires the pipeline to be set in the local_conf.
    # If pipeline is set in the global conf dict (which in turn populates the
    # DEFAULTS options) then it prevents pipeline being loaded into the local
    # conf during wsgi load_app.
    # Therefore we must modify the [pipeline:main] section.
    conf = ConfigParser()
    conf.read(proxy_conf_file)
    try:
        section = 'pipeline:main'
        pipeline = conf.get(section, 'pipeline')
        pipeline = pipeline.replace(
            "proxy-logging proxy-server",
            "keymaster encryption proxy-logging proxy-server")
        conf.set(section, 'pipeline', pipeline)
        # Fix: bytes.encode("base64") only exists on Python 2; use the
        # base64 module so this works on Python 3 as well.
        root_secret = base64.b64encode(os.urandom(32)).decode('ascii')
        conf.set('filter:keymaster', 'encryption_root_secret', root_secret)
    except NoSectionError as err:
        msg = 'Error problem with proxy conf file %s: %s' % \
            (proxy_conf_file, err)
        raise InProcessException(msg)

    test_conf_file = os.path.join(_testdir, 'proxy-server.conf')
    with open(test_conf_file, 'w') as fp:
        conf.write(fp)
    return test_conf_file, swift_conf_file
def _install_desktop_file(self, prefix, activity_path):
    """Write a freedesktop .desktop entry for the bundle under
    *prefix*/share/applications, with per-locale Name/Comment entries."""
    entry = ConfigParser()
    section = 'Desktop Entry'
    entry.add_section(section)
    entry.optionxform = str  # Allow CamelCase entries

    # Get it from the activity.info for the non-translated version
    info = ConfigParser()
    info.read(os.path.join(activity_path, 'activity', 'activity.info'))
    entry.set(section, 'Name', info.get('Activity', 'name'))
    if info.has_option('Activity', 'summary'):
        entry.set(section, 'Comment', info.get('Activity', 'summary'))

    linfo_pattern = os.path.join(activity_path, 'locale', '*',
                                 'activity.linfo')
    for linfo_path in sorted(glob(linfo_pattern)):
        locale = linfo_path.split(os.path.sep)[-2]
        linfo = ConfigParser()
        linfo.read(linfo_path)
        if linfo.has_option('Activity', 'name'):
            entry.set(section, 'Name[{}]'.format(locale),
                      linfo.get('Activity', 'name'))
        if linfo.has_option('Activity', 'summary'):
            entry.set(section, 'Comment[{}]'.format(locale),
                      linfo.get('Activity', 'summary'))

    entry.set(section, 'Terminal', 'false')
    entry.set(section, 'Type', 'Application')
    entry.set(section, 'Categories', 'Education;')
    entry.set(section, 'Icon', os.path.join(
        activity_path, 'activity', self.config.bundle.get_icon_filename()))
    entry.set(section, 'Exec', self.config.bundle.get_command())
    entry.set(section, 'Path', activity_path)  # Path == CWD for running

    name = '{}.activity.desktop'.format(self.config.bundle_id)
    path = os.path.join(prefix, 'share', 'applications', name)
    if not os.path.isdir(os.path.dirname(path)):
        os.makedirs(os.path.dirname(path))
    with open(path, 'w') as f:
        entry.write(f)
def run(self, args, **kwargs):
    """Authenticate and persist credentials into the CLI config file.

    Prompts for the password when not supplied, obtains an auth token
    (with optional TTL), caches the token, and writes the username —
    and, only when requested, the password — into the ``[credentials]``
    section of the configuration file.

    :param args: parsed CLI arguments (username, password, ttl,
        write_password, ...)
    :returns: the created token manager object
    """
    if not args.password:
        args.password = getpass.getpass()
    instance = self.resource(ttl=args.ttl) if args.ttl else self.resource()

    cli = BaseCLIApp()

    # Determine path to config file
    try:
        config_file = cli._get_config_file_path(args)
    except ValueError:
        # config file not found in args or in env, defaulting
        config_file = config_parser.ST2_CONFIG_PATH

    # Retrieve token
    manager = self.manager.create(instance, auth=(args.username,
                                  args.password), **kwargs)
    cli._cache_auth_token(token_obj=manager)

    # Update existing configuration with new credentials
    config = ConfigParser()
    config.read(config_file)

    # Modify config (and optionally populate with password)
    if not config.has_section('credentials'):
        config.add_section('credentials')

    config.set('credentials', 'username', args.username)
    if args.write_password:
        config.set('credentials', 'password', args.password)
    else:
        # Remove any existing password from config
        config.remove_option('credentials', 'password')

    with open(config_file, 'w') as cfg_file_out:
        config.write(cfg_file_out)

    return manager
def merge():
    """Merge all .ini files in a directory into a single config file.

    The source directory comes from ``argv[1]``; the destination from
    ``argv[2]`` when given, else ``./config/galaxy.ini``. Files are read
    in sorted order into one parser, so later files override options set
    by earlier ones.

    :raises Exception: if no source directory argument was supplied
    """
    if len(argv) < 2:
        message = "%s: Must specify directory to merge configuration files from." % argv[0]
        raise Exception(message)
    conf_directory = argv[1]
    conf_files = [f for f in listdir(conf_directory) if match(r'.*\.ini', f)]
    conf_files.sort()
    parser = ConfigParser()
    for conf_file in conf_files:
        parser.read([join(conf_directory, conf_file)])
    # TODO: Expand environment variables here, that would
    # also make Galaxy much easier to configure.
    destination = "config/galaxy.ini"
    if len(argv) > 2:
        destination = argv[2]
    # Context manager guarantees the merged file is flushed and closed;
    # the original passed a bare open() handle and leaked it.
    with open(destination, 'w') as fp:
        parser.write(fp)
def setup_config(args):
    """Write the cosutil configuration file from parsed CLI arguments.

    Persists COS credentials and transfer options into the ``[common]``
    section and prefix handling into the ``[cosutil]`` section of the
    file named by ``args.config_path`` (``~`` is expanded).

    :param args: namespace carrying secret_id, secret_key, bucket,
        region, max_thread, part_size, no_prefix, relative_prefix,
        fixed_prefix and config_path attributes
    """
    config_path = os.path.expanduser(args.config_path)
    cp = ConfigParser()

    cp.add_section("common")
    cp.set('common', 'secret_id', args.secret_id)
    cp.set('common', 'secret_key', args.secret_key)
    cp.set('common', 'bucket', args.bucket)
    cp.set('common', 'region', args.region)
    cp.set('common', 'max_thread', str(args.max_thread))
    cp.set('common', 'part_size', str(args.part_size))

    cp.add_section("cosutil")
    # Three mutually exclusive prefix modes; 'fixed' is the fallback.
    if args.no_prefix:
        prefix_type = 'absence'
    elif args.relative_prefix:
        prefix_type = 'relative'
    else:
        prefix_type = 'fixed'
    cp.set('cosutil', 'fixed_prefix', args.fixed_prefix)
    cp.set('cosutil', 'prefix_type', prefix_type)

    # The original handed a bare open() result to write(), leaking the
    # descriptor; the context manager closes and flushes deterministically.
    with open(config_path, 'w') as fp:
        cp.write(fp)
def merge():
    """Merge all .ini files in a directory into a single config file.

    Reads the source directory from ``argv[1]`` and the optional
    destination from ``argv[2]`` (default ``./config/galaxy.ini``).
    Files are loaded in sorted order into one parser, so later files
    override earlier ones.

    :raises Exception: if no source directory argument was supplied
    """
    if len(argv) < 2:
        message = "%s: Must specify directory to merge configuration files from." % argv[0]
        raise Exception(message)
    conf_directory = argv[1]
    conf_files = [f for f in listdir(conf_directory) if match(r'.*\.ini', f)]
    conf_files.sort()
    parser = ConfigParser()
    for conf_file in conf_files:
        parser.read([join(conf_directory, conf_file)])
    # TODO: Expand environment variables here, that would
    # also make Galaxy much easier to configure.
    destination = "config/galaxy.ini"
    if len(argv) > 2:
        destination = argv[2]
    # Fix: write through a context manager instead of leaking the handle
    # returned by a bare open() call.
    with open(destination, 'w') as fp:
        parser.write(fp)
def _in_process_setup_ring(swift_conf, conf_src_dir, testdir):
    """
    If SWIFT_TEST_POLICY is set:
    - look in swift.conf file for specified policy
    - move this to be policy-0 but preserving its options
    - copy its ring file to test dir, changing its devices to suit in
      process testing, and renaming it to suit policy-0
    Otherwise, create a default ring file.
    """
    conf = ConfigParser()
    conf.read(swift_conf)
    sp_prefix = 'storage-policy:'

    try:
        # policy index 0 will be created if no policy exists in conf
        policies = parse_storage_policies(conf)
    except PolicyError as e:
        raise InProcessException(e)

    # clear all policies from test swift.conf before adding test policy back
    for policy in policies:
        conf.remove_section(sp_prefix + str(policy.idx))

    # Pick either the externally requested policy or the default one.
    if policy_specified:
        policy_to_test = policies.get_by_name(policy_specified)
        if policy_to_test is None:
            raise InProcessException('Failed to find policy name "%s"'
                                     % policy_specified)
        _info('Using specified policy %s' % policy_to_test.name)
    else:
        policy_to_test = policies.default
        _info('Defaulting to policy %s' % policy_to_test.name)

    # make policy_to_test be policy index 0 and default for the test config
    sp_zero_section = sp_prefix + '0'
    conf.add_section(sp_zero_section)
    for (k, v) in policy_to_test.get_info(config=True).items():
        conf.set(sp_zero_section, k, str(v))
    conf.set(sp_zero_section, 'default', 'True')

    with open(swift_conf, 'w') as fp:
        conf.write(fp)

    # look for a source ring file
    ring_file_src = ring_file_test = 'object.ring.gz'
    if policy_to_test.idx:
        ring_file_src = 'object-%s.ring.gz' % policy_to_test.idx
    try:
        ring_file_src = _in_process_find_conf_file(conf_src_dir,
                                                   ring_file_src,
                                                   use_sample=False)
    except InProcessException as e:
        # Only fatal when the caller explicitly asked for a policy;
        # otherwise fall through to building a default ring below.
        if policy_specified:
            raise InProcessException('Failed to find ring file %s'
                                     % ring_file_src)
        ring_file_src = None

    ring_file_test = os.path.join(testdir, ring_file_test)
    if ring_file_src:
        # copy source ring file to a policy-0 test ring file, re-homing
        # servers onto freshly bound localhost sockets
        _info('Using source ring file %s' % ring_file_src)
        ring_data = ring.RingData.load(ring_file_src)
        obj_sockets = []
        for dev in ring_data.devs:
            # device names are assigned sequentially: sda1, sdb1, ...
            device = 'sd%c1' % chr(len(obj_sockets) + ord('a'))
            # NOTE(review): directories are always created under 'sda1'
            # even though each dev is renamed to its own device name --
            # confirm this is intended and not a copy/paste slip.
            utils.mkdirs(os.path.join(_testdir, 'sda1'))
            utils.mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
            obj_socket = listen_zero()
            obj_sockets.append(obj_socket)
            dev['port'] = obj_socket.getsockname()[1]
            dev['ip'] = '127.0.0.1'
            dev['device'] = device
            dev['replication_port'] = dev['port']
            dev['replication_ip'] = dev['ip']
        ring_data.save(ring_file_test)
    else:
        # make default test ring, 3 replicas, 4 partitions, 3 devices
        # which will work for a replication policy or a 2+1 EC policy
        _info('No source object ring file, creating 3rep/4part/3dev ring')
        obj_sockets = [listen_zero() for _ in (0, 1, 2)]
        replica2part2dev_id = [[0, 1, 2, 0],
                               [1, 2, 0, 1],
                               [2, 0, 1, 2]]
        devs = [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
                 'port': obj_sockets[0].getsockname()[1]},
                {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
                 'port': obj_sockets[1].getsockname()[1]},
                {'id': 2, 'zone': 2, 'device': 'sdc1', 'ip': '127.0.0.1',
                 'port': obj_sockets[2].getsockname()[1]}]
        ring_data = ring.RingData(replica2part2dev_id, devs, 30)
        with closing(GzipFile(ring_file_test, 'wb')) as f:
            pickle.dump(ring_data, f)

    for dev in ring_data.devs:
        _debug('Ring file dev: %s' % dev)

    return obj_sockets
def add_repo(
    self, name, uri, repo_type='rpm-md',
    prio=None, dist=None, components=None,
    user=None, secret=None, credentials_file=None,
    repo_gpgcheck=None, pkg_gpgcheck=None
):
    """
    Add zypper repository

    :param string name: repository name
    :param string uri: repository URI
    :param repo_type: repostory type name
    :param int prio: zypper repostory priority
    :param dist: unused
    :param components: unused
    :param user: credentials username
    :param secret: credentials password
    :param credentials_file: zypper credentials file
    :param bool repo_gpgcheck: enable repository signature validation
    :param bool pkg_gpgcheck: enable package signature validation
    """
    if credentials_file:
        # Store username/password in zypper's credentials directory and
        # reference the file from the URI, rather than embedding secrets.
        repo_secret = os.sep.join(
            [self.shared_zypper_dir['credentials-dir'], credentials_file]
        )
        if os.path.exists(repo_secret):
            Path.wipe(repo_secret)

        if user and secret:
            uri = ''.join([uri, '?credentials=', credentials_file])
            with open(repo_secret, 'w') as credentials:
                credentials.write('username={0}{1}'.format(
                    user, os.linesep)
                )
                credentials.write('password={0}{1}'.format(
                    secret, os.linesep)
                )

    repo_file = ''.join(
        [self.shared_zypper_dir['reposd-dir'], '/', name, '.repo']
    )
    self.repo_names.append(''.join([name, '.repo']))

    # Start from a clean repo file so addrepo cannot fail on a leftover.
    if os.path.exists(repo_file):
        Path.wipe(repo_file)

    self._backup_package_cache()
    Command.run(
        ['zypper'] + self.zypper_args + [
            '--root', self.root_dir,
            'addrepo',
            '--refresh',
            '--type', self._translate_repo_type(repo_type),
            '--keep-packages',
            '-C',
            uri,
            name
        ],
        self.command_env
    )
    if prio or repo_gpgcheck is not None or pkg_gpgcheck is not None:
        # Re-read the repo file zypper just wrote and inject options the
        # addrepo CLI cannot set directly, then write it back.
        repo_config = ConfigParser()
        repo_config.read(repo_file)
        if repo_gpgcheck is not None:
            repo_config.set(
                name, 'repo_gpgcheck', '1' if repo_gpgcheck else '0'
            )
        if pkg_gpgcheck is not None:
            repo_config.set(
                name, 'pkg_gpgcheck', '1' if pkg_gpgcheck else '0'
            )
        if prio:
            repo_config.set(
                name, 'priority', format(prio)
            )
        with open(repo_file, 'w') as repo:
            repo_config.write(repo)
    self._restore_package_cache()
class RepositoryZypper(RepositoryBase):
    """
    Implements repo handling for zypper package manager
    """
    def post_init(self, custom_args=None):
        """
        Post initialization method

        Store custom zypper arguments and create runtime configuration
        and environment

        Attributes

        * :attr:`shared_zypper_dir`
            shared directory between image root and build system root

        * :attr:`runtime_zypper_config_file`
            zypper runtime config file name

        * :attr:`runtime_zypp_config_file`
            libzypp runtime config file name

        * :attr:`zypper_args`
            zypper caller args plus additional custom args

        * :attr:`command_env`
            customized os.environ for zypper

        * :attr:`runtime_zypper_config`
            Instance of ConfigParser

        :param list custom_args: zypper arguments
        """
        self.custom_args = custom_args
        self.exclude_docs = False
        self.gpgcheck = False
        if not custom_args:
            self.custom_args = []

        # extract custom arguments used for zypp config only
        if 'exclude_docs' in self.custom_args:
            self.custom_args.remove('exclude_docs')
            self.exclude_docs = True

        if 'check_signatures' in self.custom_args:
            self.custom_args.remove('check_signatures')
            self.gpgcheck = True

        self.repo_names = []

        # zypper support by default point all actions into the root
        # directory of the image system. This information is passed
        # as arguments to zypper and adapted if the call runs as
        # chrooted operation. Therefore the use of the shared location
        # via RootBind::mount_shared_directory is optional but
        # recommended to make use of the repo cache
        manager_base = self.root_dir + self.shared_location

        self.shared_zypper_dir = {
            'pkg-cache-dir': os.sep.join(
                [manager_base, 'packages']
            ),
            'reposd-dir': os.sep.join(
                [manager_base, 'zypper/repos']
            ),
            'credentials-dir': os.sep.join(
                [manager_base, 'zypper/credentials']
            ),
            'solv-cache-dir': os.sep.join(
                [manager_base, 'zypper/solv']
            ),
            'raw-cache-dir': os.sep.join(
                [manager_base, 'zypper/raw']
            ),
            'cache-dir': os.sep.join(
                [manager_base, 'zypper']
            )
        }

        self.runtime_zypper_config_file = NamedTemporaryFile(
            dir=self.root_dir
        )
        self.runtime_zypp_config_file = NamedTemporaryFile(
            dir=self.root_dir
        )

        self.zypper_args = [
            '--non-interactive',
            '--pkg-cache-dir', self.shared_zypper_dir['pkg-cache-dir'],
            '--reposd-dir', self.shared_zypper_dir['reposd-dir'],
            '--solv-cache-dir', self.shared_zypper_dir['solv-cache-dir'],
            '--cache-dir', self.shared_zypper_dir['cache-dir'],
            '--raw-cache-dir', self.shared_zypper_dir['raw-cache-dir'],
            '--config', self.runtime_zypper_config_file.name
        ] + self.custom_args

        self.command_env = self._create_zypper_runtime_environment()

        # config file parameters for zypper tool
        self.runtime_zypper_config = ConfigParser()
        self.runtime_zypper_config.add_section('main')

        # config file parameters for libzypp library
        self.runtime_zypp_config = ConfigParser()
        self.runtime_zypp_config.add_section('main')
        self.runtime_zypp_config.set(
            'main', 'credentials.global.dir',
            self.shared_zypper_dir['credentials-dir']
        )
        if self.exclude_docs:
            self.runtime_zypp_config.set(
                'main', 'rpm.install.excludedocs', 'yes'
            )

        # gpgcheck is always written explicitly so libzypp does not fall
        # back to its own default.
        if self.gpgcheck:
            self.runtime_zypp_config.set(
                'main', 'gpgcheck', '1'
            )
        else:
            self.runtime_zypp_config.set(
                'main', 'gpgcheck', '0'
            )

        self._write_runtime_config()

    def use_default_location(self):
        """
        Setup zypper repository operations to store all data
        in the default places
        """
        self.shared_zypper_dir['reposd-dir'] = \
            self.root_dir + '/etc/zypp/repos.d'
        self.shared_zypper_dir['credentials-dir'] = \
            self.root_dir + '/etc/zypp/credentials.d'
        self.zypper_args = [
            '--non-interactive',
        ] + self.custom_args
        self.command_env = dict(os.environ, LANG='C')

    def runtime_config(self):
        """
        zypper runtime configuration and environment
        """
        return {
            'zypper_args': self.zypper_args,
            'command_env': self.command_env
        }

    def add_repo(
        self, name, uri, repo_type='rpm-md',
        prio=None, dist=None, components=None,
        user=None, secret=None, credentials_file=None,
        repo_gpgcheck=None, pkg_gpgcheck=None
    ):
        """
        Add zypper repository

        :param string name: repository name
        :param string uri: repository URI
        :param repo_type: repostory type name
        :param int prio: zypper repostory priority
        :param dist: unused
        :param components: unused
        :param user: credentials username
        :param secret: credentials password
        :param credentials_file: zypper credentials file
        :param bool repo_gpgcheck: enable repository signature validation
        :param bool pkg_gpgcheck: enable package signature validation
        """
        if credentials_file:
            # Keep secrets in zypper's credentials store and reference the
            # file from the URI instead of embedding them in the URI.
            repo_secret = os.sep.join(
                [self.shared_zypper_dir['credentials-dir'], credentials_file]
            )
            if os.path.exists(repo_secret):
                Path.wipe(repo_secret)

            if user and secret:
                uri = ''.join([uri, '?credentials=', credentials_file])
                with open(repo_secret, 'w') as credentials:
                    credentials.write('username={0}{1}'.format(
                        user, os.linesep)
                    )
                    credentials.write('password={0}{1}'.format(
                        secret, os.linesep)
                    )

        repo_file = ''.join(
            [self.shared_zypper_dir['reposd-dir'], '/', name, '.repo']
        )
        self.repo_names.append(''.join([name, '.repo']))

        # Remove a leftover repo file so addrepo starts from a clean slate.
        if os.path.exists(repo_file):
            Path.wipe(repo_file)

        self._backup_package_cache()
        Command.run(
            ['zypper'] + self.zypper_args + [
                '--root', self.root_dir,
                'addrepo',
                '--refresh',
                '--type', self._translate_repo_type(repo_type),
                '--keep-packages',
                '-C',
                uri,
                name
            ],
            self.command_env
        )
        if prio or repo_gpgcheck is not None or pkg_gpgcheck is not None:
            # Re-read the file zypper wrote and inject options the addrepo
            # CLI cannot set directly.
            repo_config = ConfigParser()
            repo_config.read(repo_file)
            if repo_gpgcheck is not None:
                repo_config.set(
                    name, 'repo_gpgcheck', '1' if repo_gpgcheck else '0'
                )
            if pkg_gpgcheck is not None:
                repo_config.set(
                    name, 'pkg_gpgcheck', '1' if pkg_gpgcheck else '0'
                )
            if prio:
                repo_config.set(
                    name, 'priority', format(prio)
                )
            with open(repo_file, 'w') as repo:
                repo_config.write(repo)
        self._restore_package_cache()

    def import_trusted_keys(self, signing_keys):
        """
        Imports trusted keys into the image

        :param list signing_keys: list of the key files to import
        """
        for key in signing_keys:
            Command.run(['rpm', '--root', self.root_dir, '--import', key])

    def delete_repo(self, name):
        """
        Delete zypper repository

        :param string name: repository name
        """
        Command.run(
            ['zypper'] + self.zypper_args + [
                '--root', self.root_dir, 'removerepo', name
            ],
            self.command_env
        )

    def delete_all_repos(self):
        """
        Delete all zypper repositories
        """
        Path.wipe(self.shared_zypper_dir['reposd-dir'])
        Path.create(self.shared_zypper_dir['reposd-dir'])

    def delete_repo_cache(self, name):
        """
        Delete zypper repository cache

        The cache data for each repository is stored in a list of
        directories of the same name as the repository name. The method
        deletes these directories to cleanup the cache information

        :param string name: repository name
        """
        Path.wipe(
            os.sep.join([self.shared_zypper_dir['pkg-cache-dir'], name])
        )
        Path.wipe(
            os.sep.join([self.shared_zypper_dir['solv-cache-dir'], name])
        )
        Path.wipe(
            os.sep.join([self.shared_zypper_dir['raw-cache-dir'], name])
        )

    def cleanup_unused_repos(self):
        """
        Delete unused zypper repositories

        zypper creates a system solvable which is unwanted for the
        purpose of building images. In addition zypper fails with
        an error message 'Failed to cache rpm database' if such a
        system solvable exists and a new root system is created

        All other repository configurations which are not used for
        this build must be removed too, otherwise they are taken into
        account for the package installations
        """
        solv_dir = self.shared_zypper_dir['solv-cache-dir']
        Path.wipe(solv_dir + '/@System')

        repos_dir = self.shared_zypper_dir['reposd-dir']
        repo_files = list(os.walk(repos_dir))[0][2]
        for repo_file in repo_files:
            if repo_file not in self.repo_names:
                Path.wipe(repos_dir + '/' + repo_file)

    def _create_zypper_runtime_environment(self):
        # Pre-create every shared directory and point libzypp at the
        # runtime zypp config via ZYPP_CONF.
        for zypper_dir in list(self.shared_zypper_dir.values()):
            Path.create(zypper_dir)
        return dict(
            os.environ,
            LANG='C',
            ZYPP_CONF=self.runtime_zypp_config_file.name
        )

    def _write_runtime_config(self):
        # Persist both in-memory parsers into their temp config files.
        with open(self.runtime_zypper_config_file.name, 'w') as config:
            self.runtime_zypper_config.write(config)
        with open(self.runtime_zypp_config_file.name, 'w') as config:
            self.runtime_zypp_config.write(config)

    def _translate_repo_type(self, repo_type):
        """
        Translate kiwi supported common repo type names from the schema
        into the name the zyper package manager understands
        """
        zypper_type_for = {
            'rpm-md': 'YUM',
            'rpm-dir': 'Plaindir',
            'yast2': 'YaST'
        }
        try:
            return zypper_type_for[repo_type]
        except Exception:
            raise KiwiRepoTypeUnknown(
                'Unsupported zypper repo type: %s' % repo_type
            )

    def _backup_package_cache(self):
        """
        preserve package cache which otherwise will be removed by
        zypper if no repo file is found. But this situation is
        normal for an image build process which setup and remove
        repos for building at runtime
        """
        self._move_package_cache(backup=True)

    def _restore_package_cache(self):
        """
        restore preserved package cache at the location passed to zypper
        """
        self._move_package_cache(restore=True)

    def _move_package_cache(self, backup=False, restore=False):
        # Toggle the cache between its live path and a '.moved' sibling.
        package_cache = self.shared_location + '/packages'
        package_cache_moved = package_cache + '.moved'
        if backup and os.path.exists(package_cache):
            Command.run(
                ['mv', '-f', package_cache, package_cache_moved]
            )
        elif restore and os.path.exists(package_cache_moved):
            Command.run(
                ['mv', '-f', package_cache_moved, package_cache]
            )

    def __del__(self):
        self._restore_package_cache()
class SetupConfig(object):
    """Wrapper around the setup.cfg file if available.

    One reason is to cleanup setup.cfg from these settings::

        [egg_info]
        tag_build = dev
        tag_svn_revision = true

    Another is for optional zest.releaser-specific settings::

        [zest.releaser]
        no-input = yes

    """

    # Filename of the parsed config (SETUP_CONFIG_FILE is 'setup.cfg').
    config_filename = SETUP_CONFIG_FILE

    def __init__(self):
        """Grab the configuration (overridable for test purposes)"""
        # If there is a setup.cfg in the package, parse it; otherwise
        # self.config stays None and every accessor falls back to defaults.
        if not os.path.exists(os.path.join(utils.PACKAGE_ROOT,
                                           self.config_filename)):
            self.config = None
            return
        self.config = ConfigParser()
        with codecs.open(self.config_filename, 'r', 'utf8') as fp:
            self.config.readfp(fp)

    def has_bad_commands(self):
        """Return True when [egg_info] carries tag_build/tag_svn_revision
        values that would taint a release."""
        if self.config is None:
            return False
        if not self.config.has_section('egg_info'):
            # bail out early as the main section is not there
            return False
        bad = False
        # Check 1.
        if self.config.has_option('egg_info', 'tag_build'):
            # Might still be empty.
            value = self.config.get('egg_info', 'tag_build')
            if value:
                logger.warn("%s has [egg_info] tag_build set to %r",
                            self.config_filename, value)
                bad = True
        # Check 2.
        if self.config.has_option('egg_info', 'tag_svn_revision'):
            if self.config.getboolean('egg_info', 'tag_svn_revision'):
                value = self.config.get('egg_info', 'tag_svn_revision')
                logger.warn("%s has [egg_info] tag_svn_revision set to %r",
                            self.config_filename, value)
                bad = True
        return bad

    def fix_config(self):
        """Blank out the offending [egg_info] options and rewrite the file."""
        if not self.has_bad_commands():
            logger.warn("Cannot fix already fine %s.", self.config_filename)
            return
        if self.config.has_option('egg_info', 'tag_build'):
            self.config.set('egg_info', 'tag_build', '')
        if self.config.has_option('egg_info', 'tag_svn_revision'):
            self.config.set('egg_info', 'tag_svn_revision', 'false')
        new_setup = open(self.config_filename, 'w')
        try:
            self.config.write(new_setup)
        finally:
            new_setup.close()
        logger.info("New setup.cfg contents:")
        print(''.join(open(self.config_filename).readlines()))

    def no_input(self):
        """Return whether the user wants to run in no-input mode.

        Enable this mode by adding a ``no-input`` option::

            [zest.releaser]
            no-input = yes

        The default when this option has not been set is False.

        Standard config rules apply, so you can use upper or lower or
        mixed case and specify 0, false, no or off for boolean False,
        and 1, on, true or yes for boolean True.
        """
        default = False
        if self.config is None:
            return default
        try:
            result = self.config.getboolean('zest.releaser', 'no-input')
        except (NoSectionError, NoOptionError, ValueError):
            # Missing section/option or an unparsable boolean all mean
            # "use the default".
            return default
        return result

    def python_file_with_version(self):
        """Return Python filename with ``__version__`` marker, if configured.

        Enable this by adding a ``python-file-with-version`` option::

            [zest.releaser]
            python-file-with-version = reinout/maurits.py

        Return None when nothing has been configured.
        """
        default = None
        if self.config is None:
            return default
        try:
            result = self.config.get(
                'zest.releaser',
                'python-file-with-version')
        except (NoSectionError, NoOptionError, ValueError):
            return default
        return result
class Configuration(object):
    """INI-backed configuration with typed option suffixes and lazily
    constructed state drivers."""

    # Subclasses override this with {key: value} or {section: {key: value}}
    # entries used to seed the parser before any file is loaded.
    defaults = {}

    def __init__(self, filename=None):
        # Seed defaults first so a loaded file can override them.
        self._config = ConfigParser()
        self._set_defaults()
        self._state_drivers = {}
        if filename is not None:
            self.load(filename)

    def _set_defaults(self):
        """Set defaults for config
        """
        self._config.add_section('main')
        for key, value in six.iteritems(self.defaults):
            if isinstance(value, dict):
                # dict values become their own section; everything else
                # lands in [main]
                self._config.add_section(key)
                for subkey, subvalue in six.iteritems(value):
                    self._config.set(key, subkey, subvalue)
            else:
                self._config.set('main', key, value)

    def load(self, filename):
        """Load the configuration by filename
        """
        self._config.read(filename)

    def save(self, filename):
        """Save the configuration to a file
        """
        with open(filename, 'w') as handle:
            self._config.write(handle)

    @staticmethod
    def sanitize(items):
        """Convert (key, value) pairs, honoring '[int]'/'[bool]' key
        suffixes; the suffix is stripped from the returned key."""
        options = {}
        for key, value in items:
            if key.endswith('[int]'):
                options[key[:-5]] = int(value)
            elif key.endswith('[bool]'):
                value = value.lower()
                if value in BOOL_MAP[True]:
                    value = True
                elif value in BOOL_MAP[False]:
                    value = False
                else:
                    raise ValueError('Expected boolean for {}'.format(key))
                # '[bool]' is six characters long
                options[key[:-6]] = value
            else:
                options[key] = value
        return options

    def __getitem__(self, name):
        # Section name -> whole sanitized section dict; any other name is
        # treated as an option inside [main].
        if self._config.has_section(name):
            return self.sanitize(self._config.items(name))
        elif name == 'main':
            raise ValueError('Missing main section of configuration')
        return self['main'][name]

    def state_driver(self, name='ai'):
        """Get an instance of the state driver
        """
        from database import state

        # Drivers are cached per section name so repeated lookups share
        # one instance.
        if name not in self._state_drivers:
            extras = self[name]
            driver = extras.pop('state-driver')

            if driver == 'redis':
                self._state_drivers[name] = state.RedisDriver(self, extras)
            elif driver == 'dict':
                self._state_drivers[name] = state.MemoryDriver(self, extras)
            else:
                raise ValueError('Unknown state driver')
        return self._state_drivers[name]
class RepositoryZypper(RepositoryBase):
    """
    Implements repo handling for zypper package manager
    """
    def post_init(self, custom_args=None):
        """
        Post initialization method

        Store custom zypper arguments and create runtime configuration
        and environment

        Attributes

        * :attr:`shared_zypper_dir`
            shared directory between image root and build system root

        * :attr:`runtime_zypper_config_file`
            zypper runtime config file name

        * :attr:`runtime_zypp_config_file`
            libzypp runtime config file name

        * :attr:`zypper_args`
            zypper caller args plus additional custom args

        * :attr:`command_env`
            customized os.environ for zypper

        * :attr:`runtime_zypper_config`
            Instance of ConfigParser

        :param list custom_args: zypper arguments
        """
        self.custom_args = custom_args
        if not custom_args:
            self.custom_args = []
        self.repo_names = []

        # zypper support by default point all actions into the root
        # directory of the image system. This information is passed
        # as arguments to zypper and adapted if the call runs as
        # chrooted operation. Therefore the use of the shared location
        # via RootBind::mount_shared_directory is optional but
        # recommended to make use of the repo cache
        manager_base = self.root_dir + self.shared_location

        self.shared_zypper_dir = {
            'pkg-cache-dir': manager_base + '/packages',
            'reposd-dir': manager_base + '/zypper/repos',
            'solv-cache-dir': manager_base + '/zypper/solv',
            'raw-cache-dir': manager_base + '/zypper/raw',
            'cache-dir': manager_base + '/zypper'
        }

        self.runtime_zypper_config_file = NamedTemporaryFile(
            dir=self.root_dir
        )
        self.runtime_zypp_config_file = NamedTemporaryFile(
            dir=self.root_dir
        )

        self.zypper_args = [
            '--non-interactive', '--no-gpg-checks',
            '--pkg-cache-dir', self.shared_zypper_dir['pkg-cache-dir'],
            '--reposd-dir', self.shared_zypper_dir['reposd-dir'],
            '--solv-cache-dir', self.shared_zypper_dir['solv-cache-dir'],
            '--cache-dir', self.shared_zypper_dir['cache-dir'],
            '--raw-cache-dir', self.shared_zypper_dir['raw-cache-dir'],
            '--config', self.runtime_zypper_config_file.name
        ] + self.custom_args

        self.command_env = self._create_zypper_runtime_environment()

        # config file parameters for zypper tool
        self.runtime_zypper_config = ConfigParser()
        self.runtime_zypper_config.add_section('main')

        # config file parameters for libzypp library; point every cache
        # location at the shared directories set up above
        self.runtime_zypp_config = ConfigParser()
        self.runtime_zypp_config.add_section('main')
        self.runtime_zypp_config.set(
            'main', 'cachedir', self.shared_zypper_dir['cache-dir']
        )
        self.runtime_zypp_config.set(
            'main', 'metadatadir', self.shared_zypper_dir['raw-cache-dir']
        )
        self.runtime_zypp_config.set(
            'main', 'solvfilesdir', self.shared_zypper_dir['solv-cache-dir']
        )
        self.runtime_zypp_config.set(
            'main', 'packagesdir', self.shared_zypper_dir['pkg-cache-dir']
        )

        self._write_runtime_config()

    def use_default_location(self):
        """
        Setup zypper repository operations to store all data
        in the default places
        """
        self.shared_zypper_dir['reposd-dir'] = \
            self.root_dir + '/etc/zypp/repos.d'
        self.zypper_args = [
            '--non-interactive', '--no-gpg-checks'
        ] + self.custom_args
        self.command_env = dict(os.environ, LANG='C')

    def runtime_config(self):
        """
        zypper runtime configuration and environment
        """
        return {
            'zypper_args': self.zypper_args,
            'command_env': self.command_env
        }

    def add_repo(
        self, name, uri, repo_type='rpm-md',
        prio=None, dist=None, components=None
    ):
        """
        Add zypper repository

        :param string name: repository name
        :param string uri: repository URI
        :param repo_type: repostory type name
        :param int prio: yum repostory priority
        :param dist: unused
        :param components: unused
        """
        repo_file = self.shared_zypper_dir['reposd-dir'] + '/' + name + '.repo'
        self.repo_names.append(name + '.repo')

        # Remove a leftover repo file so addrepo starts clean.
        if os.path.exists(repo_file):
            Path.wipe(repo_file)

        self._backup_package_cache()
        Command.run(
            ['zypper'] + self.zypper_args + [
                '--root', self.root_dir,
                'addrepo',
                '--refresh',
                '--type', self._translate_repo_type(repo_type),
                '--keep-packages',
                '-C',
                uri,
                name
            ],
            self.command_env
        )
        if prio:
            # Priority is applied in a second zypper call since addrepo
            # does not accept it directly here.
            Command.run(
                ['zypper'] + self.zypper_args + [
                    '--root', self.root_dir,
                    'modifyrepo', '--priority', format(prio), name
                ],
                self.command_env
            )
        self._restore_package_cache()

    def delete_repo(self, name):
        """
        Delete zypper repository

        :param string name: repository name
        """
        Command.run(
            ['zypper'] + self.zypper_args + [
                '--root', self.root_dir, 'removerepo', name
            ],
            self.command_env
        )

    def delete_all_repos(self):
        """
        Delete all zypper repositories
        """
        Path.wipe(self.shared_zypper_dir['reposd-dir'])
        Path.create(self.shared_zypper_dir['reposd-dir'])

    def cleanup_unused_repos(self):
        """
        Delete unused zypper repositories

        zypper creates a system solvable which is unwanted for the
        purpose of building images. In addition zypper fails with
        an error message 'Failed to cache rpm database' if such a
        system solvable exists and a new root system is created

        All other repository configurations which are not used for
        this build must be removed too, otherwise they are taken into
        account for the package installations
        """
        solv_dir = self.shared_zypper_dir['solv-cache-dir']
        Path.wipe(solv_dir + '/@System')

        repos_dir = self.shared_zypper_dir['reposd-dir']
        repo_files = list(os.walk(repos_dir))[0][2]
        for repo_file in repo_files:
            if repo_file not in self.repo_names:
                Path.wipe(repos_dir + '/' + repo_file)

    def _create_zypper_runtime_environment(self):
        # Pre-create every shared directory and point libzypp at the
        # runtime zypp config via ZYPP_CONF.
        for zypper_dir in list(self.shared_zypper_dir.values()):
            Path.create(zypper_dir)
        return dict(
            os.environ,
            LANG='C',
            ZYPP_CONF=self.runtime_zypp_config_file.name
        )

    def _write_runtime_config(self):
        # Persist both in-memory parsers into their temp config files.
        with open(self.runtime_zypper_config_file.name, 'w') as config:
            self.runtime_zypper_config.write(config)
        with open(self.runtime_zypp_config_file.name, 'w') as config:
            self.runtime_zypp_config.write(config)

    def _translate_repo_type(self, repo_type):
        """
        Translate kiwi supported common repo type names from the schema
        into the name the zyper package manager understands
        """
        zypper_type_for = {
            'rpm-md': 'YUM',
            'rpm-dir': 'Plaindir',
            'yast2': 'YaST'
        }
        try:
            return zypper_type_for[repo_type]
        except Exception:
            raise KiwiRepoTypeUnknown(
                'Unsupported zypper repo type: %s' % repo_type
            )

    def _backup_package_cache(self):
        """
        preserve package cache which otherwise will be removed by
        zypper if no repo file is found. But this situation is
        normal for an image build process which setup and remove
        repos for building at runtime
        """
        self._move_package_cache(backup=True)

    def _restore_package_cache(self):
        """
        restore preserved package cache at the location passed to zypper
        """
        self._move_package_cache(restore=True)

    def _move_package_cache(self, backup=False, restore=False):
        # Toggle the cache between its live path and a '.moved' sibling.
        package_cache = self.shared_location + '/packages'
        package_cache_moved = package_cache + '.moved'
        if backup and os.path.exists(package_cache):
            Command.run(
                ['mv', '-f', package_cache, package_cache_moved]
            )
        elif restore and os.path.exists(package_cache_moved):
            Command.run(
                ['mv', '-f', package_cache_moved, package_cache]
            )

    def __del__(self):
        self._restore_package_cache()
class RepositoryYum(RepositoryBase):
    """
    Implements repository handling for yum package manager
    """
    def post_init(self, custom_args=None):
        """
        Post initialization method

        Store custom yum arguments and create runtime configuration
        and environment

        Attributes

        * :attr:`shared_yum_dir`
            shared directory between image root and build system root

        * :attr:`runtime_yum_config_file`
            yum runtime config file name

        * :attr:`command_env`
            customized os.environ for yum

        * :attr:`runtime_yum_config`
            Instance of ConfigParser

        :param list custom_args: yum arguments
        """
        self.custom_args = custom_args
        if not custom_args:
            self.custom_args = []
        self.repo_names = []

        # yum support is based on creating repo files which contains
        # path names to the repo and its cache. In order to allow a
        # persistent use of the files in and outside of a chroot call
        # an active bind mount from RootBind::mount_shared_directory
        # is expected and required
        manager_base = self.shared_location + '/yum'

        self.shared_yum_dir = {
            'reposd-dir': manager_base + '/repos',
            'cache-dir': manager_base + '/cache'
        }

        self.runtime_yum_config_file = NamedTemporaryFile(
            dir=self.root_dir
        )

        self.yum_args = [
            '-c', self.runtime_yum_config_file.name, '-y'
        ] + self.custom_args

        self.command_env = self._create_yum_runtime_environment()

        # config file parameters for yum tool
        self._create_runtime_config_parser()
        self._write_runtime_config()

    def use_default_location(self):
        """
        Setup yum repository operations to store all data
        in the default places
        """
        self.shared_yum_dir['reposd-dir'] = \
            self.root_dir + '/etc/yum/repos.d'
        self.shared_yum_dir['cache-dir'] = \
            self.root_dir + '/var/cache/yum'
        # Regenerate the runtime config so it points at the new locations.
        self._create_runtime_config_parser()
        self._write_runtime_config()

    def runtime_config(self):
        """
        yum runtime configuration and environment
        """
        return {
            'yum_args': self.yum_args,
            'command_env': self.command_env
        }

    def add_repo(
        self, name, uri, repo_type='rpm-md',
        prio=None, dist=None, components=None
    ):
        """
        Add yum repository

        :param string name: repository base file name
        :param string uri: repository URI
        :param repo_type: repostory type name
        :param int prio: yum repostory priority
        :param dist: unused
        :param components: unused
        """
        repo_file = self.shared_yum_dir['reposd-dir'] + '/' + name + '.repo'
        self.repo_names.append(name + '.repo')
        if os.path.exists(uri):
            # yum requires local paths to take the file: type
            uri = 'file://' + uri
        repo_config = ConfigParser()
        repo_config.add_section(name)
        repo_config.set(
            name, 'name', name
        )
        repo_config.set(
            name, 'baseurl', uri
        )
        if prio:
            repo_config.set(
                name, 'priority', format(prio)
            )
        with open(repo_file, 'w') as repo:
            repo_config.write(repo)

    def delete_repo(self, name):
        """
        Delete yum repository

        :param string name: repository base file name
        """
        Path.wipe(
            self.shared_yum_dir['reposd-dir'] + '/' + name + '.repo'
        )

    def delete_all_repos(self):
        """
        Delete all yum repositories
        """
        Path.wipe(self.shared_yum_dir['reposd-dir'])
        Path.create(self.shared_yum_dir['reposd-dir'])

    def cleanup_unused_repos(self):
        """
        Delete unused yum repositories

        Repository configurations which are not used for this build
        must be removed otherwise they are taken into account for
        the package installations
        """
        repos_dir = self.shared_yum_dir['reposd-dir']
        repo_files = list(os.walk(repos_dir))[0][2]
        for repo_file in repo_files:
            if repo_file not in self.repo_names:
                Path.wipe(repos_dir + '/' + repo_file)

    def _create_yum_runtime_environment(self):
        for yum_dir in list(self.shared_yum_dir.values()):
            Path.create(yum_dir)
        return dict(
            os.environ, LANG='C'
        )

    def _create_runtime_config_parser(self):
        # Build the [main] section passed to yum via '-c'; values mirror
        # a conservative yum.conf for reproducible image builds.
        self.runtime_yum_config = ConfigParser()
        self.runtime_yum_config.add_section('main')

        self.runtime_yum_config.set(
            'main', 'cachedir', self.shared_yum_dir['cache-dir']
        )
        self.runtime_yum_config.set(
            'main', 'reposdir', self.shared_yum_dir['reposd-dir']
        )
        self.runtime_yum_config.set(
            'main', 'keepcache', '1'
        )
        self.runtime_yum_config.set(
            'main', 'debuglevel', '2'
        )
        self.runtime_yum_config.set(
            'main', 'pkgpolicy', 'newest'
        )
        self.runtime_yum_config.set(
            'main', 'tolerant', '0'
        )
        self.runtime_yum_config.set(
            'main', 'exactarch', '1'
        )
        self.runtime_yum_config.set(
            'main', 'obsoletes', '1'
        )
        self.runtime_yum_config.set(
            'main', 'plugins', '1'
        )
        self.runtime_yum_config.set(
            'main', 'metadata_expire', '1800'
        )
        self.runtime_yum_config.set(
            'main', 'group_command', 'compat'
        )

    def _write_runtime_config(self):
        with open(self.runtime_yum_config_file.name, 'w') as config:
            self.runtime_yum_config.write(config)
class SetupConfig(object):
    """Wrapper around the setup.cfg file if available.

    One reason is to cleanup setup.cfg from these settings::

        [egg_info]
        tag_build = dev
        tag_svn_revision = true

    Another is for optional zest.releaser-specific settings::

        [zest.releaser]
        python-file-with-version = reinout/maurits.py

    """

    config_filename = SETUP_CONFIG_FILE

    def __init__(self):
        """Grab the configuration (overridable for test purposes)"""
        # If there is a setup.cfg in the package, parse it
        if not os.path.exists(self.config_filename):
            self.config = None
            return
        self.config = ConfigParser()
        with codecs.open(self.config_filename, 'r', 'utf8') as fp:
            self.config.readfp(fp)

    def development_marker(self):
        """Return development marker to be appended in postrelease

        Override the default ``.dev0`` in setup.cfg using
        a ``development-marker`` option::

            [zest.releaser]
            development-marker = .dev1

        Returns default of `.dev0` when nothing has been configured.
        """
        default = '.dev0'
        # Bug fix: without this guard a missing setup.cfg (self.config is
        # None) raised AttributeError; python_file_with_version already
        # guards the same way.
        if self.config is None:
            return default
        try:
            result = self.config.get('zest.releaser', 'development-marker')
        except (NoSectionError, NoOptionError, ValueError):
            result = default
        return result

    def has_bad_commands(self):
        """Report whether setup.cfg carries dev-tagging egg_info options."""
        if self.config is None:
            return False
        if not self.config.has_section('egg_info'):
            # bail out early as the main section is not there
            return False
        bad = False
        # Check 1.
        if self.config.has_option('egg_info', 'tag_build'):
            # Might still be empty.
            value = self.config.get('egg_info', 'tag_build')
            if value:
                # logger.warn is a deprecated alias of warning
                logger.warning("%s has [egg_info] tag_build set to %r",
                               self.config_filename, value)
                bad = True
        # Check 2.
        if self.config.has_option('egg_info', 'tag_svn_revision'):
            if self.config.getboolean('egg_info', 'tag_svn_revision'):
                value = self.config.get('egg_info', 'tag_svn_revision')
                logger.warning("%s has [egg_info] tag_svn_revision set to %r",
                               self.config_filename, value)
                bad = True
        return bad

    def fix_config(self):
        """Blank out the bad egg_info options and rewrite setup.cfg."""
        if not self.has_bad_commands():
            logger.warning("Cannot fix already fine %s.", self.config_filename)
            return
        if self.config.has_option('egg_info', 'tag_build'):
            self.config.set('egg_info', 'tag_build', '')
        if self.config.has_option('egg_info', 'tag_svn_revision'):
            self.config.set('egg_info', 'tag_svn_revision', 'false')
        # Use context managers: the original leaked both file handles.
        with open(self.config_filename, 'w') as new_setup:
            self.config.write(new_setup)
        logger.info("New setup.cfg contents:")
        with open(self.config_filename) as new_setup:
            print(new_setup.read())

    def python_file_with_version(self):
        """Return Python filename with ``__version__`` marker, if configured.

        Enable this by adding a ``python-file-with-version`` option::

            [zest.releaser]
            python-file-with-version = reinout/maurits.py

        Return None when nothing has been configured.

        """
        default = None
        if self.config is None:
            return default
        try:
            result = self.config.get(
                'zest.releaser',
                'python-file-with-version')
        except (NoSectionError, NoOptionError, ValueError):
            return default
        return result
def makeconfigfile(fname, beamlist, radarname, simparams_orig):
    """Write the simulator configuration to ``fname``.

    The output format is picked from the file extension: ``.pickle``,
    ``.yml`` or ``.ini``.

    Inputs
        fname - Name of the file as a string.
        beamlist - A list of beam numbers used by the AMISRS.
        radarname - A string that is the name of the radar being simulated.
        simparams_orig - A set of simulation parameters in a dictionary.
    """
    fname = Path(fname).expanduser()

    curpath = Path(__file__).resolve().parent
    d_file = curpath / 'default.ini'
    fext = fname.suffix

    # reduce the number of stuff needed to be saved and avoid problems
    # with writing
    keys2save = ['IPP', 'TimeLim', 'RangeLims', 'Pulselength', 't_s',
                 'Pulsetype', 'Tint', 'Fitinter', 'NNs', 'dtype',
                 'ambupsamp', 'species', 'numpoints', 'startfile',
                 'FitType', 'beamrate', 'outangles']

    # Bug fix: work on a copy so the caller's dictionary is not mutated
    # when the optional keys get their defaults filled in.
    simparams_orig = dict(simparams_orig)
    if 'beamrate' not in simparams_orig:
        simparams_orig['beamrate'] = 1
    if 'outangles' not in simparams_orig:
        simparams_orig['outangles'] = beamlist

    simparams = {i: simparams_orig[i] for i in keys2save}

    if fext == '.pickle':
        # context manager closes the file even if pickling fails
        with fname.open('wb') as pickle_file:
            pickle.dump(
                [{'beamlist': beamlist, 'radarname': radarname}, simparams],
                pickle_file)
    elif fext == '.yml':
        with fname.open('w') as f:
            yaml.dump(
                [{'beamlist': beamlist, 'radarname': radarname}, simparams],
                f)
    elif fext == '.ini':
        # default.ini supplies the per-parameter notes written as comments
        defaultparser = ConfigParser()
        defaultparser.read(str(d_file))
        config = ConfigParser(allow_no_value=True)
        config.add_section('section 1')
        beamstring = ""
        for beam in beamlist:
            beamstring += str(beam)
            beamstring += " "
        config.set('section 1', '; beamlist must be list of ints')
        config.set('section 1', 'beamlist', beamstring)
        config.set('section 1', '; radarname can be pfisr, risr, or sondastrom')
        config.set('section 1', 'radarname', radarname)

        config.add_section('simparams')
        config.add_section('simparamsnames')
        defitems = [i[0] for i in defaultparser.items('simparamsnotes')]
        for param in simparams:
            if param == 'Beamlist':
                continue
            if param.lower() in defitems:
                paramnote = defaultparser.get('simparamsnotes', param.lower())
            else:
                paramnote = 'Not in default parameters'
            config.set('simparams', '; ' + param + ' ' + paramnote)
            # for the output list of angles
            if param.lower() == 'outangles':
                outstr = ''
                beamlistlist = simparams[param]
                if beamlistlist == '':
                    beamlistlist = beamlist
                for ilist in beamlistlist:
                    if isinstance(ilist, (list, sp.ndarray)):
                        for inum in ilist:
                            outstr = outstr + str(inum) + ' '
                    else:
                        outstr = outstr + str(ilist)
                    outstr = outstr + ', '
                # drop the trailing ', '
                outstr = outstr[:-2]
                config.set('simparams', param, outstr)
            elif isinstance(simparams[param], list):
                data = ""
                for a in simparams[param]:
                    data += str(a)
                    data += " "
                config.set('simparams', param, str(data))
            else:
                # TODO config.set() is obsolete, undefined behavior! use
                # mapping protocol instead
                # https://docs.python.org/3/library/configparser.html#mapping-protocol-access
                config.set('simparams', param, str(simparams[param]))
            config.set('simparamsnames', param, param)
        # Bug fix: open with a context manager (the handle leaked on error)
        with open(str(fname), 'w') as cfgfile:
            config.write(cfgfile)
    else:
        # Bug fix: the message forgot the supported .yml extension
        raise ValueError(
            'fname needs to have an extension of .pickle, .yml or .ini')
class RepositoryYum(RepositoryBase):
    """
    Implements repository handling for yum package manager
    """
    def post_init(self, custom_args=None):
        """
        Post initialization method

        Store custom yum arguments and create runtime configuration
        and environment

        Attributes

        * :attr:`shared_yum_dir`
            shared directory between image root and build system root

        * :attr:`runtime_yum_config_file`
            yum runtime config file name

        * :attr:`command_env`
            customized os.environ for yum

        * :attr:`runtime_yum_config`
            Instance of ConfigParser

        :param list custom_args: yum arguments
        """
        self.custom_args = custom_args
        if not custom_args:
            self.custom_args = []

        # extract custom arguments not used in yum call
        if 'exclude_docs' in self.custom_args:
            self.custom_args.remove('exclude_docs')
            log.warning('rpm-excludedocs not supported for yum: ignoring')

        if 'check_signatures' in self.custom_args:
            self.custom_args.remove('check_signatures')
            self.gpg_check = '1'
        else:
            self.gpg_check = '0'

        self.repo_names = []

        # yum support is based on creating repo files which contains
        # path names to the repo and its cache. In order to allow a
        # persistent use of the files in and outside of a chroot call
        # an active bind mount from RootBind::mount_shared_directory
        # is expected and required
        manager_base = self.shared_location + '/yum'

        self.shared_yum_dir = {
            'reposd-dir': manager_base + '/repos',
            'cache-dir': manager_base + '/cache',
            'pluginconf-dir': manager_base + '/pluginconf'
        }

        self.runtime_yum_config_file = NamedTemporaryFile(
            dir=self.root_dir
        )

        self.yum_args = [
            '-c', self.runtime_yum_config_file.name, '-y'
        ] + self.custom_args

        self.command_env = self._create_yum_runtime_environment()

        # config file parameters for yum tool
        self._create_runtime_config_parser()
        self._create_runtime_plugin_config_parser()
        self._write_runtime_config()

    def use_default_location(self):
        """
        Setup yum repository operations to store all data
        in the default places
        """
        self.shared_yum_dir['reposd-dir'] = \
            self.root_dir + '/etc/yum.repos.d'
        self.shared_yum_dir['cache-dir'] = \
            self.root_dir + '/var/cache/yum'
        self.shared_yum_dir['pluginconf-dir'] = \
            self.root_dir + '/etc/yum/pluginconf.d'
        # regenerate the runtime configuration for the new locations
        self._create_runtime_config_parser()
        self._create_runtime_plugin_config_parser()
        self._write_runtime_config()

    def runtime_config(self):
        """
        yum runtime configuration and environment
        """
        return {
            'yum_args': self.yum_args,
            'command_env': self.command_env
        }

    def add_repo(
        self, name, uri, repo_type='rpm-md',
        prio=None, dist=None, components=None,
        user=None, secret=None, credentials_file=None,
        repo_gpgcheck=None, pkg_gpgcheck=None
    ):
        """
        Add yum repository

        :param string name: repository base file name
        :param string uri: repository URI
        :param repo_type: repostory type name
        :param int prio: yum repostory priority
        :param dist: unused
        :param components: unused
        :param user: unused
        :param secret: unused
        :param credentials_file: unused
        :param bool repo_gpgcheck: enable repository signature validation
        :param bool pkg_gpgcheck: enable package signature validation
        """
        repo_file = self.shared_yum_dir['reposd-dir'] + '/' + name + '.repo'
        self.repo_names.append(name + '.repo')
        if os.path.exists(uri):
            # yum requires local paths to take the file: type
            uri = 'file://' + uri
        repo_config = ConfigParser()
        repo_config.add_section(name)
        repo_config.set(
            name, 'name', name
        )
        repo_config.set(
            name, 'baseurl', uri
        )
        repo_config.set(
            name, 'enabled', '1'
        )
        # NOTE(review): prio == 0 is falsy and would be silently dropped
        if prio:
            repo_config.set(
                name, 'priority', format(prio)
            )
        if repo_gpgcheck is not None:
            repo_config.set(
                name, 'repo_gpgcheck', '1' if repo_gpgcheck else '0'
            )
        if pkg_gpgcheck is not None:
            repo_config.set(
                name, 'gpgcheck', '1' if pkg_gpgcheck else '0'
            )
        with open(repo_file, 'w') as repo:
            repo_config.write(RepositoryYumSpaceRemover(repo))

    def import_trusted_keys(self, signing_keys):
        """
        Imports trusted keys into the image

        :param list signing_keys: list of the key files to import
        """
        for key in signing_keys:
            Command.run(['rpm', '--root', self.root_dir, '--import', key])

    def delete_repo(self, name):
        """
        Delete yum repository

        :param string name: repository base file name
        """
        Path.wipe(
            self.shared_yum_dir['reposd-dir'] + '/' + name + '.repo'
        )

    def delete_all_repos(self):
        """
        Delete all yum repositories
        """
        # wipe and recreate so the directory exists but is empty
        Path.wipe(self.shared_yum_dir['reposd-dir'])
        Path.create(self.shared_yum_dir['reposd-dir'])

    def delete_repo_cache(self, name):
        """
        Delete yum repository cache

        The cache data for each repository is stored in a directory
        of the same name as the repository name. The method deletes
        this directory to cleanup the cache information

        :param string name: repository name
        """
        Path.wipe(
            os.sep.join([self.shared_yum_dir['cache-dir'], name])
        )

    def cleanup_unused_repos(self):
        """
        Delete unused yum repositories

        Repository configurations which are not used for this build
        must be removed otherwise they are taken into account for
        the package installations
        """
        repos_dir = self.shared_yum_dir['reposd-dir']
        # top-level files of repos_dir only (os.walk first tuple, index 2)
        repo_files = list(os.walk(repos_dir))[0][2]
        for repo_file in repo_files:
            if repo_file not in self.repo_names:
                Path.wipe(repos_dir + '/' + repo_file)

    def _create_yum_runtime_environment(self):
        # make sure all shared yum directories exist, then run yum
        # with the default environment forced to the C locale
        for yum_dir in list(self.shared_yum_dir.values()):
            Path.create(yum_dir)
        return dict(
            os.environ, LANG='C'
        )

    def _create_runtime_config_parser(self):
        # build the [main] section of the yum runtime configuration
        self.runtime_yum_config = ConfigParser()
        self.runtime_yum_config.add_section('main')

        self.runtime_yum_config.set(
            'main', 'cachedir', self.shared_yum_dir['cache-dir']
        )
        self.runtime_yum_config.set(
            'main', 'reposdir', self.shared_yum_dir['reposd-dir']
        )
        self.runtime_yum_config.set(
            'main', 'pluginconfpath', self.shared_yum_dir['pluginconf-dir']
        )
        self.runtime_yum_config.set(
            'main', 'keepcache', '1'
        )
        self.runtime_yum_config.set(
            'main', 'debuglevel', '2'
        )
        self.runtime_yum_config.set(
            'main', 'pkgpolicy', 'newest'
        )
        self.runtime_yum_config.set(
            'main', 'tolerant', '0'
        )
        self.runtime_yum_config.set(
            'main', 'exactarch', '1'
        )
        self.runtime_yum_config.set(
            'main', 'obsoletes', '1'
        )
        self.runtime_yum_config.set(
            'main', 'plugins', '1'
        )
        self.runtime_yum_config.set(
            'main', 'gpgcheck', self.gpg_check
        )
        self.runtime_yum_config.set(
            'main', 'metadata_expire', '1800'
        )
        self.runtime_yum_config.set(
            'main', 'group_command', 'compat'
        )

    def _create_runtime_plugin_config_parser(self):
        # enable the priorities plugin used by yum
        self.runtime_yum_plugin_config = ConfigParser()
        self.runtime_yum_plugin_config.add_section('main')

        self.runtime_yum_plugin_config.set(
            'main', 'enabled', '1'
        )

    def _write_runtime_config(self):
        # persist both the yum runtime config and the priorities
        # plugin config; RepositoryYumSpaceRemover normalizes spacing
        with open(self.runtime_yum_config_file.name, 'w') as config:
            self.runtime_yum_config.write(
                RepositoryYumSpaceRemover(config)
            )
        yum_plugin_config_file = \
            self.shared_yum_dir['pluginconf-dir'] + '/priorities.conf'
        with open(yum_plugin_config_file, 'w') as pluginconfig:
            self.runtime_yum_plugin_config.write(
                RepositoryYumSpaceRemover(pluginconfig)
            )
def _post_execute(self): """ calculate all peak centers calculate relative shifts to a reference detector. not necessarily the same as the reference detector used for setting the magnet """ graph = self.graph plot = graph.plots[0] # time.sleep(0.05) # wait for graph to fully update # time.sleep(0.1) # def get_peak_center(i, di): def get_peak_center(di): try: lp = plot.plots[di][0] except KeyError: lp = plot.plots['*{}'.format(di)][0] xs = lp.index.get_data() ys = lp.value.get_data() cx = None if len(xs) and len(ys): try: result = calculate_peak_center(xs, ys) cx = result[0][1] except PeakCenterError: self.warning('no peak center for {}'.format(di)) return cx spec = self.spectrometer centers = {d: get_peak_center(d) for d in self.active_detectors} print(centers) ref = self.reference_detector post = centers[ref] if post is None: return results = [] for di in self.active_detectors: di = spec.get_detector(di) cen = centers[di.name] if cen is None: continue dac_dev = post - cen if self.spectrometer.simulation: dac_dev = -random() if abs(dac_dev) < 0.001: self.info('no offset detected between {} and {}'.format(ref, di.name)) continue defl = di.map_dac_to_deflection(dac_dev) self.info('{} dac dev. {:0.5f}. 
converted to deflection voltage {:0.1f}.'.format(di.name, dac_dev, defl)) curdefl = di.deflection newdefl = int(curdefl + defl) newdefl = max(0, min(newdefl, self.spectrometer.max_deflection)) if newdefl >= 0: results.append(DeflectionResult(di.name, curdefl, newdefl)) if not results: self.information_dialog('no deflection changes needed') else: rv = ResultsView(results=results) info = rv.edit_traits() if info.result: config = ConfigParser() # p = os.path.join(paths.spectrometer_dir, 'config.cfg') p = get_spectrometer_config_path() config.read(p) for v in rv.clean_results: config.set('Deflections', v.name, v.new_deflection) det = next((d for d in self.active_detectors if d.lower() == v.name.lower())) det = spec.get_detector(det) det.deflection = v.new_deflection with open(p, 'w') as wfile: config.write(wfile) self.spectrometer.clear_cached_config()
    def __init__(self, parser=None, defaults={}, writeargstofile=False, readargs=True):
        """Build the layered configuration: argparse defaults, then the
        config file, then the secrets file, then explicit command-line
        overrides.

        :param parser: optional pre-built argparse parser; when None a
            default psycodict parser is created with config/secrets/
            logging/PostgreSQL options
        :param defaults: fallback values for the default parser's options
        :param writeargstofile: when the config file is missing, write the
            passed command-line values (instead of the parser defaults)
        :param readargs: when False, ignore sys.argv and use defaults only
        """
        # NOTE(review): mutable default `defaults={}` — safe here because it
        # is only read via .get(), but a None sentinel would be cleaner.
        if parser is None:
            parser = argparse.ArgumentParser(
                description="Default psycodict parser")

            parser.add_argument(
                "-c",
                "--config-file",
                dest="config_file",
                metavar="FILE",
                help="configuration file [default: %(default)s]",
                default=defaults.get("config_file", "config.ini"),
            )
            parser.add_argument(
                "-s",
                "--secrets-file",
                dest="secrets_file",
                metavar="SECRETS",
                help="secrets file [default: %(default)s]",
                default=defaults.get("secrets_file", "secrets.ini"),
            )

            logginggroup = parser.add_argument_group("Logging options:")
            logginggroup.add_argument(
                "--slowcutoff",
                dest="logging_slowcutoff",
                metavar="SLOWCUTOFF",
                help="threshold to log slow queries [default: %(default)s]",
                default=defaults.get("logging_slowcutoff", 0.1),
                type=float,
            )
            logginggroup.add_argument(
                "--slowlogfile",
                help="logfile for slow queries [default: %(default)s]",
                dest="logging_slowlogfile",
                metavar="FILE",
                default=defaults.get("logging_slowlogfile",
                                     "slow_queries.log"),
            )

            # PostgresSQL options
            postgresqlgroup = parser.add_argument_group("PostgreSQL options")
            postgresqlgroup.add_argument(
                "--postgresql-host",
                dest="postgresql_host",
                metavar="HOST",
                help="PostgreSQL server host or socket directory"
                     " [default: %(default)s]",
                default=defaults.get("postgresql_host", "localhost"),
            )
            postgresqlgroup.add_argument(
                "--postgresql-port",
                dest="postgresql_port",
                metavar="PORT",
                type=int,
                help="PostgreSQL server port [default: %(default)d]",
                default=defaults.get("postgresql_port", 5432),
            )
            postgresqlgroup.add_argument(
                "--postgresql-user",
                dest="postgresql_user",
                metavar="USER",
                help="PostgreSQL username [default: %(default)s]",
                default=defaults.get("postgresql_user", "postgres"),
            )
            postgresqlgroup.add_argument(
                "--postgresql-pass",
                dest="postgresql_password",
                metavar="PASS",
                help="PostgreSQL password [default: %(default)s]",
                # NOTE(review): key is "postgres_password" but the dest is
                # postgresql_password — looks like a typo; a caller passing
                # defaults={"postgresql_password": ...} is ignored. Confirm.
                default=defaults.get("postgres_password", ""),
            )
            postgresqlgroup.add_argument(
                "--postgresql-dbname",
                dest="postgresql_dbname",
                metavar="DBNAME",
                help="PostgreSQL database name [default: %(default)s]",
                # NOTE(review): unlike the other options this default is not
                # routed through `defaults` — confirm whether intentional.
                default="lmfdb",
            )

        def sec_opt(key):
            # split "section_option" into (section, option); keys without
            # an underscore land in the "misc" section
            if "_" in key:
                sec, opt = key.split("_", 1)
            else:
                sec = "misc"
                opt = key
            return sec, opt

        # 1: parsing command-line arguments
        if readargs:
            args = parser.parse_args()
        else:
            # only read config file
            args = parser.parse_args([])
        args_dict = vars(args)
        default_arguments_dict = vars(parser.parse_args([]))
        del default_arguments_dict["config_file"]
        del default_arguments_dict["secrets_file"]

        self.default_args = defaultdict(dict)
        for key, val in default_arguments_dict.items():
            sec, opt = sec_opt(key)
            self.default_args[sec][opt] = str(val)

        # reading the config file, creating it if necessary
        # 2/1: does config file exist?
        if not os.path.exists(args.config_file):
            write_args = deepcopy(self.default_args)
            if not writeargstofile:
                print(
                    "Config file: %s not found, creating it with the default values"
                    % args.config_file)
            else:
                print(
                    "Config file: %s not found, creating it with the passed values"
                    % args.config_file)
                # overwrite default arguments passed via command line args
                for key, val in args_dict.items():
                    if key in default_arguments_dict:
                        sec, opt = sec_opt(key)
                        write_args[sec][opt] = str(val)

            _cfgp = ConfigParser()
            # create sections
            for sec, options in write_args.items():
                _cfgp.add_section(sec)
                for opt, val in options.items():
                    _cfgp.set(sec, opt, str(val))

            with open(args.config_file, "w") as configfile:
                _cfgp.write(configfile)

        # 2/2: reading the config file
        _cfgp = ConfigParser()
        _cfgp.read(args.config_file)
        # 2/3: reading the secrets file, which can override the config
        if os.path.exists(args.secrets_file):
            _cfgp.read(args.secrets_file)

        # 3: override specific settings
        def file_to_args(sep="_"):
            # flatten the parsed config into {"section_option": value}
            ret = {}
            for s in _cfgp.sections():
                for k, v in _cfgp.items(s):
                    ret["%s%s%s" % (s, sep, k)] = v
            return ret

        args_file = file_to_args()

        for key, val in default_arguments_dict.items():
            # if a nondefault value was passed through command line arguments
            # set it
            # or if a default value was not set in the config file
            if args_dict[key] != val or key not in args_file:
                sec, opt = sec_opt(key)
                if sec not in _cfgp.sections():
                    _cfgp.add_section(sec)
                _cfgp.set(sec, opt, str(args_dict[key]))

        # We can derive the types from the parser
        type_dict = {}
        for action in parser._actions:
            if isinstance(
                    action,
                    (argparse._StoreTrueAction, argparse._StoreFalseAction)):
                type_dict[action.dest] = strbool
            else:
                type_dict[action.dest] = action.type

        def get(section, key):
            # fetch a value from the merged config and coerce it with the
            # type registered on the matching parser action (if any)
            val = _cfgp.get(section, key)
            full = section + "_" + key
            type_func = type_dict.get(full)
            if type_func is not None:
                val = type_func(val)
            return val

        self.options = defaultdict(dict)
        for sec, options in self.default_args.items():
            for opt in options:
                self.options[sec][opt] = get(sec, opt)

        self.extra_options = {}  # not stored in the config file
        for key, val in args_dict.items():
            if key not in default_arguments_dict:
                self.extra_options[key] = val
def __spawn_instance(self): """ Create and configure a new KRA instance using pkispawn. Creates a configuration file with IPA-specific parameters and passes it to the base class to call pkispawn """ # Create an empty and secured file (cfg_fd, cfg_file) = tempfile.mkstemp() os.close(cfg_fd) pent = pwd.getpwnam(self.service_user) os.chown(cfg_file, pent.pw_uid, pent.pw_gid) # Create KRA configuration config = ConfigParser() config.optionxform = str config.add_section("KRA") # Security Domain Authentication config.set("KRA", "pki_security_domain_https_port", "443") config.set("KRA", "pki_security_domain_password", self.admin_password) config.set("KRA", "pki_security_domain_user", self.admin_user) # issuing ca config.set("KRA", "pki_issuing_ca_uri", "https://%s" % ipautil.format_netloc(self.fqdn, 443)) # Server config.set("KRA", "pki_enable_proxy", "True") config.set("KRA", "pki_restart_configured_instance", "False") config.set("KRA", "pki_backup_keys", "True") config.set("KRA", "pki_backup_password", self.admin_password) # Client security database config.set("KRA", "pki_client_database_dir", self.agent_db) config.set("KRA", "pki_client_database_password", self.admin_password) config.set("KRA", "pki_client_database_purge", "False") config.set("KRA", "pki_client_pkcs12_password", self.admin_password) # Administrator config.set("KRA", "pki_admin_name", self.admin_user) config.set("KRA", "pki_admin_uid", self.admin_user) config.set("KRA", "pki_admin_email", "root@localhost") config.set("KRA", "pki_admin_password", self.admin_password) config.set("KRA", "pki_admin_nickname", "ipa-ca-agent") config.set("KRA", "pki_admin_subject_dn", str(DN(('cn', 'ipa-ca-agent'), self.subject_base))) config.set("KRA", "pki_import_admin_cert", "True") config.set("KRA", "pki_admin_cert_file", paths.ADMIN_CERT_PATH) config.set("KRA", "pki_client_admin_cert_p12", paths.DOGTAG_ADMIN_P12) # Directory server config.set("KRA", "pki_ds_ldap_port", "389") config.set("KRA", "pki_ds_password", 
self.dm_password) config.set("KRA", "pki_ds_base_dn", self.basedn) config.set("KRA", "pki_ds_database", "ipaca") config.set("KRA", "pki_ds_create_new_db", "False") self._use_ldaps_during_spawn(config) # Certificate subject DNs config.set("KRA", "pki_subsystem_subject_dn", str(DN(('cn', 'CA Subsystem'), self.subject_base))) config.set("KRA", "pki_ssl_server_subject_dn", str(DN(('cn', self.fqdn), self.subject_base))) config.set("KRA", "pki_audit_signing_subject_dn", str(DN(('cn', 'KRA Audit'), self.subject_base))) config.set( "KRA", "pki_transport_subject_dn", str(DN(('cn', 'KRA Transport Certificate'), self.subject_base))) config.set( "KRA", "pki_storage_subject_dn", str(DN(('cn', 'KRA Storage Certificate'), self.subject_base))) # Certificate nicknames # Note that both the server certs and subsystem certs reuse # the ca certs. config.set("KRA", "pki_subsystem_nickname", "subsystemCert cert-pki-ca") config.set("KRA", "pki_ssl_server_nickname", "Server-Cert cert-pki-ca") config.set("KRA", "pki_audit_signing_nickname", "auditSigningCert cert-pki-kra") config.set("KRA", "pki_transport_nickname", "transportCert cert-pki-kra") config.set("KRA", "pki_storage_nickname", "storageCert cert-pki-kra") # Shared db settings # Needed because CA and KRA share the same database # We will use the dbuser created for the CA config.set("KRA", "pki_share_db", "True") config.set( "KRA", "pki_share_dbuser_dn", str(DN(('uid', 'pkidbuser'), ('ou', 'people'), ('o', 'ipaca')))) _p12_tmpfile_handle, p12_tmpfile_name = tempfile.mkstemp(dir=paths.TMP) if self.clone: krafile = self.pkcs12_info[0] shutil.copy(krafile, p12_tmpfile_name) pent = pwd.getpwnam(self.service_user) os.chown(p12_tmpfile_name, pent.pw_uid, pent.pw_gid) # Security domain registration config.set("KRA", "pki_security_domain_hostname", self.master_host) config.set("KRA", "pki_security_domain_https_port", "443") config.set("KRA", "pki_security_domain_user", self.admin_user) config.set("KRA", "pki_security_domain_password", 
self.admin_password) # Clone config.set("KRA", "pki_clone", "True") config.set("KRA", "pki_clone_pkcs12_path", p12_tmpfile_name) config.set("KRA", "pki_clone_pkcs12_password", self.dm_password) config.set("KRA", "pki_clone_setup_replication", "False") config.set( "KRA", "pki_clone_uri", "https://%s" % ipautil.format_netloc(self.master_host, 443)) else: # the admin cert file is needed for the first instance of KRA cert = DogtagInstance.get_admin_cert(self) with open(paths.ADMIN_CERT_PATH, "w") as admin_path: admin_path.write(cert) # Generate configuration file with open(cfg_file, "wb") as f: config.write(f) try: DogtagInstance.spawn_instance( self, cfg_file, nolog_list=(self.dm_password, self.admin_password) ) finally: os.remove(p12_tmpfile_name) os.remove(cfg_file) shutil.move(paths.KRA_BACKUP_KEYS_P12, paths.KRACERT_P12) export_kra_agent_pem() self.log.debug("completed creating KRA instance")