def test_ok_on_valid_section(self):
    """A factory built from a well-formed section creates an install manager."""
    section = self.SECTION
    parser = RawConfigParser()
    parser.add_section(section)
    parser.set(section, 'a-b', 'null')
    factory = storage.DefaultInstallMgrFactory(parser, section, mock.Mock(), {})
    factory.new_install_mgr(mock.Mock(), {})
def start(): """Start application.""" import os import shutil import sys current_dir = os.path.dirname(__file__) app_dir = os.path.abspath(os.path.join(current_dir, '..')) data_dir = os.path.abspath(os.path.join(current_dir, 'data')) if os.path.isdir(data_dir): shutil.rmtree(data_dir) args = [ '--datadir={0}'.format(data_dir), '--nolaunch', ] os.makedirs(data_dir) os.chdir(data_dir) config = RawConfigParser() config.read('config.ini') config.add_section('General') config.set('General', 'web_username', stash['web-username']) config.set('General', 'web_password', stash['web-password']) config.set('General', 'api_key', stash['api-key']) with io.open('config.ini', 'w' if six.PY3 else 'wb') as configfile: config.write(configfile) sys.path.insert(1, app_dir) from medusa.__main__ import Application application = Application() application.start(args)
class ConfigStore(object):
    """Persistent key/value store backed by an INI file in the user config dir.

    Writes are debounced through a single-shot Qt timer so that a burst of
    ``set()`` calls results in a single ``save()``.
    """

    def __init__(self, name):
        self.config = RawConfigParser()
        self.file_opts = {}
        if six.PY3:
            self.file_opts['encoding'] = 'utf-8'
        # Older appdirs has no user_config_dir; fall back to the data dir.
        if hasattr(appdirs, 'user_config_dir'):
            config_dir = appdirs.user_config_dir('photini')
        else:
            config_dir = appdirs.user_data_dir('photini')
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir, mode=stat.S_IRWXU)
        self.file_name = os.path.join(config_dir, name + '.ini')
        if name == 'editor':
            # Migrate settings from legacy locations, then delete the old file.
            for old_file_name in (os.path.expanduser('~/photini.ini'),
                                  os.path.join(config_dir, 'photini.ini')):
                if os.path.exists(old_file_name):
                    self.config.read(old_file_name, **self.file_opts)
                    self.save()
                    os.unlink(old_file_name)
        self.config.read(self.file_name, **self.file_opts)
        # Debounce: any mutation schedules a save 3 seconds later.
        self.timer = QtCore.QTimer()
        self.timer.setSingleShot(True)
        self.timer.setInterval(3000)
        self.timer.timeout.connect(self.save)
        self.has_section = self.config.has_section

    def get(self, section, option, default=None):
        """Return the stored value, or store and return ``default``."""
        if self.config.has_option(section, option):
            result = self.config.get(section, option)
            if six.PY2:
                return result.decode('utf-8')
            return result
        if default is not None:
            self.set(section, option, default)
        return default

    def set(self, section, option, value):
        """Store ``value`` and schedule a deferred save if it changed."""
        if not self.config.has_section(section):
            self.config.add_section(section)
        if (self.config.has_option(section, option)
                and self.config.get(section, option) == value):
            return
        if six.PY2:
            value = value.encode('utf-8')
        self.config.set(section, option, value)
        self.timer.start()

    def remove_section(self, section):
        """Delete an entire section and schedule a deferred save."""
        if not self.config.has_section(section):
            return
        for option in self.config.options(section):
            self.config.remove_option(section, option)
        self.config.remove_section(section)
        self.timer.start()

    def save(self):
        """Write the config file with owner-only permissions."""
        # BUG FIX: the file object returned by open() was never closed, so the
        # handle leaked and data could stay unflushed; use a context manager.
        with open(self.file_name, 'w', **self.file_opts) as f:
            self.config.write(f)
        os.chmod(self.file_name, stat.S_IRUSR | stat.S_IWUSR)
class TestGithubService(TestCase):
    """Tests for GithubService configuration and keyring handling."""

    def setUp(self):
        self.config = RawConfigParser()
        self.config.interactive = False
        self.config.add_section('general')
        self.config.add_section('mygithub')
        self.config.set('mygithub', 'service', 'github')
        self.config.set('mygithub', 'github.login', 'tintin')
        self.config.set('mygithub', 'github.username', 'milou')
        self.config.set('mygithub', 'github.password', 't0ps3cr3t')
        self.service_config = ServiceConfig(
            GithubService.CONFIG_PREFIX, self.config, 'mygithub')

    def test_token_authorization_header(self):
        self.config.remove_option('mygithub', 'github.password')
        self.config.set('mygithub', 'github.token',
                        '@oracle:eval:echo 1234567890ABCDEF')
        service = GithubService(self.config, 'general', 'mygithub')
        self.assertEqual(service.client.session.headers['Authorization'],
                         "token 1234567890ABCDEF")

    # NOTE: assertEquals is a deprecated alias of assertEqual and was removed
    # in Python 3.12; all uses below are updated to assertEqual.
    def test_default_host(self):
        """ Check that if github.host is not set, we default to github.com """
        service = GithubService(self.config, 'general', 'mygithub')
        self.assertEqual("github.com", service.host)

    def test_overwrite_host(self):
        """ Check that if github.host is set, we use its value as host """
        self.config.set('mygithub', 'github.host', 'github.example.com')
        service = GithubService(self.config, 'general', 'mygithub')
        self.assertEqual("github.example.com", service.host)

    def test_keyring_service(self):
        """ Checks that the keyring service name """
        keyring_service = GithubService.get_keyring_service(self.service_config)
        self.assertEqual("github://[email protected]/milou", keyring_service)

    def test_keyring_service_host(self):
        """ Checks that the keyring key depends on the github host. """
        self.config.set('mygithub', 'github.host', 'github.example.com')
        keyring_service = GithubService.get_keyring_service(self.service_config)
        self.assertEqual("github://[email protected]/milou",
                         keyring_service)

    def test_get_repository_from_issue_url__issue(self):
        issue = dict(repos_url="https://github.com/foo/bar")
        repository = GithubService.get_repository_from_issue(issue)
        self.assertEqual("foo/bar", repository)

    def test_get_repository_from_issue_url__pull_request(self):
        issue = dict(repos_url="https://github.com/foo/bar")
        repository = GithubService.get_repository_from_issue(issue)
        self.assertEqual("foo/bar", repository)

    def test_get_repository_from_issue__enterprise_github(self):
        issue = dict(repos_url="https://github.acme.biz/foo/bar")
        repository = GithubService.get_repository_from_issue(issue)
        self.assertEqual("foo/bar", repository)
class BaseConfigStore(object):
    # the actual config store functionality
    """INI-file backed key/value store.

    Mutations set ``self.dirty``; nothing touches disk until ``save()``.
    """

    def __init__(self, name, *arg, **kw):
        super(BaseConfigStore, self).__init__(*arg, **kw)
        self.dirty = False
        self.config = RawConfigParser()
        # Older appdirs has no user_config_dir; fall back to the data dir.
        if hasattr(appdirs, 'user_config_dir'):
            config_dir = appdirs.user_config_dir('photini')
        else:
            config_dir = appdirs.user_data_dir('photini')
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir, mode=stat.S_IRWXU)
        self.file_name = os.path.join(config_dir, name + '.ini')
        if os.path.isfile(self.file_name):
            # BUG FIX: the opened file was never closed; use a context manager
            # so the handle is released.
            if six.PY2:
                with open(self.file_name, 'r') as f:
                    self.config.readfp(f)
            else:
                with open(self.file_name, 'r', encoding='utf-8') as f:
                    self.config.readfp(f)
        self.has_section = self.config.has_section

    def get(self, section, option, default=None):
        """Return the stored value, or store and return ``default``."""
        if self.config.has_option(section, option):
            result = self.config.get(section, option)
            if six.PY2:
                return result.decode('utf-8')
            return result
        if default is not None:
            self.set(section, option, default)
        return default

    def set(self, section, option, value):
        """Store ``value`` and mark the store dirty if it changed."""
        if six.PY2:
            value = value.encode('utf-8')
        if not self.config.has_section(section):
            self.config.add_section(section)
        elif (self.config.has_option(section, option)
              and self.config.get(section, option) == value):
            return
        self.config.set(section, option, value)
        self.dirty = True

    def remove_section(self, section):
        """Delete an entire section and mark the store dirty."""
        if not self.config.has_section(section):
            return
        for option in self.config.options(section):
            self.config.remove_option(section, option)
        self.config.remove_section(section)
        self.dirty = True

    def save(self):
        """Write the config file (owner read/write only) if dirty."""
        if not self.dirty:
            return
        # BUG FIX: close the file after writing instead of leaking the handle,
        # so the data is flushed before chmod runs.
        if six.PY2:
            with open(self.file_name, 'w') as f:
                self.config.write(f)
        else:
            with open(self.file_name, 'w', encoding='utf-8') as f:
                self.config.write(f)
        os.chmod(self.file_name, stat.S_IRUSR | stat.S_IWUSR)
        self.dirty = False
def test_raise_error_on_loop(self):
    """A dependency cycle between entries must be rejected."""
    section = self.SECTION
    parser = RawConfigParser()
    parser.add_section(section)
    parser.set(section, 'a-b', 'null')
    parser.set(section, 'b-a', 'null')
    factory = storage.DefaultInstallMgrFactory(parser, section, mock.Mock(), {})
    self.assertRaises(Exception, factory.new_install_mgr, mock.Mock(), {})
def generate_mrbob_ini(configurator, directory_path, answers):
    """Create or update the ``.mrbob`` ini file in ``directory_path``.

    First run: render a fresh file from a template filled with ``answers``.
    Later runs: merge ``answers`` into the existing file's [variables]
    section, preserving commented-out lines by re-appending them at the end.
    """
    file_name = u'.mrbob'
    file_path = directory_path + '/' + file_name
    file_list = os.listdir(directory_path)
    if file_name not in file_list:
        # No .mrbob yet: build the file body from a template.
        template = """[mr.bob]
verbose = False
[variables]
author.name={0}
author.email={1}
author.github.user={2}
plone.version={3}
""".format(
            answers['author.name'],
            answers['author.email'],
            answers['author.github.user'],
            answers['plone.version'],
        )
        if configurator.variables['configure_mrbob.package.git.disabled']:
            template = template + 'package.git.disabled={0}'.format(
                answers['package.git.disabled'],
            )
        if not configurator.variables['configure_mrbob.package.git.disabled']:
            # git enabled: record the full set of git-related answers
            template = template + """package.git.disabled={0}
package.git.init={1}
package.git.autocommit={2}
""".format(
                answers['package.git.disabled'],
                answers['package.git.init'],
                answers['package.git.autocommit'],
            )
        with open(file_path, 'w') as f:
            f.write(template)
    else:
        # get config file contents
        lines = [
            line.rstrip('\n') for line in codecs.open(file_path, 'r', 'utf-8')
        ]  # NOQA: E501
        # keep commented lines so they can be re-appended after rewriting
        commented_settings = [
            line for line in lines if line and line[0] == '#'
        ]  # NOQA: E501
        config = RawConfigParser()
        # to explicitly convert `key` to str so that we don't get error in
        # .join() of `value`(of type str) and `key`(of type unicode)
        # in RawConfigParser.write() method
        config.optionxform = str
        config.readfp(codecs.open(file_path, 'r', 'utf-8'))
        if not config.has_section('variables'):
            config.add_section('variables')
        for key, value in answers.items():
            config.set('variables', key, value)
        with open(file_path, 'w') as mrbob_config_file:
            config.write(mrbob_config_file)
        # append commented settings at the end
        with codecs.open(file_path, 'a', 'utf-8') as mrbob_config_file:
            mrbob_config_file.writelines(commented_settings)
def test_build_install_mgr_factory_ok(self):
    """The builder returns a DefaultInstallMgrFactory for a valid config."""
    builder = storage.DefaultInstallMgrFactoryBuilder(mock.Mock(), {})
    parser = RawConfigParser()
    parser.add_section(self.SECTION)
    parser.set(self.SECTION, 'a-b', 'null')
    result = builder.build_install_mgr_factory(parser, self.SECTION)
    self.assertTrue(isinstance(result, storage.DefaultInstallMgrFactory))
def test_missing_sha1sum_raise_error(self):
    """Omitting the mandatory sha1sum option must raise."""
    file_builder = storage.DefaultRemoteFileBuilder(self._cache_dir,
                                                    self._downloaders)
    parser = RawConfigParser()
    parser.add_section(self.SECTION)
    parser.set(self.SECTION, 'url', 'http://example.org/foo.zip')
    parser.set(self.SECTION, 'size', '1')
    self.assertRaises(Exception, file_builder.build_remote_file,
                      parser, self.SECTION)
def test_missing_url_raise_error(self):
    """Omitting the mandatory url option must raise."""
    file_builder = storage.DefaultRemoteFileBuilder(self._cache_dir,
                                                    self._downloaders)
    parser = RawConfigParser()
    parser.add_section(self.SECTION)
    parser.set(self.SECTION, 'size', '1')
    parser.set(self.SECTION, 'sha1sum', self.SHA1SUM)
    self.assertRaises(Exception, file_builder.build_remote_file,
                      parser, self.SECTION)
def constructConfigParser():
    """ returns a pre-setup config parser """
    config = RawConfigParser()
    # system config first, then user config overriding it
    config.read([CONFIG_SYSTEM, CONFIG_USER])
    if not config.has_section(GERALD_CONFIG_SECTION):
        config.add_section(GERALD_CONFIG_SECTION)
    return config
def test_ok_on_all_mandatory_parameters_specified(self):
    """url, size and sha1sum together are enough to build a remote file."""
    file_builder = storage.DefaultRemoteFileBuilder(self._cache_dir,
                                                    self._downloaders)
    parser = RawConfigParser()
    parser.add_section(self.SECTION)
    for option, value in (('url', 'http://example.org/foo.zip'),
                          ('size', '1'),
                          ('sha1sum', self.SHA1SUM)):
        parser.set(self.SECTION, option, value)
    remote_file = file_builder.build_remote_file(parser, self.SECTION)
    self.assertEqual('foo.zip', remote_file.filename)
def test_default_downloader_is_looked_up_in_dict_if_unspecified(self):
    """With no explicit downloader option, the 'default' key is used."""
    downloader_map = mock.MagicMock()
    file_builder = storage.DefaultRemoteFileBuilder(self._cache_dir,
                                                    downloader_map)
    parser = RawConfigParser()
    parser.add_section(self.SECTION)
    for option, value in (('url', 'http://example.org/foo.zip'),
                          ('size', '1'),
                          ('sha1sum', self.SHA1SUM)):
        parser.set(self.SECTION, option, value)
    file_builder.build_remote_file(parser, self.SECTION)
    downloader_map.__getitem__.assert_called_once_with('default')
def test_specified_filename_override_implicit(self):
    """An explicit 'filename' option beats the name derived from the URL."""
    file_builder = storage.DefaultRemoteFileBuilder(self._cache_dir,
                                                    self._downloaders)
    parser = RawConfigParser()
    parser.add_section(self.SECTION)
    for option, value in (('url', 'http://example.org/foo.zip'),
                          ('filename', 'bar.zip'),
                          ('size', '1'),
                          ('sha1sum', self.SHA1SUM)):
        parser.set(self.SECTION, option, value)
    remote_file = file_builder.build_remote_file(parser, self.SECTION)
    self.assertEqual('bar.zip', remote_file.filename)
def _merge_from_file(self, config_file):
    """
    Merge variables from ``config_file`` into the environment.

    Variables already present in the environment are left untouched
    (overriding them would raise). Returns ``None`` if ``config_file``
    is missing, not a regular file, or unparseable; otherwise returns a
    ``(num_set, num_total)`` tuple: the number of variables actually
    set and the total number found in ``config_file``.

    Raises ``ValueError`` if ``config_file`` is not an absolute path:

    >>> env = Env()
    >>> env._merge_from_file('my/config.conf')
    Traceback (most recent call last):
        ...
    ValueError: config_file must be an absolute path; got 'my/config.conf'

    Also see `Env._merge()`.

    :param config_file: Absolute path of the configuration file to load.
    """
    if path.abspath(config_file) != config_file:
        raise ValueError(
            'config_file must be an absolute path; got %r' % config_file
        )
    if not path.isfile(config_file):
        return
    parser = RawConfigParser()
    try:
        parser.read(config_file)
    except ParsingError:
        return
    if not parser.has_section(CONFIG_SECTION):
        parser.add_section(CONFIG_SECTION)
    items = parser.items(CONFIG_SECTION)
    if not items:
        return (0, 0)
    merged = 0
    for key, value in items:
        if key in self:
            continue
        self[key] = value
        merged += 1
    if 'config_loaded' not in self:
        # we loaded at least 1 file
        self['config_loaded'] = True
    return (merged, len(items))
def convert_config_to_tribler71(current_config, state_dir=None):
    """
    Convert the Config files libtribler.conf and tribler.conf to the newer
    triblerd.conf and cleanup the files when we are done.

    :param: current_config: the current config in which we merge the old
        config files.
    :return: the newly edited TriblerConfig object with the old data inserted.
    """
    state_dir = state_dir or TriblerConfig.get_default_state_dir()
    libtribler_file_loc = os.path.join(state_dir, "libtribler.conf")
    if os.path.exists(libtribler_file_loc):
        libtribler_cfg = RawConfigParser()
        libtribler_cfg.read(libtribler_file_loc)
        current_config = add_libtribler_config(current_config, libtribler_cfg)
        os.remove(libtribler_file_loc)
    tribler_file_loc = os.path.join(state_dir, "tribler.conf")
    if os.path.exists(tribler_file_loc):
        tribler_cfg = RawConfigParser()
        tribler_cfg.read(tribler_file_loc)
        current_config = add_tribler_config(current_config, tribler_cfg)
        os.remove(tribler_file_loc)
    # We also have to update all existing downloads, in particular, rename
    # the section 'downloadconfig' to 'download_defaults'.
    # (enumerate() was pointless here: the index was never used)
    for filename in iglob(
            os.path.join(state_dir, STATEDIR_DLPSTATE_DIR, '*.state')):
        download_cfg = RawConfigParser()
        try:
            with open(filename) as cfg_file:
                download_cfg.readfp(cfg_file, filename=filename)
        except MissingSectionHeaderError:
            logger.error(
                "Removing download state file %s since it appears to be corrupt",
                filename)
            os.remove(filename)
            # BUG FIX: the file has just been deleted, so skip the conversion
            # below instead of operating on an empty parser and relying on
            # NoSectionError to bail out.
            continue
        try:
            download_items = download_cfg.items("downloadconfig")
            download_cfg.add_section("download_defaults")
            for download_item in download_items:
                download_cfg.set("download_defaults", download_item[0],
                                 download_item[1])
            download_cfg.remove_section("downloadconfig")
            with open(filename, "w") as output_config_file:
                download_cfg.write(output_config_file)
        except (NoSectionError, DuplicateSectionError):
            # This item has already been converted
            pass
    return current_config
def _test_write_pkispawn_config_file(self, template, expected): """ Test that the values we read from an ExternalCAProfile object can be used to produce a reasonable-looking pkispawn configuration. """ config = RawConfigParser() config.optionxform = str config.add_section("CA") config.set("CA", "pki_req_ext_oid", template.ext_oid) config.set("CA", "pki_req_ext_data", hexlify(template.get_ext_data()).decode('ascii')) out = StringIO() config.write(out) assert out.getvalue() == expected
def generate_conf_file_contents(conf_sections):
    """Build a RawConfigParser from (section, var, val) triples.

    Duplicate sections are tolerated; a falsy var adds the bare section
    only; an empty-string value is stored as the literal 'empty'.
    """
    parser = RawConfigParser()
    for section, var, val in conf_sections:
        try:
            parser.add_section(section)
        except DuplicateSectionError:
            pass
        if not var:
            continue
        parser.set(section, var, 'empty' if val == '' else val)
    return parser
def convert_config_to_tribler71(current_config, state_dir=None):
    """
    Convert the Config files libtribler.conf and tribler.conf to the newer
    triblerd.conf and cleanup the files when we are done.

    :param: current_config: the current config in which we merge the old
        config files.
    :return: the newly edited TriblerConfig object with the old data inserted.
    """
    state_dir = state_dir or TriblerConfig.get_default_state_dir()
    # Merge then delete the two legacy config files, if present.
    libtribler_file_loc = os.path.join(state_dir, "libtribler.conf")
    if os.path.exists(libtribler_file_loc):
        libtribler_cfg = RawConfigParser()
        libtribler_cfg.read(libtribler_file_loc)
        current_config = add_libtribler_config(current_config, libtribler_cfg)
        os.remove(libtribler_file_loc)
    tribler_file_loc = os.path.join(state_dir, "tribler.conf")
    if os.path.exists(tribler_file_loc):
        tribler_cfg = RawConfigParser()
        tribler_cfg.read(tribler_file_loc)
        current_config = add_tribler_config(current_config, tribler_cfg)
        os.remove(tribler_file_loc)
    # We also have to update all existing downloads, in particular, rename the
    # section 'downloadconfig' to 'download_defaults'.
    for filename in iglob(
            os.path.join(state_dir, STATEDIR_DLPSTATE_DIR, '*.state')):
        download_cfg = RawConfigParser()
        try:
            with open(filename) as cfg_file:
                download_cfg.readfp(cfg_file, filename=filename)
        except MissingSectionHeaderError:
            logger.error("Removing download state file %s since it appears to be corrupt", filename)
            os.remove(filename)
            # BUG FIX: skip the conversion for the file we just removed
            # instead of falling through to items() on an empty parser.
            continue
        try:
            download_items = download_cfg.items("downloadconfig")
            download_cfg.add_section("download_defaults")
            for download_item in download_items:
                download_cfg.set("download_defaults", download_item[0], download_item[1])
            download_cfg.remove_section("downloadconfig")
            with open(filename, "w") as output_config_file:
                download_cfg.write(output_config_file)
        except (NoSectionError, DuplicateSectionError):
            # This item has already been converted
            pass
    return current_config
def set_config(self, path, value):
    """Set entry in local configuration."""
    section, option = path.split('.', 1)
    filename = os.path.join(self.path, '.hg', 'hgrc')
    if six.PY2:
        # ConfigParser on Python 2 works with byte strings
        section = section.encode('utf-8')
        option = option.encode('utf-8')
        value = value.encode('utf-8')
    parser = RawConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        parser.add_section(section)
    unchanged = (parser.has_option(section, option)
                 and parser.get(section, option) == value)
    if unchanged:
        return
    parser.set(section, option, value)
    with open(filename, 'w') as handle:
        parser.write(handle)
def get_config(path=None):
    # type: (str) -> RawConfigParser
    """
    Returns a ConfigParser with our current configuration.
    """
    config_path = default_config_path() if path is None else path
    config = RawConfigParser({'base_url': DEFAULT_BASE_URL})
    config.read([config_path])
    # guarantee the section exists even when the file is absent or empty
    if not config.has_section('serverapi'):
        config.add_section('serverapi')
    return config
def set_config(self, path, value):
    """
    Set entry in local configuration.
    """
    section, option = path.split(".", 1)
    filename = os.path.join(self.path, ".hg", "hgrc")
    if six.PY2:
        # byte strings for the Python 2 ConfigParser
        value, section, option = (
            value.encode("utf-8"),
            section.encode("utf-8"),
            option.encode("utf-8"),
        )
    parser = RawConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        parser.add_section(section)
    if parser.has_option(section, option) and parser.get(section, option) == value:
        return
    parser.set(section, option, value)
    with open(filename, "w") as handle:
        parser.write(handle)
def set_config(self, path, value):
    """Set entry in local configuration."""
    # refuse to touch the repo config without holding the repo lock
    if not self.lock.is_locked:
        raise RuntimeError('Repository operation without lock held!')
    section, option = path.split('.', 1)
    filename = os.path.join(self.path, '.hg', 'hgrc')
    if six.PY2:
        value = value.encode('utf-8')
        section = section.encode('utf-8')
        option = option.encode('utf-8')
    parser = RawConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        parser.add_section(section)
    unchanged = (parser.has_option(section, option)
                 and parser.get(section, option) == value)
    if not unchanged:
        parser.set(section, option, value)
        with open(filename, 'w') as handle:
            parser.write(handle)
def _merge_from_file(self, config_file):
    """
    Merge variables from ``config_file`` into the environment.

    Variables already present in the environment are left untouched
    (overriding them would raise). Returns ``None`` if ``config_file``
    is missing, not a regular file, or unparseable; otherwise returns a
    ``(num_set, num_total)`` tuple: the number of variables actually
    set and the total number found in ``config_file``.

    Also see `Env._merge()`.

    :param config_file: Path of the configuration file to load.
    """
    if not path.isfile(config_file):
        return None
    parser = RawConfigParser()
    try:
        parser.read(config_file)
    except ParsingError:
        return None
    if not parser.has_section(CONFIG_SECTION):
        parser.add_section(CONFIG_SECTION)
    items = parser.items(CONFIG_SECTION)
    if not items:
        return 0, 0
    merged = 0
    for key, value in items:
        if key in self:
            continue
        self[key] = value
        merged += 1
    if 'config_loaded' not in self:
        # we loaded at least 1 file
        self['config_loaded'] = True
    return merged, len(items)
def edit_config(filename, settings, dry_run=False):
    """Edit a configuration file to include `settings`

    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
    command/section name. A ``None`` value means to delete the entire section,
    while a dictionary lists settings to be changed or deleted in that section.
    A setting of ``None`` means to delete that setting.
    """
    from six.moves.configparser import RawConfigParser
    log.debug("Reading configuration from %s", filename)
    opts = RawConfigParser()
    opts.read([filename])
    for section, options in list(settings.items()):
        if options is None:
            log.info("Deleting section [%s] from %s", section, filename)
            opts.remove_section(section)
        else:
            if not opts.has_section(section):
                log.debug("Adding new section [%s] to %s", section, filename)
                opts.add_section(section)
            for option, value in list(options.items()):
                if value is None:
                    log.debug("Deleting %s.%s from %s",
                              section, option, filename)
                    opts.remove_option(section, option)
                    if not opts.options(section):
                        log.info("Deleting empty [%s] section from %s",
                                 section, filename)
                        opts.remove_section(section)
                else:
                    log.debug("Setting %s.%s to %r in %s",
                              section, option, value, filename)
                    opts.set(section, option, value)
    log.info("Writing %s", filename)
    if not dry_run:
        # BUG FIX: bare open()/close() left the file open (and unflushed) if
        # write() raised; a context manager guarantees the close.
        with open(filename, 'w') as f:
            opts.write(f)
def update_or_create_build_telemetry_config(path):
    """Write a mach config file enabling build telemetry to `path`.

    If the file does not exist, create it. If it exists, add the new setting
    to the existing data.

    This is standalone from mach's `ConfigSettings` so we can use it during
    bootstrap without a source checkout.

    Returns True on success, False when an existing file cannot be parsed.
    """
    config = RawConfigParser()
    if os.path.exists(path):
        try:
            config.read([path])
        except ConfigParserError as e:
            print(
                'Your mach configuration file at `{path}` is not parseable:\n{error}'
                .format(path=path, error=e))
            return False
    if not config.has_section('build'):
        config.add_section('build')
    config.set('build', 'telemetry', 'true')
    # BUG FIX: RawConfigParser.write() needs a text-mode stream on Python 3;
    # opening with 'wb' raised TypeError. Text mode also works on Python 2.
    with open(path, 'w') as f:
        config.write(f)
    return True
def write(self, filename, liftplan=False):
    """Serialize the draft to ``filename`` in WIF format.

    When ``liftplan`` is true, or the draft has no treadles, a liftplan
    section is written instead of treadling.
    """
    assert self.draft.start_at_lowest_thread
    config = RawConfigParser()
    config.optionxform = str  # WIF option names are case-sensitive
    config.add_section('CONTENTS')
    self.write_metadata(config, liftplan=liftplan)
    wif_palette = self.write_palette(config)
    self.write_threads(config, wif_palette, 'warp')
    self.write_threads(config, wif_palette, 'weft')
    self.write_threading(config)
    if liftplan or not self.draft.treadles:
        self.write_liftplan(config)
    else:
        self.write_treadling(config)
    self.write_tieup(config)
    # BUG FIX: RawConfigParser.write() emits str, so the file must be opened
    # in text mode; 'wb' raises TypeError on Python 3 (text mode is also
    # valid on Python 2).
    with open(filename, 'w') as f:
        config.write(f)
sys.exit(1) TRELLO_GET_BOARDS = 'https://trello.com/1/members/me/boards/' # TRELLO_GET_BOARD = 'https://trello.com/1/boards/'+boardid+'/cards' retry = False while True: # try: # # board ids are 14-len hex # if len(BOARD)!=14: # raise ValueError() # int('0x'+BOARD, 16) # break # except ValueError: # board name -> id through cfg try: cache.add_section('trello_boards') except configparser.DuplicateSectionError: pass boardid = safeget(cache, 'trello_boards', BOARD) if boardid: BOARDNAME = BOARD BOARD = boardid break if retry: print('Board not found: ' + BOARD) sys.exit(1) # unknown ID, dump trello board names and ids params = { 'key': TRELLO_API_KEY,
def __spawn_instance(self):
    """
    Create and configure a new KRA instance using pkispawn.
    Creates a configuration file with IPA-specific parameters
    and passes it to the base class to call pkispawn
    """
    # Create an empty and secured file
    (cfg_fd, cfg_file) = tempfile.mkstemp()
    os.close(cfg_fd)
    pent = pwd.getpwnam(self.service_user)
    os.chown(cfg_file, pent.pw_uid, pent.pw_gid)
    # Temporary NSS database for the agent, plus a generated password for it.
    self.tmp_agent_db = tempfile.mkdtemp(prefix="tmp-", dir=paths.VAR_LIB_IPA)
    tmp_agent_pwd = ipautil.ipa_generate_password()
    # Create a temporary file for the admin PKCS #12 file
    (admin_p12_fd, admin_p12_file) = tempfile.mkstemp()
    os.close(admin_p12_fd)
    # Create KRA configuration
    config = RawConfigParser()
    config.optionxform = str
    config.add_section("KRA")
    # Security Domain Authentication
    config.set("KRA", "pki_security_domain_https_port", "443")
    config.set("KRA", "pki_security_domain_password", self.admin_password)
    config.set("KRA", "pki_security_domain_user", self.admin_user)
    # issuing ca
    config.set("KRA", "pki_issuing_ca_uri",
               "https://%s" % ipautil.format_netloc(self.fqdn, 443))
    # Server
    config.set("KRA", "pki_enable_proxy", "True")
    config.set("KRA", "pki_restart_configured_instance", "False")
    config.set("KRA", "pki_backup_keys", "True")
    config.set("KRA", "pki_backup_password", self.admin_password)
    # Client security database
    config.set("KRA", "pki_client_database_dir", self.tmp_agent_db)
    config.set("KRA", "pki_client_database_password", tmp_agent_pwd)
    config.set("KRA", "pki_client_database_purge", "True")
    config.set("KRA", "pki_client_pkcs12_password", self.admin_password)
    # Administrator
    config.set("KRA", "pki_admin_name", self.admin_user)
    config.set("KRA", "pki_admin_uid", self.admin_user)
    config.set("KRA", "pki_admin_email", "root@localhost")
    config.set("KRA", "pki_admin_password", self.admin_password)
    config.set("KRA", "pki_admin_nickname", "ipa-ca-agent")
    config.set("KRA", "pki_admin_subject_dn",
               str(DN(('cn', 'ipa-ca-agent'), self.subject_base)))
    config.set("KRA", "pki_import_admin_cert", "False")
    config.set("KRA", "pki_client_admin_cert_p12", admin_p12_file)
    # Directory server
    config.set("KRA", "pki_ds_ldap_port", "389")
    config.set("KRA", "pki_ds_password", self.dm_password)
    config.set("KRA", "pki_ds_base_dn", str(self.basedn))
    config.set("KRA", "pki_ds_database", "ipaca")
    config.set("KRA", "pki_ds_create_new_db", "False")
    self._use_ldaps_during_spawn(config)
    # Certificate subject DNs
    config.set("KRA", "pki_subsystem_subject_dn",
               str(DN(('cn', 'CA Subsystem'), self.subject_base)))
    config.set("KRA", "pki_sslserver_subject_dn",
               str(DN(('cn', self.fqdn), self.subject_base)))
    config.set("KRA", "pki_audit_signing_subject_dn",
               str(DN(('cn', 'KRA Audit'), self.subject_base)))
    config.set(
        "KRA", "pki_transport_subject_dn",
        str(DN(('cn', 'KRA Transport Certificate'), self.subject_base)))
    config.set(
        "KRA", "pki_storage_subject_dn",
        str(DN(('cn', 'KRA Storage Certificate'), self.subject_base)))
    # Certificate nicknames
    # Note that both the server certs and subsystem certs reuse
    # the ca certs.
    config.set("KRA", "pki_subsystem_nickname",
               "subsystemCert cert-pki-ca")
    config.set("KRA", "pki_sslserver_nickname",
               "Server-Cert cert-pki-ca")
    config.set("KRA", "pki_audit_signing_nickname",
               "auditSigningCert cert-pki-kra")
    config.set("KRA", "pki_transport_nickname",
               "transportCert cert-pki-kra")
    config.set("KRA", "pki_storage_nickname",
               "storageCert cert-pki-kra")
    # Shared db settings
    # Needed because CA and KRA share the same database
    # We will use the dbuser created for the CA
    config.set("KRA", "pki_share_db", "True")
    config.set(
        "KRA", "pki_share_dbuser_dn",
        str(DN(('uid', 'pkidbuser'), ('ou', 'people'), ('o', 'ipaca'))))
    if not (os.path.isdir(paths.PKI_TOMCAT_ALIAS_DIR) and
            os.path.isfile(paths.PKI_TOMCAT_PASSWORD_CONF)):
        # generate pin which we know can be used for FIPS NSS database
        pki_pin = ipautil.ipa_generate_password()
        config.set("KRA", "pki_pin", pki_pin)
    else:
        # NOTE(review): existing tomcat NSS db/password is reused in this
        # case, so no pin is passed to pkispawn.
        pki_pin = None
    _p12_tmpfile_handle, p12_tmpfile_name = tempfile.mkstemp(dir=paths.TMP)
    if self.clone:
        # Clone setup: copy the replica's KRA PKCS#12 material into place.
        krafile = self.pkcs12_info[0]
        shutil.copy(krafile, p12_tmpfile_name)
        pent = pwd.getpwnam(self.service_user)
        os.chown(p12_tmpfile_name, pent.pw_uid, pent.pw_gid)
        # Security domain registration
        config.set("KRA", "pki_security_domain_hostname", self.fqdn)
        config.set("KRA", "pki_security_domain_https_port", "443")
        config.set("KRA", "pki_security_domain_user", self.admin_user)
        config.set("KRA", "pki_security_domain_password",
                   self.admin_password)
        # Clone
        config.set("KRA", "pki_clone", "True")
        config.set("KRA", "pki_clone_pkcs12_path", p12_tmpfile_name)
        config.set("KRA", "pki_clone_pkcs12_password", self.dm_password)
        config.set("KRA", "pki_clone_setup_replication", "False")
        config.set(
            "KRA", "pki_clone_uri",
            "https://%s" % ipautil.format_netloc(self.master_host, 443))
    else:
        # the admin cert file is needed for the first instance of KRA
        cert = self.get_admin_cert()
        # First make sure that the directory exists
        parentdir = os.path.dirname(paths.ADMIN_CERT_PATH)
        if not os.path.exists(parentdir):
            os.makedirs(parentdir)
        with open(paths.ADMIN_CERT_PATH, "wb") as admin_path:
            admin_path.write(
                base64.b64encode(cert.public_bytes(x509.Encoding.DER)))
    # Generate configuration file
    with open(cfg_file, "w") as f:
        config.write(f)
    try:
        # Secrets are passed via nolog_list so they are redacted from logs.
        DogtagInstance.spawn_instance(
            self, cfg_file,
            nolog_list=(self.dm_password, self.admin_password, pki_pin,
                        tmp_agent_pwd))
    finally:
        # Always clean up the temporary files holding key material.
        os.remove(p12_tmpfile_name)
        os.remove(cfg_file)
        os.remove(admin_p12_file)
    shutil.move(paths.KRA_BACKUP_KEYS_P12, paths.KRACERT_P12)
    logger.debug("completed creating KRA instance")
from __future__ import print_function
import os
from six.moves.configparser import RawConfigParser

__author__ = 'alforbes'

# Config file location can be overridden via the ORLO_CONFIG environment
# variable; otherwise the packaged default path is used.
try:
    CONFIG_FILE = os.environ['ORLO_CONFIG']
except KeyError:
    CONFIG_FILE = '/etc/orlo/orlo.ini'

# Built-in defaults; values here may be overridden by CONFIG_FILE.
config = RawConfigParser()

config.add_section('main')
config.set('main', 'debug_mode', 'false')
config.set('main', 'propagate_exceptions', 'true')
config.set('main', 'time_format', '%Y-%m-%dT%H:%M:%SZ')
config.set('main', 'time_zone', 'UTC')
config.set('main', 'strict_slashes', 'false')
config.set('main', 'base_url', 'http://localhost:8080')

config.add_section('security')
config.set('security', 'enabled', 'false')
config.set('security', 'passwd_file', 'none')
config.set('security', 'secret_key', 'change_me')
# NOTE: orlo.__init__ checks that secret_key is not "change_me" when security
# is enabled. Do not change the default here without updating __init__ as well.
config.set('security', 'token_ttl', '3600')
config.set('security', 'ldap_server', 'localhost.localdomain')
config.set('security', 'ldap_port', '389')
config.set('security', 'user_base_dn', 'ou=people,ou=example,o=test')
def generate_logconf_file_contents():
    """Build and return a logging configuration as a RawConfigParser.

    Two handlers are configured: a console StreamHandler at WARN level and
    a FileHandler at INFO level writing to 'api_server.log'.  The root
    logger uses the console; a dedicated 'FakeWSGIHandler' logger routes
    to the file handler.  Both share the 'simple' formatter.
    """
    # (section, ((option, value), ...)) — section order matches the
    # order the parser registers them in.
    settings = (
        ('formatters', (('keys', 'simple'),)),
        ('formatter_simple',
         (('format', '%(name)s:%(levelname)s: %(message)s'),)),
        ('handlers', (('keys', 'console,api_server_file'),)),
        ('handler_console', (
            ('class', 'StreamHandler'),
            ('level', 'WARN'),
            ('args', '[]'),
            ('formatter', 'simple'),
        )),
        ('handler_api_server_file', (
            ('class', 'FileHandler'),
            ('level', 'INFO'),
            ('formatter', 'simple'),
            ('args', "('api_server.log',)"),
        )),
        ('loggers', (('keys', 'root,FakeWSGIHandler'),)),
        ('logger_root', (
            ('level', 'WARN'),
            ('handlers', 'console'),
        )),
        ('logger_FakeWSGIHandler', (
            ('level', 'INFO'),
            ('qualname', 'FakeWSGIHandler'),
            ('handlers', 'api_server_file'),
        )),
    )
    cfg_parser = RawConfigParser()
    for section, options in settings:
        cfg_parser.add_section(section)
        for option, value in options:
            cfg_parser.set(section, option, value)
    return cfg_parser
    # Tail of the ``defaults`` dict opened above this chunk.
    'ORLO_LOGDIR': '/var/log/orlo',
}

# Allow each default to be overridden by an environment variable of the
# same name; missing variables leave the built-in default in place.
for var, default in defaults.items():
    try:
        defaults[var] = os.environ[var]
    except KeyError:
        pass

# Horrible hack, but I am tired
# Read the Docs builds cannot write to /var/log, so logging is disabled
# there.
if os.environ.get('READTHEDOCS', None) == 'True':
    defaults['ORLO_LOGDIR'] = 'disabled'

# Built-in configuration defaults, applied before any ini file is read.
config = RawConfigParser()

config.add_section('main')
config.set('main', 'time_format', '%Y-%m-%dT%H:%M:%SZ')
config.set('main', 'time_zone', 'UTC')
config.set('main', 'base_url', 'http://localhost:8080')

config.add_section('gunicorn')
config.set('gunicorn', 'workers', '2')
config.set('gunicorn', 'bind', '127.0.0.1:8080')

config.add_section('security')
config.set('security', 'enabled', 'false')
config.set('security', 'passwd_file', 'none')
config.set('security', 'secret_key', 'change_me')
# NOTE: orlo.__init__ checks that secret_key is not "change_me" when security
# is enabled. Do not change the default here without updating __init__ as well.
config.set('security', 'token_ttl', '3600')
class ConfigStore(QtCore.QObject):
    """Persistent key/value settings store backed by ``<name>.ini``.

    The ini file lives in the platform-appropriate photini config
    directory (falling back to the data directory on old appdirs
    releases).  Writes are lazy: each change (re)starts a 3 second
    single-shot timer and the file is saved when the timer fires, or at
    application shutdown.
    """

    def __init__(self, name, *arg, **kw):
        super(ConfigStore, self).__init__(*arg, **kw)
        # flush any pending changes when the application quits
        QtCore.QCoreApplication.instance().aboutToQuit.connect(self.shutdown)
        self.config = RawConfigParser()
        if hasattr(appdirs, 'user_config_dir'):
            config_dir = appdirs.user_config_dir('photini')
        else:
            # older appdirs releases have no user_config_dir
            config_dir = appdirs.user_data_dir('photini')
        if not os.path.isdir(config_dir):
            os.makedirs(config_dir, mode=stat.S_IRWXU)
        self.file_name = os.path.join(config_dir, name + '.ini')
        if os.path.isfile(self.file_name):
            # Fix: read via a context manager so the file handle is
            # closed deterministically (the previous code leaked the
            # object returned by open()).
            if six.PY2:
                with open(self.file_name, 'r') as fp:
                    self.config.readfp(fp)
            else:
                with open(self.file_name, 'r', encoding='utf-8') as fp:
                    self.config.readfp(fp)
        self.timer = QtCore.QTimer(self)
        self.timer.setSingleShot(True)
        self.timer.setInterval(3000)
        self.timer.timeout.connect(self.save)
        # expose the parser's section test directly
        self.has_section = self.config.has_section

    def get(self, section, option, default=None):
        """Return the stored value for (section, option).

        If the option is missing and *default* is not None, the default
        is stored (scheduling a save) and returned.
        """
        if self.config.has_option(section, option):
            result = self.config.get(section, option)
            if six.PY2:
                return result.decode('utf-8')
            return result
        if default is not None:
            self.set(section, option, default)
        return default

    def set(self, section, option, value):
        """Store *value*, creating the section if needed.

        A save is scheduled only when the value actually changes.
        """
        if six.PY2:
            value = value.encode('utf-8')
        if not self.config.has_section(section):
            self.config.add_section(section)
        elif (self.config.has_option(section, option)
              and self.config.get(section, option) == value):
            # unchanged — avoid a pointless rewrite of the file
            return
        self.config.set(section, option, value)
        self.timer.start()

    def remove_section(self, section):
        """Delete a whole section (and its options) and schedule a save."""
        if not self.config.has_section(section):
            return
        for option in self.config.options(section):
            self.config.remove_option(section, option)
        self.config.remove_section(section)
        self.timer.start()

    @QtCore.pyqtSlot()
    def shutdown(self):
        # flush pending changes before the application exits
        if self.timer.isActive():
            self.timer.stop()
            self.save()

    @QtCore.pyqtSlot()
    def save(self):
        # Fix: write via a context manager so the handle is closed and
        # the data flushed before chmod (the previous code leaked the
        # object returned by open()).
        if six.PY2:
            with open(self.file_name, 'w') as fp:
                self.config.write(fp)
        else:
            with open(self.file_name, 'w', encoding='utf-8') as fp:
                self.config.write(fp)
        # settings may contain credentials; keep the file owner-only
        os.chmod(self.file_name, stat.S_IRUSR | stat.S_IWUSR)
class Imap2UsbLamp(object):
    """Poll IMAP mailboxes for unseen mail and drive a USB lamp.

    Configuration is read from an ini file in the user's home directory.
    ``start_server`` launches two daemon threads: ``check_unseen`` (the
    mailbox poller) and ``server`` (a localhost TCP control socket used
    by ``client`` to query status, supply passwords, or stop).
    Passwords typed into the client are exchanged through the two
    queues in ``self.pwd_queue`` (index 0: client->poller, index 1:
    poller->client).
    """

    def __init__(self, port):
        # TCP port for the localhost control server
        self.port = port
        self.usblamp = None
        # (request queue, reply queue) for password hand-off
        self.pwd_queue = (Queue(), Queue())
        # set to ask both worker threads to exit
        self.stop = Event()
        self.config_path = None
        self.parser = None
        # {section name: {option: value}} parsed from the ini file
        self.config = {}
        self.get_config()

    def start_server(self, usblamp):
        """Start the poller and control-server daemon threads; return both."""
        self.usblamp = usblamp
        # create check_unseen and server threads
        # NOTE(review): check_unseen/server are @staticmethod, so `self`
        # is passed explicitly as the `imap` argument.
        t1 = Thread(target=self.check_unseen, args=(self, True))
        t1.daemon = True
        t1.start()
        t2 = Thread(target=self.server, args=(self, ))
        t2.daemon = True
        t2.start()
        return (t1, t2)

    def get_config(self):
        """Load the ini file into ``self.config``, pruning broken sections."""
        # read config file
        self.config_path = path.expanduser(path.join('~', CONFIG_FILE_NAME))
        self.parser = RawConfigParser()
        if path.exists(self.config_path):
            self.parser.read(self.config_path)
        # read config and initialize if no content
        if not self.parser.has_section(IMAP_SECTION):
            self.parser.add_section(IMAP_SECTION)
        if self.parser.has_option(IMAP_SECTION, 'Services'):
            try:
                # NOTE(review): eval() of file contents — the 'Services'
                # option is trusted as a Python list literal.  Anyone who
                # can edit the config file can execute code here.
                services = eval(self.parser.get(IMAP_SECTION, 'Services'))
                logger.debug("Service = %s" % (str(services)))
            except Exception as e:
                # no valid config
                logger.error(str(e))
                return
            # read each config
            fixed = False
            for s in services:
                self.config[s] = {}
                try:
                    for k in self.parser.options(s):
                        self.config[s][k] = self.parser.get(s, k)
                    # logger.debug("%s = %s" % (s, str(self.config[s])))
                except NoSectionError as e:
                    # missing config
                    logger.error(str(e))
                    # NOTE(review): removing from `services` while
                    # iterating over it skips the following element —
                    # verify when more than one section is missing.
                    services.remove(s)
                    self.parser.set(IMAP_SECTION, 'Services', services)
                    self.config.pop(s)
                    fixed = True
            # save modification
            if fixed:
                with open(self.config_path, 'wb') as f:
                    self.parser.write(f)
                logger.info('*** Config file "%s" saved.' % self.config_path)

    def add_config(self, section):
        """Interactively prompt for one IMAP service and save it to disk."""
        print('\nSetup IMAP service.\n')
        print('Please enter the following information for %s.'
              % section)
        self.config[section] = {}
        self.config[section]['host'] = raw_input('Host: ').strip()
        self.config[section]['mailbox'] = raw_input('Mailbox: ').strip()
        # NOTE(review): the next span was mangled by a credential
        # scrubber ('******'): the end of the username prompt, the
        # password prompt and the 'while True:' opening the oauth
        # question are missing.  Restore from the upstream project
        # before running this code.
        self.config[section]['username'] = raw_input('Username: '******'Oauth2 (y/n): ').lower().strip()
            if oauth not in ['y', 'n']:
                print('Please enter "y" or "n" only.')
            else:
                break
        if oauth == 'y':
            # package-relative import on py3, plain module on py2
            if sys.version_info >= (3, ):
                from .oauth2 import GeneratePermissionUrl, AuthorizeTokens
            else:
                from oauth2 import GeneratePermissionUrl, AuthorizeTokens
            import webbrowser
            client_id = raw_input('Client ID: ').strip()
            secret = raw_input('Client Secret: ').strip()
            print(
                '\nWeb browser will open soon. Please click "Allow access" and copy the verification code.\n'
            )
            url = GeneratePermissionUrl(client_id)
            webbrowser.open(url, new=2)
            code = raw_input('Verification Code: ').strip()
            token = AuthorizeTokens(client_id, secret, code)
            logger.debug("Refresh Token: %s" % (token['refresh_token']))
            logger.debug("Access Token: %s" % (token['access_token']))
            logger.info("Access Token Expiration Seconds: %s" %
                        (token['expires_in']))
            self.config[section]['client_id'] = client_id
            self.config[section]['secret'] = secret
            self.config[section]['token'] = token
        # interval
        while True:
            try:
                self.config[section]['interval'] = int(
                    raw_input('Refresh interval (in minutes): '))
                break
            except:
                print('\nPlease enter an integer.\n')
        # color
        while True:
            color = raw_input('LED color in RR,GG,BB (0~%d): ' %
                              USBLamp.RGB_MAX).strip(',')
            done = 0
            try:
                # count how many of the three components are in range
                for i in color.split(','):
                    i = int(i)
                    if 0 <= i <= USBLamp.RGB_MAX:
                        done += 1
                    else:
                        break
            except:
                pass
            if done == 3:
                # stored as a tuple literal string, eval'd in check_unseen
                self.config[section]['color'] = '(' + color + ')'
                break
            else:
                print('\nPlease enter 3 integers (0~%d) separate by ",".\n' %
                      USBLamp.RGB_MAX)
        # delay
        while True:
            try:
                self.config[section]['delay'] = float(
                    eval('1.0*' +
                         raw_input('Fading delay (0 for no fading): ')))
                break
            except:
                print('\nPlease enter a floating number.\n')
        services = []
        if self.parser.has_option(IMAP_SECTION, 'Services'):
            try:
                services = eval(self.parser.get(IMAP_SECTION, 'Services'))
            except:
                pass
        services.append(section)
        self.parser.set(IMAP_SECTION, 'Services', services)
        self.parser.add_section(section)
        for k, v in self.config[section].items():
            self.parser.set(section, k, v)
        with open(self.config_path, 'wb') as f:
            self.parser.write(f)
        logger.info('*** Config file "%s" saved.' % self.config_path)

    @staticmethod
    def check_unseen(imap, loop=False):
        """Poller thread body: check each configured mailbox for unseen mail.

        Drives ``imap.usblamp`` (fade/colour when unseen mail exists,
        off otherwise).  With ``loop=True`` it keeps rescheduling each
        service on its configured interval until ``imap.stop`` is set.
        """
        from time import time
        if sys.version_info >= (3, ):
            from .oauth2 import RefreshToken
            from .oauth2 import GenerateOAuth2String
        else:
            from oauth2 import RefreshToken
            from oauth2 import GenerateOAuth2String
        # queue of service names due for a check
        task = Queue()
        # per-service: oauth token expiry time, or the account password
        timeout_or_pwd = {}
        # passwords received from the control server, keyed by service
        rx_pwd = {}
        # trigger all config
        for name, config in imap.config.items():
            if config.get('token'):
                # refresh token
                def refresh_token(config):
                    # NOTE(review): eval() of the stored token string;
                    # also add_config stores the key as 'client_id', but
                    # 'clientid' is read here — looks like a KeyError
                    # bug, verify against the config file format.
                    config['token'] = eval(config['token'])
                    config['token'] = RefreshToken(
                        config['clientid'], config['secret'],
                        config['token']['refresh_token'])
                    return time() + float(config['token']['expires_in']) - 1
                timeout_or_pwd[name] = refresh_token(config)
            else:
                timeout_or_pwd[name] = config.get('password', '')
            task.put(name)
        # process
        while True:
            # check if got password
            try:
                config_name, pwd = imap.pwd_queue[0].get(block=False)
                rx_pwd[config_name] = pwd
            except Empty:
                # check if got config
                try:
                    config_name = task.get(block=False)
                except Empty:
                    # do delay and check if stop
                    if imap.stop.wait(CHECK_QUEUE_INTERVAL):
                        break
                    else:
                        continue
            # access imap
            unseen = 0
            invalid_pwd = False
            config = imap.config[config_name]
            mailbox = None
            if config.get('token'):
                # oauth
                if time() > timeout_or_pwd[config_name]:
                    timeout_or_pwd[config_name] = refresh_token(config)
                auth_string = GenerateOAuth2String(
                    config['username'], config['token']['access_token'],
                    False)
                mailbox = imaplib.IMAP4_SSL(config['host'])
                if DEBUG > 1:
                    mailbox.debug = 4
                mailbox.authenticate('XOAUTH2', lambda x: auth_string)
            else:
                # non-oauth
                if timeout_or_pwd[config_name]:
                    # password already known
                    mailbox = imaplib.IMAP4_SSL(config['host'])
                    if DEBUG > 1:
                        mailbox.debug = 4
                    mailbox.login(config['username'],
                                  timeout_or_pwd[config_name])
                else:
                    logger.debug("%s, Waiting password." % config_name)
                    try:
                        if rx_pwd.get(config_name, ''):
                            # a password arrived via the control server
                            pwd = rx_pwd.pop(config_name)
                            mailbox = imaplib.IMAP4_SSL(config['host'])
                            if DEBUG > 1:
                                mailbox.debug = 4
                            mailbox.login(config['username'], pwd)
                            timeout_or_pwd[config_name] = pwd
                            imap.pwd_queue[1].put('OK for %s' % config_name)
                        else:
                            invalid_pwd = True
                    except (imaplib.IMAP4.abort, imaplib.IMAP4.error):
                        imap.pwd_queue[1].put('%s Error: Wrong password!' %
                                              config_name)
                        invalid_pwd = True
                    except Exception as e:
                        imap.pwd_queue[1].put('%s Error: %s!' %
                                              (config_name, str(e)))
                        invalid_pwd = True
            if not invalid_pwd:
                # check status
                if config.get('search'):
                    # count messages matching a custom IMAP search
                    mailbox.select(config['mailbox'])
                    typ, data = mailbox.search(None, config['search'])
                    if typ == 'OK':
                        unseen = len(data[0].split())
                        logger.info("%s: %d messages match '%s'" %
                                    (config_name, unseen, config['search']))
                else:
                    # plain unseen-message count via STATUS
                    typ, data = mailbox.status(config['mailbox'],
                                               '(Messages UnSeen)')
                    if typ == 'OK':
                        total, unseen = re.search(
                            'Messages\s+(\d+)\s+UnSeen\s+(\d+)',
                            decode(data[0]), re.I).groups()
                        unseen = int(unseen)
                        logger.info("%s: %s messages and %s unseen" %
                                    (config_name, total, unseen))
                # control usblamp
                if unseen:
                    delay = float(config['delay'])
                    # NOTE(review): eval() of the stored colour tuple
                    color = eval(config['color'])
                    if delay:
                        imap.usblamp.start_fading(delay, color)
                    else:
                        imap.usblamp.set_color(color)
                else:
                    imap.usblamp.off()
                # interval is minutes normally, seconds when debugging
                delay = float(config['interval']) * (1 if DEBUG else 60)
            else:
                delay = CHECK_QUEUE_INTERVAL
            if mailbox:
                mailbox.logout()
            if not loop:
                break
            # do delay and check if stop
            if imap.stop.wait(delay):
                break
            else:
                # schedule next check
                task.put(config_name)
        logger.debug("*** check_unseen thread exited.")

    @staticmethod
    def server(imap):
        """Control-server thread body: localhost TCP command loop.

        Commands (see ``client``): 'status', 'stop', 'exit', and
        'password,<section>,<pwd>' which is forwarded to the poller via
        ``imap.pwd_queue``.
        """
        import socket
        # services that still need a password typed in by the user
        no_pwd = [
            k for k, v in imap.config.items()
            if not v.get('token') and not v.get('password')
        ]
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server_address = ('localhost', imap.port)
        try:
            sock.bind(server_address)
        except Exception as e:
            logger.error(str(e))
            imap.stop.set()
            raise
        logger.info("*** Server started on %s" % str(server_address))
        sock.listen(1)
        stop = False
        while not stop and not imap.stop.is_set():
            conn, client_address = sock.accept()
            logger.info("Server get connection from %s" % str(client_address))
            try:
                while True:
                    data = decode(conn.recv(64))
                    if not data:
                        break
                    if 'password' in data.lower():
                        # never log the password itself
                        temp = ','.join(data.split(',')[:2])
                        logger.info("Server received %s,xxx" % temp)
                    else:
                        logger.info("Server received %s" % data)
                    msg = ''
                    if data == 'stop':
                        conn.sendall(encode('ok'))
                        stop = True
                        break
                    elif data == 'status':
                        for k, v in imap.config.items():
                            if k in no_pwd:
                                msg += '%s: Waiting password for %s\n' % (
                                    k, v['username'])
                            else:
                                msg += '%s: Working\n' % k
                    elif str.startswith(data.lower(), 'password'):
                        data = data.split(',')
                        if data[1] not in no_pwd:
                            msg += '%s is not a valid config name.\n' % data[1]
                        else:
                            # hand (name, pwd) to the poller and wait
                            # for its verdict
                            imap.pwd_queue[0].put(data[1:3])
                            ret = imap.pwd_queue[1].get()
                            if str.startswith(ret, 'OK'):
                                no_pwd.remove(data[1])
                            msg += ret
                    elif data == 'exit':
                        break
                    if msg:
                        conn.sendall(encode(msg))
            finally:
                conn.close()
        sock.close()
        logger.debug("*** server thread exited.")
        if stop:
            imap.usblamp.exit()
            imap.stop.set()

    @staticmethod
    def client(port):
        """Interactive console client for the control server on *port*."""
        import socket
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server_address = ('localhost', port)
        try:
            sock.connect(server_address)
        except socket.error:
            print('\nPlease start server first.\n')
            sys.exit()
        try:
            while True:
                print('\nImap2UsbLamp Status:\n')
                sock.sendall(encode('status'))
                print(decode(sock.recv(1024)))
                cmd = raw_input('Command: ').replace(' ', '').lower()
                if cmd == 'stop':
                    sock.sendall(encode(cmd))
                    print('\n' + decode(sock.recv(1024)))
                    break
                elif cmd == 'exit':
                    sock.sendall(encode(cmd))
                    break
                elif cmd == 'password':
                    import getpass
                    while True:
                        try:
                            cfg = int(raw_input('IMAP_?: ').replace(' ', ''))
                            break
                        except:
                            print('\nPlease enter an integer.\n')
                    pwd = getpass.getpass()
                    cfg = str(cfg)
                    # wire format: password,IMAP_<n>,<pwd>
                    sock.sendall(
                        encode((','.join([cmd, 'IMAP_' + cfg, pwd]))))
                    print('\n' + decode(sock.recv(1024)))
        finally:
            sock.close()
        sys.exit()
from __future__ import print_function
import os
from six.moves.configparser import RawConfigParser

__author__ = 'alforbes'

# Config file location; overridable through the ORLO_CONFIG env var.
CONFIG_FILE = os.environ.get('ORLO_CONFIG', '/etc/orlo/orlo.ini')

# Built-in default settings, grouped by section.  Tuples keep the
# option order stable on every interpreter.
_DEFAULTS = (
    ('main', (
        ('time_format', '%Y-%m-%dT%H:%M:%SZ'),
        ('time_zone', 'UTC'),
        ('strict_slashes', 'false'),
        ('base_url', 'http://localhost:8080'),
    )),
    ('gunicorn', (
        ('workers', '4'),
    )),
    ('security', (
        ('enabled', 'false'),
        ('passwd_file', 'none'),
        # NOTE: orlo.__init__ checks that secret_key is not "change_me" when
        # security is enabled. Do not change the default here without
        # updating __init__ as well.
        ('secret_key', 'change_me'),
        ('token_ttl', '3600'),
        ('ldap_server', 'localhost.localdomain'),
        ('ldap_port', '389'),
    )),
)

config = RawConfigParser()
for _section, _options in _DEFAULTS:
    config.add_section(_section)
    for _option, _value in _options:
        config.set(_section, _option, _value)
def __spawn_instance(self):
    """
    Create and configure a new KRA instance using pkispawn.

    Creates a configuration file with IPA-specific parameters
    and passes it to the base class to call pkispawn.  All
    temporary files holding secrets are removed again in the
    ``finally`` block below.
    """

    # Create an empty and secured file for the pkispawn config
    (cfg_fd, cfg_file) = tempfile.mkstemp()
    os.close(cfg_fd)
    pent = pwd.getpwnam(self.service_user)
    os.chown(cfg_file, pent.pw_uid, pent.pw_gid)
    self.tmp_agent_db = tempfile.mkdtemp(
        prefix="tmp-", dir=paths.VAR_LIB_IPA)
    tmp_agent_pwd = ipautil.ipa_generate_password()

    # Create a temporary file for the admin PKCS #12 file
    (admin_p12_fd, admin_p12_file) = tempfile.mkstemp()
    os.close(admin_p12_fd)

    # Create KRA configuration
    config = RawConfigParser()
    # keep option names case-sensitive (pkispawn option names are mixed case)
    config.optionxform = str
    config.add_section("KRA")

    # Security Domain Authentication
    config.set("KRA", "pki_security_domain_https_port", "443")
    config.set("KRA", "pki_security_domain_password", self.admin_password)
    config.set("KRA", "pki_security_domain_user", self.admin_user)

    # issuing ca
    config.set("KRA", "pki_issuing_ca_uri", "https://%s" %
               ipautil.format_netloc(self.fqdn, 443))

    # Server
    config.set("KRA", "pki_enable_proxy", "True")
    config.set("KRA", "pki_restart_configured_instance", "False")
    config.set("KRA", "pki_backup_keys", "True")
    config.set("KRA", "pki_backup_password", self.admin_password)

    # Client security database
    config.set("KRA", "pki_client_database_dir", self.tmp_agent_db)
    config.set("KRA", "pki_client_database_password", tmp_agent_pwd)
    config.set("KRA", "pki_client_database_purge", "True")
    config.set("KRA", "pki_client_pkcs12_password", self.admin_password)

    # Administrator
    config.set("KRA", "pki_admin_name", self.admin_user)
    config.set("KRA", "pki_admin_uid", self.admin_user)
    config.set("KRA", "pki_admin_email", "root@localhost")
    config.set("KRA", "pki_admin_password", self.admin_password)
    config.set("KRA", "pki_admin_nickname", "ipa-ca-agent")
    config.set("KRA", "pki_admin_subject_dn",
               str(DN(('cn', 'ipa-ca-agent'), self.subject_base)))
    config.set("KRA", "pki_import_admin_cert", "False")
    config.set("KRA", "pki_client_admin_cert_p12", admin_p12_file)

    # Directory server
    config.set("KRA", "pki_ds_ldap_port", "389")
    config.set("KRA", "pki_ds_password", self.dm_password)
    config.set("KRA", "pki_ds_base_dn", six.text_type(self.basedn))
    config.set("KRA", "pki_ds_database", "ipaca")
    config.set("KRA", "pki_ds_create_new_db", "False")

    self._use_ldaps_during_spawn(config)

    # Certificate subject DNs
    config.set("KRA", "pki_subsystem_subject_dn",
               str(DN(('cn', 'CA Subsystem'), self.subject_base)))
    config.set("KRA", "pki_sslserver_subject_dn",
               str(DN(('cn', self.fqdn), self.subject_base)))
    config.set("KRA", "pki_audit_signing_subject_dn",
               str(DN(('cn', 'KRA Audit'), self.subject_base)))
    config.set(
        "KRA", "pki_transport_subject_dn",
        str(DN(('cn', 'KRA Transport Certificate'), self.subject_base)))
    config.set(
        "KRA", "pki_storage_subject_dn",
        str(DN(('cn', 'KRA Storage Certificate'), self.subject_base)))

    # Certificate nicknames
    # Note that both the server certs and subsystem certs reuse
    # the ca certs.
    config.set("KRA", "pki_subsystem_nickname",
               "subsystemCert cert-pki-ca")
    config.set("KRA", "pki_sslserver_nickname",
               "Server-Cert cert-pki-ca")
    config.set("KRA", "pki_audit_signing_nickname",
               "auditSigningCert cert-pki-kra")
    config.set("KRA", "pki_transport_nickname",
               "transportCert cert-pki-kra")
    config.set("KRA", "pki_storage_nickname",
               "storageCert cert-pki-kra")

    # Shared db settings
    # Needed because CA and KRA share the same database
    # We will use the dbuser created for the CA
    config.set("KRA", "pki_share_db", "True")
    config.set(
        "KRA", "pki_share_dbuser_dn",
        str(DN(('uid', 'pkidbuser'), ('ou', 'people'), ('o', 'ipaca'))))

    if not (os.path.isdir(paths.PKI_TOMCAT_ALIAS_DIR) and
            os.path.isfile(paths.PKI_TOMCAT_PASSWORD_CONF)):
        # generate pin which we know can be used for FIPS NSS database
        pki_pin = ipautil.ipa_generate_password()
        config.set("KRA", "pki_pin", pki_pin)
    else:
        pki_pin = None

    p12_tmpfile_fd, p12_tmpfile_name = tempfile.mkstemp(dir=paths.TMP)
    # Fix: close the descriptor returned by mkstemp (the other mkstemp
    # calls above close theirs; this one previously leaked its fd).
    os.close(p12_tmpfile_fd)

    if self.clone:
        krafile = self.pkcs12_info[0]
        shutil.copy(krafile, p12_tmpfile_name)
        pent = pwd.getpwnam(self.service_user)
        os.chown(p12_tmpfile_name, pent.pw_uid, pent.pw_gid)

        # Security domain registration
        config.set("KRA", "pki_security_domain_hostname", self.fqdn)
        config.set("KRA", "pki_security_domain_https_port", "443")
        config.set("KRA", "pki_security_domain_user", self.admin_user)
        config.set("KRA", "pki_security_domain_password",
                   self.admin_password)

        # Clone
        config.set("KRA", "pki_clone", "True")
        config.set("KRA", "pki_clone_pkcs12_path", p12_tmpfile_name)
        config.set("KRA", "pki_clone_pkcs12_password", self.dm_password)
        config.set("KRA", "pki_clone_setup_replication", "False")
        config.set(
            "KRA", "pki_clone_uri",
            "https://%s" % ipautil.format_netloc(self.master_host, 443))
    else:
        # the admin cert file is needed for the first instance of KRA
        cert = self.get_admin_cert()
        # First make sure that the directory exists
        parentdir = os.path.dirname(paths.ADMIN_CERT_PATH)
        if not os.path.exists(parentdir):
            os.makedirs(parentdir)
        with open(paths.ADMIN_CERT_PATH, "wb") as admin_path:
            admin_path.write(
                base64.b64encode(cert.public_bytes(x509.Encoding.DER)))

    # Generate configuration file
    with open(cfg_file, "w") as f:
        config.write(f)

    try:
        DogtagInstance.spawn_instance(
            self, cfg_file,
            nolog_list=(self.dm_password, self.admin_password, pki_pin,
                        tmp_agent_pwd))
    finally:
        # always scrub the secret-bearing temp files
        os.remove(p12_tmpfile_name)
        os.remove(cfg_file)
        os.remove(admin_p12_file)

    shutil.move(paths.KRA_BACKUP_KEYS_P12, paths.KRACERT_P12)
    logger.debug("completed creating KRA instance")