def getSavedProfileNames():
    """Return the names of all saved profiles.

    Reads the profile (section) names from the getwatchlist.conf files in
    the default and local config locations and merges them into a single
    list: default names first, then any local-only names.

    :return: list of unique profile names
    """
    # First the defaults
    parser = ConfigParser()
    parser.optionxform = str  # preserve option-name case
    parser.read(getDefaultConfPath())
    profileNames = parser.sections()

    # Now the locals
    parser = ConfigParser()
    parser.optionxform = str
    parser.read(getLocalConfPath())
    localProfileNames = parser.sections()

    # Now create one list, skipping names already seen in the defaults
    for profile in localProfileNames:
        if profile not in profileNames:
            profileNames.append(profile)
    return profileNames
def load_default_env(auppath=DEFAULT_AUPTIMIZER_PATH, log=logger, use_default=True):
    """Load default environment variables for aup.

    Search recursively to upper folder

    :param auppath: aup environment folder, contains `env.ini` file
    :type auppath: str
    :param log: logger obj to trace where the function is called, default is aup.utils
    :type log: logging.Logger
    :param use_default: if auppath is empty, use user's home folder instead.
    :type use_default: bool
    :return: key-value of parameters
    :rtype: dict
    """
    if not path.isfile(path.join(auppath, "env.ini")):
        if use_default:
            # fall back to ~/.aup when the requested folder has no env.ini
            auppath = path.join(path.expanduser("~"), ".aup")
            # use lazy %-style logging args instead of eager string formatting
            log.warning("Use default env at %s", auppath)
            if not path.isfile(path.join(auppath, "env.ini")):  # pragma: no cover
                raise Exception("Failed to find env.ini")
        else:
            raise ValueError("Auptimizer folder %s is missing" % auppath)
    log.info("Auptimizer environment at %s", auppath)
    config = ConfigParser()
    config.optionxform = str  # preserve option-name case
    config.read(path.join(auppath, "env.ini"))
    # dict() over items() replaces the hand-rolled comprehension
    return dict(config.items("Auptimizer"))
def main(env, cpu, gpu, node, aws, user, overwrite, log):  # pragma: no cover
    """
    Create environment based on env file for Auptimizer
    \b\n Copyright (C) 2018 LG Electronics Inc.
    \b\n GPL-3.0 License. This program comes with ABSOLUTELY NO WARRANTY;
    \b\n
    Arguments:
        env {str}: Auptimizer config folder path. Default to create at `./.aup/`.
        Leave empty to create a new one. Or use the path of the filename (env.ini)
        to load predefined values (also use --overwrite).
    \b
    Raises:
        Exception: If failed to load the existing Auptimizer configuration file.
    """
    # NOTE: the docstring above doubles as the CLI help text (click-style
    # \b markers), so its wording is user-visible.
    coloredlogs.install(level=LOG_LEVEL[log],
                        fmt="%(levelname)s - %(message)s")
    # resolve the username (falls back to a default when not given)
    user = get_default_username(user)
    config = ConfigParser()
    config.optionxform = str
    if env == ".":  # interactive
        # interactive_setup prompts the user and returns the final values
        config, cpu, gpu, node, aws, user, overwrite = \
            interactive_setup(env, config, cpu, gpu, node, aws, user, overwrite)
    else:
        try:
            if not os.path.isfile(env):
                # env is a folder: look for env.ini inside it
                logger.info("Load default env.ini file.")
                env = os.path.join(env, "env.ini")
            config.read(env)
        except Exception as e:
            logger.fatal("failed to read %s", env)
            raise e
    # delegate the actual environment creation
    setup(config, cpu, gpu, node, aws, user, overwrite, log)
def loadKeysConfig(path=None):
    """Load the keyboard shortcuts configuration file.

    If *path* is ``None``, a file named :any:`DEFAULT_KEYS_FILE` is looked
    for in the config directory.  Every option found is registered as an
    action shortcut; an optional ``widget:``/``window:``/``children:``/
    ``application:`` prefix on the key string selects the Qt shortcut
    context (widget context by default).

    :param path: path of the keyboard configuration file
    """
    if path is None:
        path = getConfigFilePath(DEFAULT_KEYS_FILE)

    cfg = ConfigParser()
    cfg.optionxform = str
    cfg.read([path])

    # map of recognised key-string prefixes to Qt shortcut contexts
    prefixToContext = {
        'widget': Qt.WidgetShortcut,
        'window': Qt.WindowShortcut,
        'children': Qt.WidgetWithChildrenShortcut,
        'application': Qt.ApplicationShortcut,
    }

    for category in cfg.sections():
        for actionName in cfg.options(category):
            keystr = cfg.get(category, actionName)
            scope, sep, remainder = keystr.partition(':')
            if sep and scope in prefixToContext:
                keystr = remainder
                context = prefixToContext[scope]
            else:
                context = Qt.WidgetShortcut
            registerActionShortcut(category, actionName,
                                   QKeySequence(keystr), context)
def _setup_wrapper(filename, *args):
    """Parse *filename* as a config file and forward it to ``setup.setup``."""
    parsed = ConfigParser()
    parsed.optionxform = str
    parsed.read(filename)
    try:
        setup.setup(parsed, *args)
    except OSError:
        # best-effort: the setupdb import error is expected in some installs
        print("Above 'No module named aup.setupdb' error can be ignored")
def getDefaultSavedProfileNames():
    """Return the names of the DEFAULT saved profiles.

    Reads the section names from the getwatchlist.conf file in the default
    config location and returns them as a list.
    """
    defaultsParser = ConfigParser()
    defaultsParser.optionxform = str
    defaultsParser.read(getDefaultConfPath())
    return defaultsParser.sections()
def read_conf():
    """Read ``conf/settings.ini`` and return the MongoDB and Solr settings.

    :return: tuple ``(mongodb, solr)`` of option dicts; a dict is left
        empty when its section is missing from the config file (the
        original raised UnboundLocalError in that case).
    """
    config_file = "conf/settings.ini"
    parser = ConfigParser()
    parser.optionxform = str  # preserve option-name case
    parser.read(config_file)
    # Initialise both up front so a missing section cannot leave the
    # variables unbound at the return statement.
    mongodb = {}
    solr = {}
    for section_name in parser.sections():
        if section_name == 'MongoDBServer':
            mongodb = dict(parser.items(section_name))
        elif section_name == 'SolrServer':
            solr = dict(parser.items(section_name))
    return mongodb, solr
def main():
    """Build and write the OmegaScans Condor DAG from a config.ini file.

    With ``-e/--example`` the example config and source-file templates are
    written to the current directory and the program exits.
    """
    #############################################################################
    #
    # ARGUMENT PARSING
    #
    #############################################################################
    parser = OptionParser(usage)
    parser.add_option(
        "-e", "--example", default=False, dest="example", action="store_true",
        help="Create example config.ini and an example sourcefile")
    (opts, args) = parser.parse_args()

    if opts.example:
        with open("omega_config.ini", "w") as f:
            f.write(ExampleConfig)
        with open("omegascanslist.txt", "w") as f:
            f.write(ExampleSourceFile)
        print(
            "Example files \"omega_config.ini\" and \"omegascanslist.txt\" are created"
        )
        sys.exit(0)

    if len(args) != 1:
        parser.print_help()
        sys.exit("ERROR: Must provide one config.ini")

    cp = ConfigParser()
    cp.optionxform = str
    # Fix: readfp(open(...)) leaked the file handle, and readfp() is
    # deprecated (removed in Python 3.12).  Use a context manager and
    # read_file() where available, falling back to readfp() on Python 2.
    with open(args[0]) as f:
        if hasattr(cp, 'read_file'):
            cp.read_file(f)
        else:  # pragma: no cover - Python 2 fallback
            cp.readfp(f)

    dag = OmegaScansDAG(cp)
    dag.write_sub_files()
    dag.write_dag()
    dag.write_script()

    #fix the sub and sh files
    #This is required because pipeline.py does not yet have the ability to add
    #a specific argument before all other arguments and options ('scan' in this case)
    fix_subfile(dag.submitFile)
    fix_scriptfile(cp.get('paths', 'basedir'), dag.get_dag_file(),
                   cp.get('omegapipe', 'executable'))

    print('Successfully created DAG file.')
    fulldagpath = os.path.join(cp.get('paths', 'basedir'), dag.get_dag_file())
    print('Now run condor_submit_dag %s\n' % (fulldagpath))
def _load_config(no_cfgfile=False):
    """Build the configuration parser, seeded with the built-in defaults
    and optionally overridden by the on-disk config files.

    :param no_cfgfile: when True, skip reading the user config files
    :return: the populated ConfigParser
    """
    cfg = ConfigParser()
    cfg.optionxform = str  # make it preserve case
    # Seed with the built-in defaults (bytes buffer on py2, text on py3).
    if six.PY3:
        cfg.read_file(StringIO(_DEFAULT_CONFIG))
    else:
        cfg.readfp(BytesIO(_DEFAULT_CONFIG))
    if not no_cfgfile:
        # Layer the user's config files on top of the defaults.
        cfg_paths = (os.path.join(_STASH_ROOT, f) for f in _STASH_CONFIG_FILES)
        cfg.read(cfg_paths)
    return cfg
def _install_desktop_file(self, destdir, prefix, activity_path):
    """Generate and install a freedesktop .desktop launcher for the bundle.

    Reads the (translated) name/summary from the bundle's activity.info and
    per-locale activity.linfo files and writes the launcher to
    ``<destdir>/<prefix>/share/applications/<bundle_id>.activity.desktop``.
    """
    cp = ConfigParser()
    section = 'Desktop Entry'
    cp.add_section(section)
    cp.optionxform = str  # Allow CamelCase entries

    # Get it from the activity.info for the non-translated version
    info = ConfigParser()
    info_path = os.path.join(destdir, os.path.relpath(activity_path, '/'),
                             'activity', 'activity.info')
    info.read(info_path)
    cp.set(section, 'Name', info.get('Activity', 'name'))
    if info.has_option('Activity', 'summary'):
        cp.set(section, 'Comment', info.get('Activity', 'summary'))

    # Add translated Name[locale]/Comment[locale] entries from each
    # locale's linfo file (locale is the directory name).
    for path in sorted(
            glob(
                os.path.join(activity_path, 'locale', '*',
                             'activity.linfo'))):
        locale = path.split(os.path.sep)[-2]
        info = ConfigParser()
        info.read(path)
        if info.has_option('Activity', 'name'):
            cp.set(section, 'Name[{}]'.format(locale),
                   info.get('Activity', 'name'))
        if info.has_option('Activity', 'summary'):
            cp.set(section, 'Comment[{}]'.format(locale),
                   info.get('Activity', 'summary'))

    cp.set(section, 'Terminal', 'false')
    cp.set(section, 'Type', 'Application')
    cp.set(section, 'Categories', 'Education;')
    cp.set(
        section, 'Icon',
        os.path.join(activity_path, 'activity',
                     self.config.bundle.get_icon_filename()))
    cp.set(section, 'Exec', self.config.bundle.get_command())
    cp.set(section, 'Path', activity_path)  # Path == CWD for running

    name = '{}.activity.desktop'.format(self.config.bundle_id)
    path = os.path.join(destdir, os.path.relpath(prefix, '/'), 'share',
                        'applications', name)
    # create share/applications if it does not exist yet
    if not os.path.isdir(os.path.dirname(path)):
        os.makedirs(os.path.dirname(path))
    with open(path, 'w') as f:
        cp.write(f)

    print('Install %s' % (path))
def main():
    """Build and write the OmegaScans Condor DAG from a config.ini file.

    With ``-e/--example`` the example config and source-file templates are
    written to the current directory and the program exits.
    """
    #############################################################################
    #
    # ARGUMENT PARSING
    #
    #############################################################################
    parser = OptionParser(usage)
    parser.add_option("-e", "--example", default=False, dest="example",
                      action="store_true",
                      help="Create example config.ini and an example sourcefile")
    (opts, args) = parser.parse_args()

    if opts.example:
        with open("omega_config.ini", "w") as f:
            f.write(ExampleConfig)
        with open("omegascanslist.txt", "w") as f:
            f.write(ExampleSourceFile)
        print("Example files \"omega_config.ini\" and \"omegascanslist.txt\" are created")
        sys.exit(0)

    if len(args) != 1:
        parser.print_help()
        sys.exit("ERROR: Must provide one config.ini")

    cp = ConfigParser()
    cp.optionxform = str
    # Fix: readfp(open(...)) leaked the file handle, and readfp() is
    # deprecated (removed in Python 3.12).  Use a context manager and
    # read_file() where available, falling back to readfp() on Python 2.
    with open(args[0]) as f:
        if hasattr(cp, 'read_file'):
            cp.read_file(f)
        else:  # pragma: no cover - Python 2 fallback
            cp.readfp(f)

    dag = OmegaScansDAG(cp)
    dag.write_sub_files()
    dag.write_dag()
    dag.write_script()

    #fix the sub and sh files
    #This is required because pipeline.py does not yet have the ability to add
    #a specific argument before all other arguments and options ('scan' in this case)
    fix_subfile(dag.submitFile)
    fix_scriptfile(cp.get('paths', 'basedir'), dag.get_dag_file(),
                   cp.get('omegapipe', 'executable'))

    print('Successfully created DAG file.')
    fulldagpath = os.path.join(cp.get('paths', 'basedir'), dag.get_dag_file())
    print('Now run condor_submit_dag %s\n' % (fulldagpath))
def _update_fstab(newroot):
    """Update the fstab of the new layer: point "/" at the new LV and make
    sure the "discard" mount option is set for all known volume targets.

    NOTE(review): relies on `new_lv` from an enclosing/module scope for the
    new root device path -- confirm it is in scope at the call site.

    :param newroot: filesystem root of the new layer
    """
    newfstab = Fstab("%s/etc/fstab" % newroot)

    if not newfstab.exists():
        log.info("The new layer contains no fstab, skipping.")
        return

    log.debug("Checking new fstab: %s" % newfstab)
    log.info("Updating fstab of new layer")
    rootentry = newfstab.by_target("/")
    rootentry.source = new_lv.path
    newfstab.update(rootentry)

    # Ensure that discard is used
    # This can also be done in anaconda once it is fixed
    targets = list(constants.volume_paths().keys()) + ["/"]
    for tgt in targets:
        try:
            e = newfstab.by_target(tgt)
            if "discard" not in e.options:
                e.options += ["discard"]
                newfstab.update(e)
        except KeyError:
            # Created with imgbased.volume?
            # Target is not in fstab; fall back to patching the systemd
            # mount unit for it instead.
            log.debug("{} not found in /etc/fstab. "
                      "not created by Anaconda".format(tgt))
            from six.moves.configparser import ConfigParser
            c = ConfigParser()
            c.optionxform = str
            # Derive the systemd mount-unit name from the target path
            # (e.g. "/var/log" -> "var-log.mount").
            sub = re.sub(r'^/', '', tgt)
            sub = re.sub(r'/', '-', sub)
            fname = "{}/etc/systemd/system/{}.mount".format(newroot, sub)
            c.read(fname)
            if 'discard' not in c.get('Mount', 'Options'):
                c.set('Mount', 'Options',
                      ','.join([c.get('Mount', 'Options'), 'discard']))
                with open(fname, 'w') as mountfile:
                    c.write(mountfile)
def get_conf_stanzas(conf_name):
    '''Get stanzas of `conf_name` by shelling out to Splunk's btool.

    :param conf_name: Config file.
    :type conf_name: ``string``
    :returns: Config stanzas.
    :rtype: ``dict``

    Usage::

       >>> stanzas = get_conf_stanzas('server')
       >>> return: {'serverName': 'testServer', 'sessionTimeout': '1h', ...}
    '''
    if conf_name.endswith('.conf'):
        conf_name = conf_name[:-5]

    # TODO: dynamically caculate SPLUNK_HOME
    btool_cli = [
        op.join(os.environ['SPLUNK_HOME'], 'bin', 'btool'), conf_name, 'list'
    ]
    proc = subprocess.Popen(btool_cli,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    stdout, _ = proc.communicate()
    if isinstance(stdout, bytes):
        stdout = stdout.decode()

    parser = ConfigParser()
    parser.optionxform = str
    # read_file() only exists on Python >= 3.2; fall back to readfp().
    if sys.version_info[:2] >= (3, 2):
        parser.read_file(StringIO(stdout))
    else:
        parser.readfp(StringIO(stdout))

    return {
        section: dict(parser.items(section))
        for section in parser.sections()
    }
def _install_desktop_file(self, prefix, activity_path):
    """Generate and install a freedesktop .desktop launcher for the bundle.

    Reads the (translated) name/summary from the bundle's activity.info and
    per-locale activity.linfo files and writes the launcher to
    ``<prefix>/share/applications/<bundle_id>.activity.desktop``.
    """
    cp = ConfigParser()
    section = 'Desktop Entry'
    cp.add_section(section)
    cp.optionxform = str  # Allow CamelCase entries

    # Get it from the activity.info for the non-translated version
    info = ConfigParser()
    info.read(os.path.join(activity_path, 'activity', 'activity.info'))
    cp.set(section, 'Name', info.get('Activity', 'name'))
    if info.has_option('Activity', 'summary'):
        cp.set(section, 'Comment', info.get('Activity', 'summary'))

    # Add translated Name[locale]/Comment[locale] entries from each
    # locale's linfo file (locale is the directory name).
    for path in sorted(glob(os.path.join(activity_path, 'locale',
                                         '*', 'activity.linfo'))):
        locale = path.split(os.path.sep)[-2]
        info = ConfigParser()
        info.read(path)
        if info.has_option('Activity', 'name'):
            cp.set(section, 'Name[{}]'.format(locale),
                   info.get('Activity', 'name'))
        if info.has_option('Activity', 'summary'):
            cp.set(section, 'Comment[{}]'.format(locale),
                   info.get('Activity', 'summary'))

    cp.set(section, 'Terminal', 'false')
    cp.set(section, 'Type', 'Application')
    cp.set(section, 'Categories', 'Education;')
    cp.set(section, 'Icon', os.path.join(
        activity_path, 'activity',
        self.config.bundle.get_icon_filename()))
    cp.set(section, 'Exec', self.config.bundle.get_command())
    cp.set(section, 'Path', activity_path)  # Path == CWD for running

    name = '{}.activity.desktop'.format(self.config.bundle_id)
    path = os.path.join(prefix, 'share', 'applications', name)
    # create share/applications if it does not exist yet
    if not os.path.isdir(os.path.dirname(path)):
        os.makedirs(os.path.dirname(path))
    with open(path, 'w') as f:
        cp.write(f)
def _buildInput (self):
    """
    Build the input data
    Input could be:
    1. list: ['input', 'infile:file'] <=> ['input:var', 'infile:path']
    2. str : "input, infile:file" <=> input:var, infile:path
    3. dict: {"input": channel1, "infile:file": channel2}
       or {"input:var, input:file" : channel3}
    for 1,2 channels will be the combined channel from dependents,
    if there is not dependents, it will be sys.argv[1:]
    """
    self.props['input'] = {}

    if self.resume in ['skip+', 'resume']:
        # Resuming: restore the previously-computed input from the
        # proc.settings file saved in the working directory.
        from six.moves.configparser import ConfigParser
        psfile = path.join(self.workdir, 'proc.settings')
        if not path.isfile(psfile):
            raise ProcInputError(psfile, 'Cannot parse input for skip+/resume process, no such file')

        cp = ConfigParser()
        cp.optionxform = str
        cp.read(psfile)
        self.props['size'] = int(json.loads(cp.get('size', 'value')))
        indata = OrderedDict(cp.items('input'))
        intype = ''
        inname = ''
        # keys come in pairs: "<name>.type" followed by "<name>.data#N"
        for key in indata.keys():
            if key.endswith('.type'):
                intype = indata[key]
                inname = key[:-5]
                self.props['input'][inname] = {
                    'type': intype,
                    'data': []
                }
            elif key.startswith(inname + '.data#'):
                if intype in Proc.IN_FILESTYPE:
                    # files-type data: one JSON value per non-empty line
                    data = [json.loads(s) for s in filter(None, indata[key].splitlines())]
                else:
                    data = json.loads(indata[key].strip())
                self.props['input'][inname]['data'].append(data)
        self.props['jobs'] = [None] * self.size
    else:
        # Fresh run: normalise the configured input spec into
        # {key-string: Channel}.
        indata = self.config['input']
        if not isinstance (indata, dict):
            indata = ','.join(utils.alwaysList(indata))
            # channels come from dependents, or sys.argv when there are none
            indata = {
                indata: Channel.fromChannels(*[d.channel for d in self.depends]) \
                if self.depends else Channel.fromArgv()
            }

        inkeys = list(indata.keys())
        pinkeys = []
        pintypes = []
        # split "name:type" keys; bare names default to the var type
        for key in utils.alwaysList(inkeys):
            if ':' not in key:
                pinkeys.append(key)
                pintypes.append(Proc.IN_VARTYPE[0])
            else:
                k, t = key.split(':')
                if t not in Proc.IN_VARTYPE + Proc.IN_FILESTYPE + Proc.IN_FILETYPE:
                    raise ProcInputError(t, 'Unknown input type')
                pinkeys.append(k)
                pintypes.append(t)

        # column-bind every value into one combined channel
        invals = Channel.create()
        for inkey in inkeys:
            inval = indata[inkey]
            if callable(inval):
                # callables receive the dependents' channels (or argv)
                inval = inval(*[d.channel for d in self.depends] if self.depends else Channel.fromArgv())
                invals = invals.cbind(inval)
            elif isinstance(inval, Channel):
                invals = invals.cbind(inval)
            else:
                invals = invals.cbind(Channel.create(inval))
        self.props['size'] = invals.length()
        self.props['jobs'] = [None] * self.size

        # support empty input
        pinkeys = list(filter(None, pinkeys))
        wdata = invals.width()
        if len(pinkeys) < wdata:
            self.log('Not all data are used as input, %s column(s) wasted.' % (wdata - len(pinkeys)), 'warning')
        # assign one channel column per input key; keys beyond the channel
        # width fall back to empty values
        for i, inkey in enumerate(pinkeys):
            self.props['input'][inkey] = {}
            self.props['input'][inkey]['type'] = pintypes[i]
            if i < wdata:
                self.props['input'][inkey]['data'] = invals.flatten(i)
            else:
                self.log('No data found for input key "%s", use empty strings/lists instead.' % inkey, 'warning')
                self.props['input'][inkey]['data'] = [[] if pintypes[i] in Proc.IN_FILESTYPE else ''] * self.size
type='int', metavar='NUM', help='Number of prior samples to analyse') (opts, args) = parser.parse_args() if len(args) == 0: parser.print_help() sys.exit(1) inifile = args[0] # Set up the configuration for the sub-dags prior_cp = ConfigParser() prior_cp.optionxform = str prior_cp.readfp(open(inifile)) main_cp = ConfigParser() main_cp.optionxform = str main_cp.readfp(open(inifile)) rundir = os.path.abspath(opts.run_path) if opts.daglog_path is not None: prior_cp.set('paths', 'daglogdir', os.path.join(os.path.abspath(opts.daglog_path), 'prior')) main_cp.set('paths', 'daglogdir', os.path.join(os.path.abspath(opts.daglog_path), 'main')) daglogdir = os.path.abspath(opts.daglog_path) else:
def getExactSavedProfile(profileName, fromLocal=False):
    """Read a single profile from the getwatchlist.conf in the default or
    local directory and return its settings as a dictionary.

    Global proxy settings (from a "globals" section) are applied first and
    may be overridden by the profile itself.  If the profile is not found,
    the default settings are returned unchanged.

    :param profileName: name of the profile (section) to look up
    :param fromLocal: read the local conf file instead of the default one
    :return: dict of settings
    """
    parser = ConfigParser()
    parser.optionxform = str
    if fromLocal:
        parser.read(getLocalConfPath())
    else:
        parser.read(getDefaultConfPath())
    sections = parser.sections()
    settings = getDefaultSettings()

    # We start with the globals. These can be overridden with local settings
    # in the config for the profile. The globals are just nice to have.
    realGlobalName = getRealProfileName('globals', sections, fromLocal)
    if realGlobalName is not None:
        for key, value in parser.items(realGlobalName):
            lowkey = key.lower()
            if lowkey == 'proxyhost':
                settings['proxyHost'] = value
            elif lowkey == 'proxyport':
                settings['proxyPort'] = str(value)

    # The profile name should be a URL if the profile doesn't exist
    # Otherwise, it will be overwritten by the url in the profile
    # THIS CHANGED after we have the local and default profiles
    # settings['url'] = profileName
    realProfileName = getRealProfileName(profileName, sections, fromLocal)
    # if the profile isn't in the file return defaults
    if not realProfileName:
        return settings

    # now grab all of the settings at once and assign
    for key, value in parser.items(realProfileName):
        lowKey = key.lower()
        # strip any quotes from our value
        value = value.replace('\'', '')
        value = value.replace('"', '')
        value = value.strip()
        if lowKey == 'url':
            settings['url'] = value
        elif lowKey == 'delimiter':
            settings['delimiter'] = value
        elif lowKey == 'comment':
            settings['comment'] = value
        elif lowKey == 'relevantfieldname':
            settings['relevantFieldName'] = value
        elif lowKey == 'relevantfieldcol':
            settings['relevantFieldCol'] = int(value) - 1
        elif lowKey == 'categorycol':
            settings['categoryCol'] = int(value) - 1
        elif lowKey == 'referencecol':
            settings['referenceCol'] = int(value) - 1
        elif lowKey == 'datecol':
            settings['dateCol'] = int(value) - 1
        elif lowKey == 'authuser':
            settings['authUser'] = value
        elif lowKey == 'authpass':
            settings['authPass'] = value
        elif lowKey == 'ignorefirstline':
            # Fix: bool(value) was True for ANY non-empty string, including
            # "False"; parse the boolean text explicitly instead (same
            # values ConfigParser.getboolean accepts as true).
            settings['ignoreFirstLine'] = value.lower() in ('1', 'yes', 'true', 'on')
        elif lowKey == 'proxyhost':
            settings['proxyHost'] = value
        elif lowKey == 'proxyport':
            settings['proxyPort'] = str(value)
        else:
            # numeric keys add extra columns; anything else is a custom field
            if lowKey.isdigit():
                settings['addCols'][int(key)] = value
            else:
                settings['customFields'][key] = value
    return settings
def __spawn_instance(self):
    """
    Create and configure a new KRA instance using pkispawn.
    Creates a configuration file with IPA-specific parameters
    and passes it to the base class to call pkispawn
    """
    # Create an empty and secured file (owned by the service user so
    # pkispawn can read it)
    (cfg_fd, cfg_file) = tempfile.mkstemp()
    os.close(cfg_fd)
    pent = pwd.getpwnam(self.service_user)
    os.chown(cfg_file, pent.pw_uid, pent.pw_gid)

    # Create KRA configuration
    config = ConfigParser()
    config.optionxform = str
    config.add_section("KRA")

    # Security Domain Authentication
    config.set("KRA", "pki_security_domain_https_port", "443")
    config.set("KRA", "pki_security_domain_password", self.admin_password)
    config.set("KRA", "pki_security_domain_user", self.admin_user)

    # issuing ca
    config.set("KRA", "pki_issuing_ca_uri",
               "https://%s" % ipautil.format_netloc(self.fqdn, 443))

    # Server
    config.set("KRA", "pki_enable_proxy", "True")
    config.set("KRA", "pki_restart_configured_instance", "False")
    config.set("KRA", "pki_backup_keys", "True")
    config.set("KRA", "pki_backup_password", self.admin_password)

    # Client security database
    config.set("KRA", "pki_client_database_dir", self.agent_db)
    config.set("KRA", "pki_client_database_password", self.admin_password)
    config.set("KRA", "pki_client_database_purge", "False")
    config.set("KRA", "pki_client_pkcs12_password", self.admin_password)

    # Administrator
    config.set("KRA", "pki_admin_name", self.admin_user)
    config.set("KRA", "pki_admin_uid", self.admin_user)
    config.set("KRA", "pki_admin_email", "root@localhost")
    config.set("KRA", "pki_admin_password", self.admin_password)
    config.set("KRA", "pki_admin_nickname", "ipa-ca-agent")
    config.set("KRA", "pki_admin_subject_dn",
               str(DN(('cn', 'ipa-ca-agent'), self.subject_base)))
    config.set("KRA", "pki_import_admin_cert", "True")
    config.set("KRA", "pki_admin_cert_file", paths.ADMIN_CERT_PATH)
    config.set("KRA", "pki_client_admin_cert_p12", paths.DOGTAG_ADMIN_P12)

    # Directory server
    config.set("KRA", "pki_ds_ldap_port", "389")
    config.set("KRA", "pki_ds_password", self.dm_password)
    config.set("KRA", "pki_ds_base_dn", self.basedn)
    config.set("KRA", "pki_ds_database", "ipaca")
    config.set("KRA", "pki_ds_create_new_db", "False")

    self._use_ldaps_during_spawn(config)

    # Certificate subject DNs
    config.set("KRA", "pki_subsystem_subject_dn",
               str(DN(('cn', 'CA Subsystem'), self.subject_base)))
    config.set("KRA", "pki_ssl_server_subject_dn",
               str(DN(('cn', self.fqdn), self.subject_base)))
    config.set("KRA", "pki_audit_signing_subject_dn",
               str(DN(('cn', 'KRA Audit'), self.subject_base)))
    config.set(
        "KRA", "pki_transport_subject_dn",
        str(DN(('cn', 'KRA Transport Certificate'), self.subject_base)))
    config.set(
        "KRA", "pki_storage_subject_dn",
        str(DN(('cn', 'KRA Storage Certificate'), self.subject_base)))

    # Certificate nicknames
    # Note that both the server certs and subsystem certs reuse
    # the ca certs.
    config.set("KRA", "pki_subsystem_nickname",
               "subsystemCert cert-pki-ca")
    config.set("KRA", "pki_ssl_server_nickname",
               "Server-Cert cert-pki-ca")
    config.set("KRA", "pki_audit_signing_nickname",
               "auditSigningCert cert-pki-kra")
    config.set("KRA", "pki_transport_nickname",
               "transportCert cert-pki-kra")
    config.set("KRA", "pki_storage_nickname",
               "storageCert cert-pki-kra")

    # Shared db settings
    # Needed because CA and KRA share the same database
    # We will use the dbuser created for the CA
    config.set("KRA", "pki_share_db", "True")
    config.set(
        "KRA", "pki_share_dbuser_dn",
        str(DN(('uid', 'pkidbuser'), ('ou', 'people'), ('o', 'ipaca'))))

    _p12_tmpfile_handle, p12_tmpfile_name = tempfile.mkstemp(dir=paths.TMP)

    if self.clone:
        # Clone install: reuse the PKCS#12 material from the master.
        krafile = self.pkcs12_info[0]
        shutil.copy(krafile, p12_tmpfile_name)
        pent = pwd.getpwnam(self.service_user)
        os.chown(p12_tmpfile_name, pent.pw_uid, pent.pw_gid)

        # Security domain registration
        config.set("KRA", "pki_security_domain_hostname", self.master_host)
        config.set("KRA", "pki_security_domain_https_port", "443")
        config.set("KRA", "pki_security_domain_user", self.admin_user)
        config.set("KRA", "pki_security_domain_password", self.admin_password)

        # Clone
        config.set("KRA", "pki_clone", "True")
        config.set("KRA", "pki_clone_pkcs12_path", p12_tmpfile_name)
        config.set("KRA", "pki_clone_pkcs12_password", self.dm_password)
        config.set("KRA", "pki_clone_setup_replication", "False")
        config.set(
            "KRA", "pki_clone_uri",
            "https://%s" % ipautil.format_netloc(self.master_host, 443))
    else:
        # the admin cert file is needed for the first instance of KRA
        cert = DogtagInstance.get_admin_cert(self)
        with open(paths.ADMIN_CERT_PATH, "w") as admin_path:
            admin_path.write(cert)

    # Generate configuration file
    # NOTE(review): "wb" works on Python 2 but ConfigParser.write()
    # requires a text-mode file on Python 3 -- confirm intended runtime.
    with open(cfg_file, "wb") as f:
        config.write(f)

    try:
        DogtagInstance.spawn_instance(
            self, cfg_file,
            nolog_list=(self.dm_password, self.admin_password)
        )
    finally:
        # always remove the temp files holding secrets
        os.remove(p12_tmpfile_name)
        os.remove(cfg_file)

    shutil.move(paths.KRA_BACKUP_KEYS_P12, paths.KRACERT_P12)
    export_kra_agent_pem()

    self.log.debug("completed creating KRA instance")
# ---- command-line parsing and configuration loading ----
parser = argparse.ArgumentParser(description=description)
# the positional argument for the configuration file
parser.add_argument("inifile", help="The configuration (.ini) file")
parser.add_argument("--condor-submit", action="store_true", default=False,
                    help="Automatically submit the Condor DAG")
parser.add_argument("-r", "--run-path", dest="runpath", default=None,
                    help="Set the directory to run the pipeline in (overwrites any value in the config.ini file)")
parser.add_argument("-p", "--pulsar", dest="pulsarlist", action='append',
                    default=None,
                    help="A pulsar name to search for rather than all pulsars given in a parameter file directory (this can be specified multiple times to search for more than one pulsar).")

opts = parser.parse_args()

# check that at least the ini file has been given
inifile = opts.inifile

# parser .ini file
# Fix: a bare "except:" also swallowed SystemExit/KeyboardInterrupt, and
# readfp(open(...)) leaked the file handle (readfp is also removed in
# Python 3.12).  Catch Exception only and use a context manager.
try:
    cp = ConfigParser()
    cp.optionxform = str
    with open(inifile) as fp:
        if hasattr(cp, 'read_file'):
            cp.read_file(fp)
        else:  # pragma: no cover - Python 2 fallback
            cp.readfp(fp)
except Exception:
    print("Error... problem parsing '%s' configuration file" % inifile,
          file=sys.stderr)
    sys.exit(1)

if opts.runpath is not None:
    cp.set('analysis', 'run_dir', opts.runpath)

# Check if we're running in automated mode or not (missing section/option
# or a malformed boolean falls back to non-automated)
try:
    automated = cp.getboolean('analysis', 'autonomous')
except Exception:
    automated = False

# Check if configuration file says to submit the DAG
def __spawn_instance(self):
    """
    Create and configure a new KRA instance using pkispawn.
    Creates a configuration file with IPA-specific parameters
    and passes it to the base class to call pkispawn
    """
    # Create an empty and secured file (owned by the service user so
    # pkispawn can read it)
    (cfg_fd, cfg_file) = tempfile.mkstemp()
    os.close(cfg_fd)
    pent = pwd.getpwnam(self.service_user)
    os.chown(cfg_file, pent.pw_uid, pent.pw_gid)

    # Create KRA configuration
    config = ConfigParser()
    config.optionxform = str
    config.add_section("KRA")

    # Security Domain Authentication
    config.set("KRA", "pki_security_domain_https_port", "443")
    config.set("KRA", "pki_security_domain_password", self.admin_password)
    config.set("KRA", "pki_security_domain_user", self.admin_user)

    # issuing ca
    config.set("KRA", "pki_issuing_ca_uri",
               "https://%s" % ipautil.format_netloc(self.fqdn, 443))

    # Server
    config.set("KRA", "pki_enable_proxy", "True")
    config.set("KRA", "pki_restart_configured_instance", "False")
    config.set("KRA", "pki_backup_keys", "True")
    config.set("KRA", "pki_backup_password", self.admin_password)

    # Client security database
    config.set("KRA", "pki_client_database_dir", self.agent_db)
    config.set("KRA", "pki_client_database_password", self.admin_password)
    config.set("KRA", "pki_client_database_purge", "False")
    config.set("KRA", "pki_client_pkcs12_password", self.admin_password)

    # Administrator
    config.set("KRA", "pki_admin_name", self.admin_user)
    config.set("KRA", "pki_admin_uid", self.admin_user)
    config.set("KRA", "pki_admin_email", "root@localhost")
    config.set("KRA", "pki_admin_password", self.admin_password)
    config.set("KRA", "pki_admin_nickname", "ipa-ca-agent")
    config.set("KRA", "pki_admin_subject_dn",
               str(DN(('cn', 'ipa-ca-agent'), self.subject_base)))
    config.set("KRA", "pki_import_admin_cert", "True")
    config.set("KRA", "pki_admin_cert_file", paths.ADMIN_CERT_PATH)
    config.set("KRA", "pki_client_admin_cert_p12", paths.DOGTAG_ADMIN_P12)

    # Directory server
    config.set("KRA", "pki_ds_ldap_port", "389")
    config.set("KRA", "pki_ds_password", self.dm_password)
    config.set("KRA", "pki_ds_base_dn", six.text_type(self.basedn))
    config.set("KRA", "pki_ds_database", "ipaca")
    config.set("KRA", "pki_ds_create_new_db", "False")

    self._use_ldaps_during_spawn(config)

    # Certificate subject DNs
    config.set("KRA", "pki_subsystem_subject_dn",
               str(DN(('cn', 'CA Subsystem'), self.subject_base)))
    config.set("KRA", "pki_ssl_server_subject_dn",
               str(DN(('cn', self.fqdn), self.subject_base)))
    config.set("KRA", "pki_audit_signing_subject_dn",
               str(DN(('cn', 'KRA Audit'), self.subject_base)))
    config.set(
        "KRA", "pki_transport_subject_dn",
        str(DN(('cn', 'KRA Transport Certificate'), self.subject_base)))
    config.set(
        "KRA", "pki_storage_subject_dn",
        str(DN(('cn', 'KRA Storage Certificate'), self.subject_base)))

    # Certificate nicknames
    # Note that both the server certs and subsystem certs reuse
    # the ca certs.
    config.set("KRA", "pki_subsystem_nickname",
               "subsystemCert cert-pki-ca")
    config.set("KRA", "pki_ssl_server_nickname",
               "Server-Cert cert-pki-ca")
    config.set("KRA", "pki_audit_signing_nickname",
               "auditSigningCert cert-pki-kra")
    config.set("KRA", "pki_transport_nickname",
               "transportCert cert-pki-kra")
    config.set("KRA", "pki_storage_nickname",
               "storageCert cert-pki-kra")

    # Shared db settings
    # Needed because CA and KRA share the same database
    # We will use the dbuser created for the CA
    config.set("KRA", "pki_share_db", "True")
    config.set(
        "KRA", "pki_share_dbuser_dn",
        str(DN(('uid', 'pkidbuser'), ('ou', 'people'), ('o', 'ipaca'))))

    _p12_tmpfile_handle, p12_tmpfile_name = tempfile.mkstemp(dir=paths.TMP)

    if self.clone:
        # Clone install: reuse the PKCS#12 material from the master.
        krafile = self.pkcs12_info[0]
        shutil.copy(krafile, p12_tmpfile_name)
        pent = pwd.getpwnam(self.service_user)
        os.chown(p12_tmpfile_name, pent.pw_uid, pent.pw_gid)

        # Security domain registration
        config.set("KRA", "pki_security_domain_hostname", self.master_host)
        config.set("KRA", "pki_security_domain_https_port", "443")
        config.set("KRA", "pki_security_domain_user", self.admin_user)
        config.set("KRA", "pki_security_domain_password", self.admin_password)

        # Clone
        config.set("KRA", "pki_clone", "True")
        config.set("KRA", "pki_clone_pkcs12_path", p12_tmpfile_name)
        config.set("KRA", "pki_clone_pkcs12_password", self.dm_password)
        config.set("KRA", "pki_clone_setup_replication", "False")
        config.set(
            "KRA", "pki_clone_uri",
            "https://%s" % ipautil.format_netloc(self.master_host, 443))
    else:
        # the admin cert file is needed for the first instance of KRA
        cert = DogtagInstance.get_admin_cert(self)

        # First make sure that the directory exists
        parentdir = os.path.dirname(paths.ADMIN_CERT_PATH)
        if not os.path.exists(parentdir):
            os.makedirs(parentdir)

        with open(paths.ADMIN_CERT_PATH, "w") as admin_path:
            admin_path.write(cert)

    # Generate configuration file
    with open(cfg_file, "w") as f:
        config.write(f)

    try:
        DogtagInstance.spawn_instance(self, cfg_file,
                                      nolog_list=(self.dm_password,
                                                  self.admin_password))
    finally:
        # always remove the temp files holding secrets
        os.remove(p12_tmpfile_name)
        os.remove(cfg_file)

    shutil.move(paths.KRA_BACKUP_KEYS_P12, paths.KRACERT_P12)
    export_kra_agent_pem()

    self.log.debug("completed creating KRA instance")
dest="pulsarlist", action='append', default=None, help= "A pulsar name to search for rather than all pulsars given in a parameter file directory (this can be specified multiple times to search for more than one pulsar)." ) opts = parser.parse_args() # check that at least the ini file has been given inifile = opts.inifile # parser .ini file try: cp = ConfigParser() cp.optionxform = str cp.readfp(open(inifile)) except: print("Error... problem parsing '%s' configuration file" % inifile, file=sys.stderr) sys.exit(1) if opts.runpath is not None: cp.set('analysis', 'run_dir', opts.runpath) # Check if we're running in automated mode or not try: automated = cp.getboolean('analysis', 'autonomous') except: automated = False