def main():
    """rosa db-create.

    Create a Rosie suite database for every "db.PREFIX" entry found under
    the [rosie-db] section of the site/user configuration, pairing each
    database URL with its "repos.PREFIX" repository path.
    """
    # Fetch the configuration once and reuse it; the original fetched
    # ResourceLocator.default().get_conf() twice for no benefit.
    conf = ResourceLocator.default().get_conf()
    db_conf = conf.get(["rosie-db"])
    if db_conf is None:
        # No [rosie-db] section configured: nothing to create.
        return
    opts = RoseOptionParser().parse_args()[0]
    reporter = Reporter(opts.verbosity - opts.quietness)
    init = RosieDatabaseInitiator(event_handler=reporter)
    for key in db_conf.value:
        if key.startswith("db."):
            # "db.PREFIX" holds the DB URL; "repos.PREFIX" the repository.
            prefix = key.replace("db.", "", 1)
            db_url = conf.get_value(["rosie-db", "db." + prefix])
            repos_path = conf.get_value(["rosie-db", "repos." + prefix])
            init(db_url, repos_path)
def _search(cls, users, attr_idx):
    """Search LDAP directory for the indexed attr for users.

    Attr index can be UID_IDX, CN_IDX or MAIL_IDX. Return a list
    containing the results.

    """
    conf = ResourceLocator.default().get_conf()
    uri = conf.get_value(["rosa-ldap", "uri"])
    binddn = conf.get_value(["rosa-ldap", "binddn"])
    passwd = ""
    passwd_file = conf.get_value(
        ["rosa-ldap", "password-file"], cls.PASSWD_FILE)
    if passwd_file:
        # BUG FIX: use a context manager so the password file handle is
        # closed promptly rather than left to garbage collection.
        with open(os.path.expanduser(passwd_file)) as handle:
            passwd = handle.read().strip()
    basedn = conf.get_value(["rosa-ldap", "basedn"], "")
    # Build an LDAP OR-filter matching any of the given user IDs.
    filter_str = "(|(uid=" + ")(uid=".join(users) + "))"
    filter_more_str = conf.get_value(["rosa-ldap", "filter-more"], "")
    if filter_more_str:
        # AND the site-configured extra filter onto the uid filter.
        filter_str = "(&" + filter_str + filter_more_str + ")"
    user_attr_str = conf.get_value(["rosa-ldap", "attrs"], cls.USER_ATTRS)
    attr = user_attr_str.split()[attr_idx]
    tls_ca_file = conf.get_value(["rosa-ldap", "tls-ca-file"])
    if tls_ca_file:
        ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_ca_file)
    conn = ldap.initialize(uri)
    conn.bind_s(binddn, passwd)
    results = conn.search_s(basedn, ldap.SCOPE_SUBTREE, filter_str, [attr])
    conn.unbind()
    # Each result is (dn, {attr: [value, ...]}); return the first value.
    return [result[1][attr][0] for result in results]
def expand(self, names=None, rank_method=None, thresholds=None):
    """Expand each name in names, and look up rank method for each name.

    names, if specified, should be a list of host names or known groups in
    the site / user configuration file. Otherwise, the default setting in
    the site / user configuration file will be used.

    rank_method, if specified, should be the name of a supported ranking
    method. If not specified, use the default specified for a host group.
    If the default differs in hosts, use "load:15".

    Return a tuple (host_names, rank_method, thresholds).
    """
    conf = ResourceLocator.default().get_conf()
    if not names:
        # Fall back to the configured default name list.
        node = conf.get(["rose-host-select", "default"], no_ignore=True)
        if node:
            names = [node.value]
        else:
            raise NoHostError()
    host_names = []
    rank_method_set = set()
    thresholds_set = set()
    # Treat "names" as a work stack: group names push their members back
    # onto the stack; anything with no "group{...}" entry is a host name.
    while names:
        name = names.pop()
        key = "group{" + name + "}"
        value = conf.get_value(["rose-host-select", key])
        if value is None:
            # Not a known group: assume it is a host name.
            host_names.append(name)
        else:
            for val in value.split():
                names.append(val)
            if rank_method is None:
                # Collect each group's configured rank method (or the
                # default) so we can detect disagreement later.
                key = "method{" + name + "}"
                method_str = conf.get_value(["rose-host-select", key])
                if method_str is None:
                    rank_method_set.add(self.RANK_METHOD_DEFAULT)
                else:
                    rank_method_set.add(method_str)
            if thresholds is None:
                key = "thresholds{" + name + "}"
                threshold_str = conf.get_value(["rose-host-select", key])
                if threshold_str is None:
                    thresholds_set.add(())
                else:
                    # Sorted tuple makes equal threshold sets compare equal.
                    thresholds_set.add(
                        tuple(sorted(shlex.split(threshold_str))))
    # If default rank method differs in hosts, use load:15.
    if rank_method is None:
        if len(rank_method_set) == 1:
            rank_method = rank_method_set.pop()
        else:
            rank_method = self.RANK_METHOD_DEFAULT
    # Only adopt group thresholds when all groups agree on them.
    if thresholds is None:
        if len(thresholds_set) == 1:
            thresholds = thresholds_set.pop()
    return host_names, rank_method, thresholds
def _verify_users(self, status, path, txn_owner, txn_access_list,
                  bad_changes):
    """Check txn_owner and txn_access_list.

    For any invalid users, append to bad_changes and return True.

    """
    # The owner and names in access list must be real users
    user_tool_name = ResourceLocator.default().get_conf().get_value(
        ["rosa-svn", "user-tool"])
    if not user_tool_name:
        return False
    user_tool = self.usertools_manager.get_handler(user_tool_name)
    # The wildcard "*" entry is not a real user, so never verify it.
    candidates = {txn_owner, *txn_access_list}
    candidates.discard("*")
    bad_users = user_tool.verify_users(candidates)
    for invalid_user in bad_users:
        if invalid_user == txn_owner:
            bad_changes.append(
                BadChange(status, path, BadChange.USER,
                          "owner=" + invalid_user))
        if invalid_user in txn_access_list:
            bad_changes.append(
                BadChange(status, path, BadChange.USER,
                          "access-list=" + invalid_user))
    return bool(bad_users)
def get_prefix_default(cls):
    """Return the default prefix."""
    value = ResourceLocator.default().get_conf().get_value(
        ["rosie-id", "prefix-default"])
    # Missing, empty or whitespace-only settings are all invalid.
    if value is None or not value.strip():
        raise SuiteIdPrefixError()
    return shlex.split(value)[0]
def get_suite_log_url(self, user_name, suite_name):
    """Return the "rose bush" URL for a user's suite.

    Fall back to the [rose-suite-log]rose-bush site/user setting, then to
    a "file://" URL of the suite directory itself.
    Raise NoSuiteLogError if the suite directory does not exist.
    """
    prefix = "~"
    if user_name:
        prefix += user_name
    suite_d = os.path.join(prefix, self.get_suite_dir_rel(suite_name))
    suite_d = os.path.expanduser(suite_d)
    if not os.path.isdir(suite_d):
        raise NoSuiteLogError(user_name, suite_name)
    rose_bush_url = None
    for f_name in glob(os.path.expanduser("~/.metomi/rose-bush*.status")):
        status = {}
        # BUG FIX: close the status file deterministically; the original
        # "for line in open(f_name)" left the handle to garbage collection.
        with open(f_name) as handle:
            for line in handle:
                key, value = line.strip().split("=", 1)
                status[key] = value
        if status.get("host"):
            rose_bush_url = "http://" + status["host"]
            if status.get("port"):
                rose_bush_url += ":" + status["port"]
            rose_bush_url += "/"
            break
    if not rose_bush_url:
        conf = ResourceLocator.default().get_conf()
        rose_bush_url = conf.get_value(
            ["rose-suite-log", "rose-bush"])
    if not rose_bush_url:
        # No Rose Bush service known: point at the directory directly.
        return "file://" + suite_d
    if not rose_bush_url.endswith("/"):
        rose_bush_url += "/"
    if not user_name:
        user_name = pwd.getpwuid(os.getuid()).pw_name
    return rose_bush_url + "/".join(
        ["taskjobs", user_name, suite_name])
def get_cmd(self, key, *args):
    """Return default options and arguments of a known command as a list.

    The command for "key" is resolved once and cached in self.cmds.
    Resolution order:
    1. The [external] <key> setting in the site/user configuration.
    2. The first non-empty environment variable named in
       RosePopener.ENVS_OF_CMDS[key].
    3. The built-in default in RosePopener.CMDS[key].

    key: must be a key of RosePopener.CMDS
    args: if specified, will be added to the returned list
    """
    if key not in self.cmds:
        # 1. Site/user configuration takes precedence.
        conf_node = ResourceLocator.default().get_conf().get(
            ["external", key], no_ignore=True)
        if conf_node is not None:
            self.cmds[key] = shlex.split(conf_node.value)
        else:
            # 2. First defined, non-empty environment variable wins.
            for env_name in self.ENVS_OF_CMDS.get(key, []):
                env_value = os.getenv(env_name)
                if env_value:  # not None, not null str
                    self.cmds[key] = shlex.split(env_value)
                    break
            else:
                # 3. Fall back to the built-in default.
                self.cmds[key] = self.CMDS[key]
    return self.cmds[key] + list(args)
def __init__(self, prefixes=None, prompt_func=None, popen=None,
             event_handler=None):
    """Initialise a Rosie web-service client.

    Build one RosieWSClientAuthManager per "prefix-ws.*" entry in the
    [rosie-id] site/user configuration section, then activate the given
    prefixes (or the configured/available defaults).
    Raise RosieWSClientConfError if no [rosie-id] section is configured.
    """
    if not event_handler:
        event_handler = Reporter()
    if not popen:
        popen = RosePopener(event_handler=event_handler)
    self.event_handler = event_handler
    self.popen = popen
    self.prompt_func = prompt_func
    # Populated later via set_prefixes().
    self.prefixes = []
    self.unreachable_prefixes = []
    self.auth_managers = {}
    conf = ResourceLocator.default().get_conf()
    conf_rosie_id = conf.get(["rosie-id"], no_ignore=True)
    if conf_rosie_id is None:
        raise RosieWSClientConfError()
    # One auth manager per configured web-service prefix.
    for key, node in conf_rosie_id.value.items():
        if node.is_ignored() or not key.startswith("prefix-ws."):
            continue
        prefix = key.replace("prefix-ws.", "")
        self.auth_managers[prefix] = RosieWSClientAuthManager(
            prefix, popen=self.popen, prompt_func=self.prompt_func)
    if not prefixes:
        # Default prefixes: configured list, else everything we know about.
        prefixes_str = conf_rosie_id.get_value(["prefixes-ws-default"])
        if prefixes_str:
            prefixes = shlex.split(prefixes_str)
        else:
            prefixes = sorted(self.auth_managers.keys())
    self.set_prefixes(prefixes)
def _get_conf_value(self, name, default=None):
    """Return the value of a named conf setting for this prefix."""
    key = "prefix-%s.%s" % (name, self.prefix)
    value = ResourceLocator.default().get_conf().get_value(
        ["rosie-id", key], default=default)
    # Expand any environment variables in a non-empty value.
    if value:
        return env_var_process(value)
    return value
def get_prefix_location(cls, prefix=None):
    """Return the repository location of a given prefix."""
    if prefix is None:
        prefix = cls.get_prefix_default()
    location = ResourceLocator.default().get_conf().get_value(
        ["rosie-id", "prefix-location." + prefix])
    if location is None:
        raise SuiteIdPrefixError(prefix)
    # Normalise away any trailing slashes.
    return location.rstrip("/")
def get_prefix_web(cls, prefix=None):
    """Return a url for the prefix repository source url."""
    if prefix is None:
        prefix = cls.get_prefix_default()
    url = ResourceLocator.default().get_conf().get_value(
        ["rosie-id", "prefix-web." + prefix])
    if url is None:
        raise SuiteIdPrefixError(prefix)
    # Normalise away any trailing slashes.
    return url.rstrip("/")
def run(self, app_runner, conf_tree, opts, args, uuid, work_files): """Implement the "rose ana" command""" # Get config file option for user-specified method paths method_paths = [ os.path.join(os.path.dirname(__file__), USRCOMPARISON_DIRNAME) ] conf = ResourceLocator.default().get_conf() my_conf = conf.get_value(["rose-ana", "method-path"]) if my_conf: for item in my_conf.split(): method_paths.append(item) # Initialise the analysis engine engine = Analyse( conf_tree.node, opts, args, method_paths, reporter=app_runner.event_handler, popen=app_runner.popen, ) # Initialise a database session rose_ana_task_name = os.getenv("ROSE_TASK_NAME") kgo_db = KGODatabase() # Add an entry for this task, with a non-zero status code kgo_db.enter_task(rose_ana_task_name, 1) # Run the analysis num_failed, tasks = engine.analyse() # Update the database to indicate that the task succeeded kgo_db.enter_task(rose_ana_task_name, 0) # Update the database for task in tasks: # The primary key in the database is composed from both the # rose_ana app name and the task index (to make it unique) app_task = "{0} ({1})".format(rose_ana_task_name, task.name) # Include an indication of what extract/comparison was performed comparison = "{0} : {1} : {2}".format( task.comparison, task.extract, getattr(task, "subextract", "")) kgo_db.enter_comparison( app_task, task.kgo1file, task.resultfile, task.userstatus, comparison, ) if num_failed != 0: raise TestsFailedException(num_failed)
def initialize(self, props, prefix, db_url, service_root):
    """Set up this handler for one configured prefix."""
    self.props = props
    self.prefix = prefix
    # Optional web view of the prefix repository, from the site/user conf.
    node = ResourceLocator.default().get_conf().get(
        ["rosie-id", "prefix-web." + self.prefix])
    self.source_url = "" if node is None else node.value
    self.dao = metomi.rosie.db.DAO(db_url)
    self.service_root = service_root[:-1]  # remove the '?' regex aspect
def __init__(self, *args, **kwargs):
    """Initialise the option parser.

    If "prog" is supplied, it is parsed as "NAMESPACE UTIL" to locate the
    matching resources; otherwise the default resource locator is used.
    """
    # BUG FIX: kwargs is a dict, so hasattr(kwargs, "prog") was always
    # False and the "prog" branch could never trigger; membership tests
    # ("prog" in kwargs) are what was intended.
    if "prog" in kwargs:
        namespace, util = kwargs["prog"].split(None, 1)
        resource_loc = ResourceLocator(namespace=namespace, util=util)
    else:
        resource_loc = ResourceLocator.default()
    kwargs["prog"] = resource_loc.get_util_name()
    if "usage" not in kwargs:
        kwargs["usage"] = resource_loc.get_synopsis()
    OptionParser.__init__(self, *args, **kwargs)
    # Options common to all Rose command line utilities.
    self.add_my_options("debug_mode", "profile_mode", "quietness",
                        "verbosity")
def get_prefix_locations(cls):
    """Return a dict containing the known prefixes and their repository
    locations.

    """
    locations = {}
    rosie_id_node = ResourceLocator.default().get_conf().get(
        ["rosie-id"], no_ignore=True)
    if rosie_id_node is None:
        return locations
    head = "prefix-location."
    for key, node in rosie_id_node.value.items():
        # Skip ignored settings and unrelated keys.
        if not node.state and key.startswith(head):
            locations[key[len(head):]] = node.value
    return locations
def run(self, repos, revision, no_notification=False):
    """Update database with changes in a changeset.

    repos: path to the Subversion repository that received the commit.
    revision: revision number/identifier of the changeset.
    no_notification: if True, suppress trunk-change email notification.
    """
    # Lookup prefix of repos
    # Do nothing if prefix is not registered
    conf = ResourceLocator.default().get_conf()
    # BUG FIX: the original stored this lookup as "metomi.rosie.db_node",
    # mutating the imported metomi.rosie package namespace as a scratch
    # variable; use a plain local instead, and guard against a missing
    # [rosie-db] section (previously an AttributeError).
    rosie_db_node = conf.get(["rosie-db"], no_ignore=True)
    if rosie_db_node is None:
        return
    for key, node in rosie_db_node.value.items():
        if node.is_ignored() or not key.startswith("repos."):
            continue
        if os.path.realpath(repos) == os.path.realpath(node.value):
            prefix = key[len("repos."):]
            break
    else:
        return
    # Locate Rosie DB of repos
    dao = RosieWriteDAO(conf.get_value(["rosie-db", "db." + prefix]))
    # Date-time of this commit; force UTC so strptime/mktime agree with
    # the "UTC" suffix appended below.
    os.environ["TZ"] = "UTC"
    date_time = self._svnlook("date", "-r", revision, repos).decode()
    date, dtime, _ = date_time.split(None, 2)
    date = mktime(strptime(" ".join([date, dtime, "UTC"]), self.DATE_FMT))
    # Detail of changes
    changeset_attribs = {
        "repos": repos,
        "revision": revision,
        "prefix": prefix,
        "author": self._svnlook(
            "author", "-r", revision, repos).decode("utf-8").strip(),
        "date": date,
    }
    branch_attribs_dict = self._get_suite_branch_changes(repos, revision)
    for key, branch_attribs in sorted(branch_attribs_dict.items()):
        # Update known keys in suite info database meta table
        if branch_attribs["has_changed_known_keys_file"]:
            self._update_known_keys(dao, changeset_attribs)
        # Update suite info database
        self._update_info_db(dao, changeset_attribs, branch_attribs)
        # Notification on trunk changes
        # Notification on owner and access-list changes
        if not no_notification and branch_attribs["branch"] == "trunk":
            self._notify_trunk_changes(changeset_attribs, branch_attribs)
def _init_https_params(self):
    """Helper for __init__. Initialise HTTPS related parameters."""
    # BUG FIX (smell): res_loc was already the default instance, so
    # chaining res_loc.default() again was redundant; call get_conf()
    # on it directly.
    conf = ResourceLocator.default().get_conf()
    https_ssl_verify_mode_str = conf.get_value(
        ["rosie-id", "prefix-https-ssl-verify." + self.prefix])
    if https_ssl_verify_mode_str:
        # Setting is a Python literal (e.g. "True", "0").
        https_ssl_verify_mode = ast.literal_eval(https_ssl_verify_mode_str)
        self.requests_kwargs["verify"] = bool(https_ssl_verify_mode)
    https_ssl_cert_str = conf.get_value(
        ["rosie-id", "prefix-https-ssl-cert." + self.prefix])
    if https_ssl_cert_str:
        https_ssl_cert = shlex.split(https_ssl_cert_str)
        if len(https_ssl_cert) == 1:
            # Single path: combined cert file.
            self.requests_kwargs["cert"] = https_ssl_cert[0]
        else:
            # Two paths: (cert, key) pair.
            self.requests_kwargs["cert"] = tuple(https_ssl_cert[0:2])
def get_local_copy_root(cls, user=None):
    """Return the root directory for hosting the local suite copies."""
    value = ResourceLocator.default().get_conf().get_value(
        ["rosie-id", "local-copy-root"])
    if user:
        # N.B. Only default location at the moment.
        # In theory, we can try obtaining the setting from the user's
        # "~/.metomi/rose.conf", but it may contain environment variables
        # that are only correct in the user's environment.
        return os.path.expanduser(os.path.join("~" + user, "roses"))
    if value:
        # Site/user configured root, with environment variables expanded.
        return metomi.rose.env.env_var_process(value)
    return os.path.expanduser(os.path.join("~", "roses"))
def __init__(
    self, prefix, popen=None, prompt_func=None, event_handler=None
):
    """Initialise the authentication manager for one prefix.

    Raise UndefinedRosiePrefixWS if the prefix has no "ws" setting.
    """
    self.prefix = prefix
    root = self._get_conf_value("ws")
    if root is None:
        raise UndefinedRosiePrefixWS(self.prefix)
    if not root.endswith("/"):
        root += "/"
    self.root = root
    urlparse_res = urlparse(self.root)
    self.scheme = urlparse_res[0]
    self.host = urlparse_res[1]
    self.password_orig = None
    self.username_orig = None
    self.password = None
    self.username = None
    if popen is None:
        popen = RosePopener()
    self.popen = popen
    self.prompt_func = prompt_func
    if event_handler is None:
        self.event_handler = Reporter()
    else:
        self.event_handler = event_handler
    # BUG FIX (smell): res_loc was already the default instance; the
    # original chained res_loc.default() again before get_conf().
    res_loc = ResourceLocator.default()
    password_stores_str = res_loc.get_conf().get_value(
        keys=["rosie-id", "prefix-password-store." + self.prefix],
        default=self.PASSWORD_STORES_STR,
    )
    # Pick the first configured password store that is usable here.
    for password_store_name in shlex.split(password_stores_str):
        password_store_cls = self.PASSWORD_STORE_CLASSES.get(
            password_store_name
        )
        if password_store_cls is not None and password_store_cls.usable():
            self.password_store = password_store_cls()
            break
    else:
        self.password_store = None
    self.requests_kwargs = {}
    self._init_https_params()
def get_checksum_func(algorithm=None):
    """Return a checksum function suitable for get_checksum.

    "algorithm" can be "mtime+size" or the name of a hash object from
    hashlib. If "algorithm" is not specified, return function to do
    MD5 checksum.

    Raise ValueError(algorithm) if "algorithm" is not a recognised hash
    object.

    """
    if not algorithm:
        # Look up (and cache) the site/user configured default algorithm
        # the first time it is needed.
        global _DEFAULT_KEY
        if _DEFAULT_KEY is None:
            _DEFAULT_KEY = ResourceLocator.default().get_conf().get_value(
                ["checksum-method"], _DEFAULT_DEFAULT_KEY)
        algorithm = _DEFAULT_KEY
    if algorithm == MTIME_AND_SIZE:
        return _mtime_and_size
    # Accept e.g. "md5sum" as an alias for "md5".
    algorithm = algorithm.replace("sum", "")
    hashlib.new(algorithm)  # raise ValueError for a bad "algorithm" string
    return lambda source, *_: _get_hexdigest(algorithm, source)
def _run_conf(
        cls, key, default=None, host=None, conf_tree=None, r_opts=None):
    """Return the value of a setting given by a key for a given host. If
    r_opts is defined, we are already in a remote host, so there is no
    need to do a host match. Otherwise, the setting may be found in the
    run time configuration, or the default (i.e. site/user
    configuration). The value of each setting in the configuration would
    be in a line delimited list of PATTERN=VALUE pairs.
    """
    if r_opts is not None:
        return r_opts.get(key, default)
    if host is None:
        host = "localhost"
    # Runtime configuration first, then [rose-suite-run] in the
    # site/user configuration; first matching pattern wins.
    for conf, keys in [
            (conf_tree.node, []),
            (ResourceLocator.default().get_conf(), ["rose-suite-run"])]:
        if conf is None:
            continue
        node_value = conf.get_value(keys + [key])
        if node_value is None:
            continue
        for line in node_value.strip().splitlines():
            pattern, value = line.strip().split("=", 1)
            if (pattern.startswith("jinja2:")
                    or pattern.startswith("empy:")):
                # Pattern is itself a reference to a template variable.
                section, name = pattern.rsplit(":", 1)
                p_node = conf.get([section, name], no_ignore=True)
                # Values in "jinja2:*" and "empy:*" sections are quoted.
                pattern = ast.literal_eval(p_node.value)
            # Patterns are shell-style globs matched against the host.
            if fnmatchcase(host, pattern):
                return value.strip()
    return default
def select(
    self,
    names=None,
    rank_method=None,
    thresholds=None,
    ssh_cmd_timeout=None,
):
    """Return a list. Element 0 is most desirable.
    Each element of the list is a tuple (host, score).

    names: a list of known host groups or host names.
    rank_method: the ranking method. Can be one of:
                 load:1, load:5, load:15 (=load =default), fs:FS and
                 random. The "load" methods determines the load using the
                 average load as returned by the "uptime" command divided
                 by the number of CPUs. The "fs" method determines the
                 load using the usage in the file system specified by FS.
                 The "mem" method ranks by highest free memory. The
                 "random" method ranks everything by random.

    thresholds: a list of thresholds which each host must not exceed.
                Should be in the format rank_method:value, where
                rank_method is one of load:1, load:5, load:15 or fs:FS;
                and value is number that must be be exceeded.

    ssh_cmd_timeout: timeout of SSH commands to hosts. A float in
                     seconds.

    """
    # Resolve group names to host names and fill in defaults.
    host_names, rank_method, thresholds = self.expand(
        names, rank_method, thresholds)
    # Load scorers, ranking and thresholds
    rank_method_arg = None
    if rank_method:
        if ":" in rank_method:
            # e.g. "fs:/tmp" -> method "fs", argument "/tmp".
            rank_method, rank_method_arg = rank_method.split(":", 1)
    else:
        rank_method = self.RANK_METHOD_DEFAULT
    rank_conf = ScorerConf(self.get_scorer(rank_method), rank_method_arg)
    self.handle_event(RankMethodEvent(rank_method, rank_method_arg))
    threshold_confs = []
    if thresholds:
        for threshold in thresholds:
            method = self.RANK_METHOD_DEFAULT
            method_arg = None
            value = threshold
            if ":" in threshold:
                # "METHOD[:ARG]:VALUE": the value is the last field.
                head, value = threshold.rsplit(":", 1)
                method = head
                if ":" in head:
                    method, method_arg = head.split(":", 1)
            try:
                float(value)
            except ValueError:
                # Threshold value must be numeric.
                raise ValueError(threshold)
            scorer = self.get_scorer(method)
            if method_arg is None:
                method_arg = scorer.ARG
            threshold_conf = ScorerConf(
                self.get_scorer(method), method_arg, value)
            threshold_confs.append(threshold_conf)
    if ssh_cmd_timeout is None:
        conf = ResourceLocator.default().get_conf()
        ssh_cmd_timeout = float(
            conf.get_value(
                ["rose-host-select", "timeout"], self.SSH_CMD_TIMEOUT))
    # Collapse any local-host aliases into a single local host entry.
    host_name_list = list(host_names)
    host_names = []
    for host_name in host_name_list:
        if self.is_local_host(host_name):
            if self.get_local_host() not in host_names:
                host_names.append(self.get_local_host())
        else:
            host_names.append(host_name)
    # Random selection with no thresholds. Return the 1st available host.
    if rank_conf.method == self.RANK_METHOD_RANDOM and not threshold_confs:
        shuffle(host_names)
        for host_name in host_names:
            if self.is_local_host(host_name):
                return [("localhost", 1)]
            # Probe the host with a trivial remote command.
            command = self.popen.get_cmd("ssh", host_name, "true")
            proc = self.popen.run_bg(*command, preexec_fn=os.setpgrp)
            time0 = time()
            while (proc.poll() is None
                   and time() - time0 <= ssh_cmd_timeout):
                sleep(self.SSH_CMD_POLL_DELAY)
            if proc.poll() is None:
                # Still running after the timeout: kill the process group.
                os.killpg(proc.pid, signal.SIGTERM)
                proc.wait()
                self.handle_event(TimedOutHostEvent(host_name))
            elif proc.wait():
                self.handle_event(
                    HostSelectCommandFailedEvent(
                        proc.returncode, host_name))
            else:
                return [(host_name, 1)]
        else:
            # No host responded successfully.
            raise NoHostSelectError()
    # ssh to each host to return its score(s).
    host_proc_dict = {}
    for host_name in sorted(host_names):
        # build host-select-client command
        command = []
        if not self.is_local_host(host_name):
            command_args = []
            command_args.append(host_name)
            command = self.popen.get_cmd("ssh", *command_args)
            # pass through CYLC_VERSION to support use of cylc wrapperf
            # script
            try:
                import cylc.flow
            except ModuleNotFoundError:
                pass
            else:
                command.extend(
                    ['env', f'CYLC_VERSION={cylc.flow.__version__}'])
        command.extend(["rose", "host-select-client"])
        # build list of metrics to obtain for each host
        metrics = rank_conf.get_command()
        for threshold_conf in threshold_confs:
            for metric in threshold_conf.get_command():
                if metric not in metrics:
                    metrics.append(metric)
        # convert metrics list to JSON stdin
        stdin = '\n***start**\n' + json.dumps(metrics) + '\n**end**\n'
        # fire off host-select-client processes
        proc = self.popen.run_bg(
            *command, stdin=stdin, preexec_fn=os.setpgrp)
        proc.stdin.write(stdin.encode('UTF-8'))
        proc.stdin.flush()
        host_proc_dict[host_name] = (proc, metrics)
    # Retrieve score for each host name
    host_score_list = []
    time0 = time()
    while host_proc_dict:
        sleep(self.SSH_CMD_POLL_DELAY)
        for host_name, (proc, metrics) in list(host_proc_dict.items()):
            if proc.poll() is None:
                # Still running: check again next poll cycle.
                score = None
            elif proc.wait():
                # Client exited non-zero: drop the host.
                # NOTE(review): stdout/stderr are captured but unused
                # here — confirm whether they should be reported.
                stdout, stderr = (f.decode() for f in proc.communicate())
                self.handle_event(
                    HostSelectCommandFailedEvent(
                        proc.returncode, host_name))
                host_proc_dict.pop(host_name)
            else:
                out = proc.communicate()[0].decode()
                out = _deserialise(metrics, json.loads(out.strip()))
                host_proc_dict.pop(host_name)
                # Apply each threshold; a failed threshold excludes the
                # host (for-else: only rank hosts that pass them all).
                for threshold_conf in threshold_confs:
                    try:
                        score = threshold_conf.command_out_parser(
                            out, metrics)
                        is_bad = threshold_conf.check_threshold(score)
                    except ValueError:
                        is_bad = True
                        score = None
                    if is_bad:
                        self.handle_event(
                            HostThresholdNotMetEvent(
                                host_name, threshold_conf, score))
                        break
                else:
                    try:
                        score = rank_conf.command_out_parser(out, metrics)
                        host_score_list.append((host_name, score))
                    except ValueError:
                        score = None
                    self.handle_event(
                        HostSelectScoreEvent(host_name, score))
        if time() - time0 > ssh_cmd_timeout:
            break
    # Report timed out hosts
    for host_name, (proc, _) in sorted(host_proc_dict.items()):
        self.handle_event(TimedOutHostEvent(host_name))
        os.killpg(proc.pid, signal.SIGTERM)
        proc.wait()
    if not host_score_list:
        raise NoHostSelectError()
    # Scorer SIGN determines whether high or low scores are preferable.
    host_score_list.sort(
        key=lambda a: a[1],
        reverse=rank_conf.scorer.SIGN < 0)
    return host_score_list
def __init__(self, service_root_mode=False, *args, **kwargs):
    """Initialise the Rosie Disco Tornado web application.

    service_root_mode: if True, serve under a "NAMESPACE-UTIL/" root URL
    rather than at "/".
    """
    self.stopping = False
    self.service_root_mode = service_root_mode
    self.props = {}
    rose_conf = ResourceLocator.default().get_conf()
    self.props["title"] = rose_conf.get_value(
        ["rosie-disco", "title"], self.TITLE)
    self.props["host_name"] = rose_conf.get_value(["rosie-disco", "host"])
    if self.props["host_name"] is None:
        self.props["host_name"] = HostSelector().get_local_host()
        # Use the short host name (strip the domain part).
        if self.props["host_name"] and "." in self.props["host_name"]:
            self.props["host_name"] = (
                self.props["host_name"].split(".", 1)[0])
    self.props["rose_version"] = ResourceLocator.default().get_version()
    # Get location of HTML files from package
    rosie_lib = os.path.join(
        pkg_resources.resource_filename('metomi.rosie', 'lib'),
        "html", "template", "rosie-disco")
    # Autoescape markup to prevent code injection from user inputs.
    self.props["template_env"] = jinja2.Environment(
        autoescape=jinja2.select_autoescape(
            enabled_extensions=("html", "xml"), default_for_string=True),
        loader=jinja2.FileSystemLoader(rosie_lib))
    # Map each "db.PREFIX" entry under [rosie-db] to its database URL.
    db_url_map = {}
    for key, node in rose_conf.get(["rosie-db"]).value.items():
        if key.startswith("db.") and key[3:]:
            db_url_map[key[3:]] = node.value
    self.db_url_map = db_url_map
    # Specify the root URL for the handlers and template.
    ROOT = "%s-%s" % (self.NAMESPACE, self.UTIL)
    service_root = r"/?"
    if self.service_root_mode:
        service_root = service_root.replace("?", ROOT + r"/?")
    # Set-up the Tornado application request-handling structure.
    prefix_handlers = []
    class_args = {"props": self.props}
    root_class_args = dict(class_args)  # mutable so copy for safety
    root_class_args.update({"db_url_map": self.db_url_map})
    root_handler = (service_root, RosieDiscoServiceRoot, root_class_args)
    # One set of handlers (info/get/hello/search/query) per prefix.
    for key, db_url in self.db_url_map.items():
        prefix_class_args = dict(class_args)  # mutable so copy for safety
        prefix_class_args.update({
            "prefix": key,
            "db_url": db_url,
            "service_root": service_root,
        })
        handler = (service_root + key + r"/?", RosieDiscoService,
                   prefix_class_args)
        get_handler = (service_root + key + r"/get_(.+)", GetHandler,
                       prefix_class_args)
        hello_handler = (service_root + key + r"/hello/?", HelloHandler,
                         prefix_class_args)
        search_handler = (service_root + key + r"/search", SearchHandler,
                          prefix_class_args)
        query_handler = (service_root + key + r"/query", QueryHandler,
                         prefix_class_args)
        prefix_handlers.extend([
            handler, get_handler, hello_handler, search_handler,
            query_handler
        ])
    handlers = [root_handler] + prefix_handlers
    settings = dict(
        autoreload=True,
        static_path=ResourceLocator.default().get_util_home(
            "lib", "html", "static"),
    )
    super(RosieDiscoServiceApplication, self).__init__(
        handlers, **settings)
def run_impl(self, opts, args, uuid, work_files): # Log file, temporary if hasattr(self.event_handler, "contexts"): t_file = TemporaryFile() log_context = ReporterContext(None, self.event_handler.VV, t_file) self.event_handler.contexts[uuid] = log_context # Check suite engine specific compatibility self.suite_engine_proc.check_global_conf_compat() # Suite name from the current working directory if opts.conf_dir: self.fs_util.chdir(opts.conf_dir) opts.conf_dir = os.getcwd() # --remote=KEY=VALUE,... if opts.remote: # opts.name always set for remote. return self._run_remote(opts, opts.name) conf_tree = self.config_load(opts) self.fs_util.chdir(conf_tree.conf_dirs[0]) suite_name = opts.name if not opts.name: suite_name = os.path.basename(os.getcwd()) # Check suite.rc #! line for template scheme templ_scheme = "jinja2" if self.suite_engine_proc.SUITE_CONF in conf_tree.files: suiterc_path = os.path.join( conf_tree.files[self.suite_engine_proc.SUITE_CONF], self.suite_engine_proc.SUITE_CONF) with open(suiterc_path) as fh: line = fh.readline() if line.startswith("#!"): templ_scheme = line[2:].strip().lower() suite_section = (templ_scheme + ':' + self.suite_engine_proc.SUITE_CONF) extra_defines = [] if opts.defines_suite: for define in opts.defines_suite: extra_defines.append("[" + suite_section + "]" + define) # Automatic Rose constants # ROSE_ORIG_HOST: originating host # ROSE_VERSION: Rose version (not retained in run_mode=="reload") # Suite engine version my_rose_version = ResourceLocator.default().get_version() suite_engine_key = self.suite_engine_proc.get_version_env_name() if opts.run_mode in ["reload", "restart"]: prev_config_path = self.suite_engine_proc.get_suite_dir( suite_name, "log", "rose-suite-run.conf") prev_config = ConfigLoader()(prev_config_path) suite_engine_version = prev_config.get_value( ["env", suite_engine_key]) else: suite_engine_version =\ self.suite_engine_proc.get_version().decode() resloc = ResourceLocator.default() auto_items = 
[(suite_engine_key, suite_engine_version), ("ROSE_ORIG_HOST", self.host_selector.get_local_host()), ("ROSE_SITE", resloc.get_conf().get_value(['site'], '')), ("ROSE_VERSION", resloc.get_version())] for key, val in auto_items: requested_value = conf_tree.node.get_value(["env", key]) if requested_value: if key == "ROSE_VERSION" and val != requested_value: exc = VersionMismatchError(requested_value, val) raise ConfigValueError(["env", key], requested_value, exc) val = requested_value else: conf_tree.node.set(["env", key], val, state=conf_tree.node.STATE_NORMAL) extra_defines.append('[%s]%s="%s"' % (suite_section, key, val)) # Pass automatic Rose constants as suite defines self.conf_tree_loader.node_loader.load(extra_defines, conf_tree.node) # See if suite is running or not if opts.run_mode == "reload": # Check suite is running self.suite_engine_proc.get_suite_contact(suite_name) else: self.suite_engine_proc.check_suite_not_running(suite_name) # Install the suite to its run location suite_dir_rel = self._suite_dir_rel(suite_name) # Unfortunately a large try/finally block to ensure a temporary folder # created in validate only mode is cleaned up. 
Exceptions are not # caught here try: # Process Environment Variables environ = self.config_pm(conf_tree, "env") if opts.validate_suite_only_mode: temp_dir = mkdtemp() suite_dir = os.path.join(temp_dir, suite_dir_rel) os.makedirs(suite_dir, 0o0700) else: suite_dir = os.path.join(os.path.expanduser("~"), suite_dir_rel) suite_conf_dir = os.getcwd() locs_conf = ConfigNode() if opts.new_mode: if os.getcwd() == suite_dir: raise NewModeError("PWD", os.getcwd()) elif opts.run_mode in ["reload", "restart"]: raise NewModeError("--run", opts.run_mode) self.suite_run_cleaner.clean(suite_name) if os.getcwd() != suite_dir: if opts.run_mode == "run": self._run_init_dir(opts, suite_name, conf_tree, locs_conf=locs_conf) os.chdir(suite_dir) # Housekeep log files now_str = None if not opts.install_only_mode and not opts.local_install_only_mode: now_str = datetime.utcnow().strftime("%Y%m%dT%H%M%SZ") self._run_init_dir_log(opts, now_str) self.fs_util.makedirs("log/suite") # Rose configuration and version logs self.fs_util.makedirs("log/rose-conf") run_mode = opts.run_mode if run_mode not in ["reload", "restart", "run"]: run_mode = "run" mode = run_mode if opts.validate_suite_only_mode: mode = "validate-suite-only" elif opts.install_only_mode: mode = "install-only" elif opts.local_install_only_mode: mode = "local-install-only" prefix = "rose-conf/%s-%s" % (strftime("%Y%m%dT%H%M%S"), mode) # Dump the actual configuration as rose-suite-run.conf ConfigDumper()(conf_tree.node, "log/" + prefix + ".conf") # Install version information file write_source_vc_info(suite_conf_dir, "log/" + prefix + ".version", self.popen) # If run through rose-stem, install version information # files for each source tree if they're a working copy if hasattr(opts, 'source') and hasattr(opts, 'project'): for i, url in enumerate(opts.source): if os.path.isdir(url): write_source_vc_info( url, "log/" + opts.project[i] + "-" + str(i) + ".version", self.popen) for ext in [".conf", ".version"]: 
self.fs_util.symlink(prefix + ext, "log/rose-suite-run" + ext) # Move temporary log to permanent log if hasattr(self.event_handler, "contexts"): log_file_path = os.path.abspath( os.path.join("log", "rose-suite-run.log")) log_file = open(log_file_path, "ab") temp_log_file = self.event_handler.contexts[uuid].handle temp_log_file.seek(0) log_file.write(temp_log_file.read()) self.event_handler.contexts[uuid].handle = log_file temp_log_file.close() # Process Files cwd = os.getcwd() for rel_path, conf_dir in conf_tree.files.items(): if (conf_dir == cwd or any( fnmatchcase(os.sep + rel_path, exclude) for exclude in self.SYNC_EXCLUDES) or conf_tree.node.get([templ_scheme + ":" + rel_path ]) is not None): continue # No sub-directories, very slow otherwise if os.sep in rel_path: rel_path = rel_path.split(os.sep, 1)[0] target_key = self.config_pm.get_handler( "file").PREFIX + rel_path target_node = conf_tree.node.get([target_key]) if target_node is None: conf_tree.node.set([target_key]) target_node = conf_tree.node.get([target_key]) elif target_node.is_ignored(): continue source_node = target_node.get("source") if source_node is None: target_node.set(["source"], os.path.join(conf_dir, rel_path)) elif source_node.is_ignored(): continue self.config_pm(conf_tree, "file", no_overwrite_mode=opts.no_overwrite_mode) # Process suite configuration template header # (e.g. 
Jinja2:suite.rc, EmPy:suite.rc) self.config_pm(conf_tree, templ_scheme, environ=environ) # Ask suite engine to parse suite configuration # and determine if it is up to date (unchanged) if opts.validate_suite_only_mode: suite_conf_unchanged = self.suite_engine_proc.cmp_suite_conf( suite_dir, None, opts.strict_mode, debug_mode=True) else: suite_conf_unchanged = self.suite_engine_proc.cmp_suite_conf( suite_name, opts.run_mode, opts.strict_mode, opts.debug_mode) finally: # Ensure the temporary directory created is cleaned up regardless # of success or failure if opts.validate_suite_only_mode and os.path.exists(temp_dir): shutil.rmtree(temp_dir) # Only validating so finish now if opts.validate_suite_only_mode: return # Install share/work directories (local) for name in ["share", "share/cycle", "work"]: self._run_init_dir_work(opts, suite_name, name, conf_tree, locs_conf=locs_conf) if opts.local_install_only_mode: return # Install suite files to each remote [user@]host for name in ["", "log/", "share/", "share/cycle/", "work/"]: uuid_file = os.path.abspath(name + uuid) open(uuid_file, "w").close() work_files.append(uuid_file) # Install items to user@host auths = self.suite_engine_proc.get_tasks_auths(suite_name) proc_queue = [] # [[proc, command, "ssh"|"rsync", auth], ...] for auth in sorted(auths): host = auth if "@" in auth: host = auth.split("@", 1)[1] # Remote shell command = self.popen.get_cmd("ssh", "-n", auth) # Provide ROSE_VERSION and CYLC_VERSION in the environment shcommand = "env ROSE_VERSION=%s %s=%s" % ( my_rose_version, suite_engine_key, suite_engine_version) # Use login shell? 
no_login_shell = self._run_conf("remote-no-login-shell", host=host, conf_tree=conf_tree) if not no_login_shell or no_login_shell.lower() != "true": shcommand += r""" bash -l -c '"$0" "$@"'""" # Path to "rose" command, if applicable rose_bin = self._run_conf("remote-rose-bin", host=host, conf_tree=conf_tree, default="rose") # Build remote "rose suite-run" command shcommand += " %s suite-run -vv -n %s" % (rose_bin, suite_name) for key in ["new", "debug", "install-only"]: attr = key.replace("-", "_") + "_mode" if getattr(opts, attr, None) is not None: shcommand += " --%s" % key if opts.log_keep: shcommand += " --log-keep=%s" % opts.log_keep if opts.log_name: shcommand += " --log-name=%s" % opts.log_name if not opts.log_archive_mode: shcommand += " --no-log-archive" shcommand += " --run=%s" % opts.run_mode # Build --remote= option shcommand += " --remote=uuid=%s" % uuid if now_str is not None: shcommand += ",now-str=%s" % now_str host_confs = [ "root-dir", "root-dir{share}", "root-dir{share/cycle}", "root-dir{work}" ] locs_conf.set([auth]) for key in host_confs: value = self._run_conf(key, host=host, conf_tree=conf_tree) if value is not None: val = self.popen.list_to_shell_str([str(value)]) shcommand += ",%s=%s" % (key, pipes.quote(val)) locs_conf.set([auth, key], value) command.append(shcommand) proc = self.popen.run_bg(*command) proc_queue.append([proc, command, "ssh", auth]) while proc_queue: sleep(self.SLEEP_PIPE) proc, command, command_name, auth = proc_queue.pop(0) if proc.poll() is None: # put it back in proc_queue proc_queue.append([proc, command, command_name, auth]) continue ret_code = proc.wait() out, err = proc.communicate() ret_code, out, err = [ i.decode() if isinstance(i, bytes) else i for i in [ret_code, out, err] ] if ret_code: raise RosePopenError(command, ret_code, out, err) if command_name == "rsync": self.handle_event(out, level=Event.VV) continue else: self.handle_event(out, level=Event.VV, prefix="[%s] " % auth) for line in out.split("\n"): if 
"/" + uuid == line.strip(): locs_conf.unset([auth]) break else: filters = {"excludes": [], "includes": []} for name in ["", "log/", "share/", "share/cycle/", "work/"]: filters["excludes"].append(name + uuid) target = auth + ":" + suite_dir_rel cmd = self._get_cmd_rsync(target, **filters) proc_queue.append( [self.popen.run_bg(*cmd), cmd, "rsync", auth]) # Install ends ConfigDumper()(locs_conf, os.path.join("log", "rose-suite-run.locs")) if opts.install_only_mode: return elif opts.run_mode == "reload" and suite_conf_unchanged: conf_name = self.suite_engine_proc.SUITE_CONF self.handle_event(SkipReloadEvent(suite_name, conf_name)) return # Start the suite self.fs_util.chdir("log") self.suite_engine_proc.run(suite_name, opts.host, opts.run_mode, args) # Disconnect log file handle, so monitoring tool command will no longer # be associated with the log file. self.event_handler.contexts[uuid].handle.close() self.event_handler.contexts.pop(uuid) return 0
def get_prepend_paths(event_handler=None, path_root=None, path_glob_args=None,
                      full_mode=False):
    """Return map of PATH-like env-var names to path lists to prepend to them.

    event_handler -- An instance of metomi.rose.reporter.Reporter or an object
                     with a similar interface. (Currently unused; kept for
                     interface compatibility.)
    path_root -- If a glob is relative and this is defined, this is the root
                 directory of the relative path. Defaults to the current
                 working directory.
    path_glob_args -- A list of strings in the form GLOB or NAME=GLOB.
                      NAME is "PATH" by default or should be PATH-like
                      environment variable name. GLOB should be a glob
                      pattern for matching file system paths to prepend to
                      NAME. An empty GLOB resets (empties) NAME's list.
    full_mode -- If True, prepend relevant paths in site/user configuration
                 and the setting defined in "rose.task_env.PATH_GLOBS".

    Return something like: {"PATH": ["/opt/foo/bin", "/opt/bar/bin"],
                            # ... and so on
                           }
    """
    prepend_paths_map = {}
    # site/user configuration: [rose-task-run]path-prepend[.NAME]=value ...
    if full_mode:
        conf = ResourceLocator.default().get_conf()
        my_conf = conf.get(["rose-task-run"], no_ignore=True)
        if my_conf is not None:
            for key, node in sorted(my_conf.value.items()):
                if not key.startswith("path-prepend") or node.is_ignored():
                    continue
                env_key = "PATH"
                if key != "path-prepend":
                    env_key = key[len("path-prepend."):]
                # Only keep paths that actually exist on this file system
                values = []
                for value in node.value.split():
                    if os.path.exists(value):
                        values.append(value)
                if values:
                    prepend_paths_map[env_key] = values
    # Default or specified globs
    path_globs_map = {}
    if full_mode:
        for name, path_globs in PATH_GLOBS.items():
            # Copy: a later .append() must not mutate the shared PATH_GLOBS
            # module constant (the original aliased the list here).
            path_globs_map[name] = list(path_globs)
    if path_glob_args:
        for path_glob_arg in path_glob_args:
            if path_glob_arg is None:
                continue
            if "=" in path_glob_arg:
                name, value = path_glob_arg.split("=", 1)
            else:
                name, value = "PATH", path_glob_arg
            if name not in path_globs_map:
                path_globs_map[name] = []
            path_globs_map[name].append(value)
    more_prepend_paths_map = {}
    if not path_root:
        path_root = os.getcwd()
    for name, path_globs in path_globs_map.items():
        if name not in more_prepend_paths_map:
            more_prepend_paths_map[name] = []
        for path_glob in path_globs:
            if path_glob:
                if path_glob.startswith("~"):
                    path_glob = os.path.expanduser(path_glob)
                if not os.path.isabs(path_glob):
                    path_glob = os.path.join(path_root, path_glob)
                for path in sorted(glob(path_glob)):
                    more_prepend_paths_map[name].append(path)
            else:
                more_prepend_paths_map[name] = []  # empty value resets
    # Merge glob results into the configuration-derived map
    for name, more_prepend_paths in more_prepend_paths_map.items():
        if name in prepend_paths_map:
            prepend_paths_map[name].extend(more_prepend_paths)
        elif more_prepend_paths:
            prepend_paths_map[name] = more_prepend_paths
    # Reverse so the first-listed glob ends up first after prepending
    for prepend_paths in prepend_paths_map.values():
        prepend_paths.reverse()
    return prepend_paths_map
def _notify_trunk_changes(self, changeset_attribs, branch_attribs):
    """Email owner and/or access-list users on changes to trunk.

    changeset_attribs -- mapping with at least "revision", "author" and
                         "prefix" keys for the commit being processed.
    branch_attribs -- mapping describing the changed branch: "sid",
                      "changed_lines", "status_info_file", "old_info",
                      "info".

    No-op unless both a user tool and a notify-who setting are configured.
    """
    # Notify only if users' email addresses can be determined
    conf = ResourceLocator.default().get_conf()
    user_tool_name = conf.get_value(["rosa-svn", "user-tool"])
    if not user_tool_name:
        return
    notify_who_str = conf.get_value(
        ["rosa-svn", "notify-who-on-trunk-commit"], "")
    if not notify_who_str.strip():
        return
    notify_who = shlex.split(notify_who_str)
    # Build the message text.
    # NOTE(review): "/".join(branch_attribs["sid"]) suggests sid is a
    # string joined character-by-character into a repository path — confirm.
    info_file_path = "%s/trunk/%s" % (
        "/".join(branch_attribs["sid"]),
        self.INFO_FILE,
    )
    text = ""
    for changed_line in branch_attribs["changed_lines"]:
        text += changed_line
        # For suite info file change, add diff as well
        if (changed_line[4:].strip() == info_file_path
                and branch_attribs["status_info_file"] == self.ST_MODIFIED):
            old_strio = StringIO()
            metomi.rose.config.dump(branch_attribs["old_info"], old_strio)
            new_strio = StringIO()
            metomi.rose.config.dump(branch_attribs["info"], new_strio)
            for diff_line in unified_diff(
                old_strio.getvalue().splitlines(True),
                new_strio.getvalue().splitlines(True),
                "@%d" % (int(changeset_attribs["revision"]) - 1),
                "@%d" % (int(changeset_attribs["revision"])),
            ):
                text += " " * 4 + diff_line
    # Determine who to notify
    users = set()
    for key in ["old_info", "info"]:
        if branch_attribs[key] is not None:
            info_conf = branch_attribs[key]
            if "owner" in notify_who:
                users.add(info_conf.get_value(["owner"]))
            if "access-list" in notify_who:
                users.update(
                    info_conf.get_value(["access-list"], "").split())
    users.discard("*")
    # Determine email addresses
    user_tool = self.usertools_manager.get_handler(user_tool_name)
    if "author" in notify_who:
        users.add(changeset_attribs["author"])
    else:
        users.discard(changeset_attribs["author"])
    emails = sorted(user_tool.get_emails(users))
    if not emails:
        return
    # Send notification
    msg = MIMEText(text)
    msg.set_charset("utf-8")
    msg["From"] = conf.get_value(
        ["rosa-svn", "notification-from"],
        # Fixed typo in the default sender address:
        # "notications@" -> "notifications@"
        "notifications@" + socket.getfqdn(),
    )
    msg["To"] = ", ".join(emails)
    msg["Subject"] = "%s-%s/trunk@%d" % (
        changeset_attribs["prefix"],
        branch_attribs["sid"],
        int(changeset_attribs["revision"]),
    )
    smtp_host = conf.get_value(["rosa-svn", "smtp-host"],
                               default="localhost")
    smtp = SMTP(smtp_host)
    smtp.sendmail(msg["From"], emails, msg.as_string())
    smtp.quit()
def _read_auto_opts(self):
    """Return the [rose-stem]automatic-options setting from site/user conf."""
    conf = ResourceLocator.default().get_conf()
    return conf.get_value(["rose-stem", "automatic-options"])
def _prompt(self, is_retry=False):
    """Prompt for the username and password, where necessary.

    Prompt with zenity or raw_input/getpass.

    is_retry -- if True, a previous attempt failed: re-prompt for the
                username as well as the password.

    Side effects: may update self.username and self.password; raises
    KeyboardInterrupt if the user cancels either prompt.
    """
    # If a custom prompt function is configured and the password store has
    # no prompting support of its own, delegate entirely to that function.
    if callable(self.prompt_func) and not hasattr(
        self.password_store, "prompt_password"
    ):
        self.username, self.password = self.prompt_func(
            self.username, self.password, is_retry
        )
        return
    icon_path = ResourceLocator.default().locate("images/rosie-icon.png")
    if is_retry:
        username = ""
        prompt = self.PROMPT_USERNAME % {
            "prefix": self.prefix,
            "root": self.root,
        }
        # Prefer a zenity GUI dialog when available and a display exists;
        # fall back to a terminal prompt otherwise.
        if self.popen.which("zenity") and os.getenv("DISPLAY"):
            username = self.popen.run(
                "zenity",
                "--entry",
                "--title=Rosie",
                "--window-icon=" + icon_path,
                "--text=" + prompt,
            )[1].strip()
        else:
            username = input(prompt)
        if not username:
            # Empty input is treated as a user cancellation
            raise KeyboardInterrupt(self.STR_CANCELLED)
        if username and username != self.username:
            # Username changed: reload the stored password for it
            self.username = username
            self._load_password(is_retry)
            if self.password:
                return
    # NOTE(review): parses as (self.username and self.password is None)
    # or is_retry — i.e. a retry always re-prompts; confirm intended.
    if self.username and self.password is None or is_retry:
        prompt = self.PROMPT_PASSWORD % {
            "prefix": self.prefix,
            "root": self.root,
            "username": self.username,
        }
        password = None
        need_prompting = True
        # Let the password store prompt, if it knows how; on retrieval
        # failure report the error and fall back to prompting ourselves.
        if hasattr(self.password_store, "prompt_password"):
            try:
                password = self.password_store.prompt_password(
                    prompt, self.scheme, self.host, self.username
                )
            except RosieStoreRetrievalError as exc:
                self.event_handler(exc)
            else:
                need_prompting = False
        if not password and need_prompting:
            if self.popen.which("zenity") and os.getenv("DISPLAY"):
                password = self.popen.run(
                    "zenity",
                    "--entry",
                    "--hide-text",
                    "--title=Rosie",
                    "--window-icon=" + icon_path,
                    "--text=" + prompt,
                )[1].strip()
            else:
                password = getpass(prompt)
        if not password:
            # Empty input is treated as a user cancellation
            raise KeyboardInterrupt(self.STR_CANCELLED)
        if password and password != self.password:
            self.password = password
def main():
    """Implement the "rose config" command.

    Reads configuration from files, metadata, or the site/user
    configuration, then prints the requested keys/values and exits with a
    status reflecting success or lookup failure.
    """
    opt_parser = RoseOptionParser()
    opt_parser.add_my_options("default", "env_var_process_mode", "files",
                              "keys", "meta", "meta_key", "no_ignore",
                              "no_opts", "print_conf_mode")
    opts, args = opt_parser.parse_args()
    report = Reporter(opts.verbosity - opts.quietness)
    metomi.rose.macro.add_meta_paths()
    # --meta-key implies --meta
    if opts.meta_key:
        opts.meta = True
    if opts.files and opts.meta_key:
        report(Exception("Cannot specify both a file and meta key."))
        sys.exit(1)
    config_loader = ConfigLoader()
    # Work out the list of configuration sources to load
    sources = []
    if opts.files:
        root_node = ConfigNode()
        for fname in opts.files:
            if fname == "-":
                # "-" means read configuration from standard input
                sources.append(sys.stdin)
            else:
                if opts.meta:
                    # Load the file only to locate its metadata path
                    try:
                        root_node = config_loader.load(fname)
                    except ConfigSyntaxError as exc:
                        report(exc)
                        sys.exit(1)
                    rel_path = os.sep.join(fname.split(os.sep)[:-1])
                    fpath = get_meta_path(root_node, rel_path)
                    if fpath is None:
                        report(MetadataNotFoundEvent(fname))
                    else:
                        sources.append(fpath)
                else:
                    sources.append(fname)
    elif opts.meta:
        # No files given: locate metadata via --meta-key or the
        # rose-app.conf in the current working directory.
        root_node = ConfigNode()
        if opts.meta_key:
            root_node.set(["meta"], opts.meta_key)
        else:
            fname = os.path.join(os.getcwd(), metomi.rose.SUB_CONFIG_NAME)
            try:
                root_node = config_loader.load(fname)
            except ConfigSyntaxError as exc:
                report(exc)
                sys.exit(1)
        fpath = get_meta_path(root_node, meta_key=opts.meta_key)
        root_node.unset(["meta"])
        if fpath is None:
            report(Exception("Metadata not found"))
            sys.exit(1)
        else:
            sources.append(fpath)
    else:
        # Default: the site/user configuration
        root_node = ResourceLocator.default().get_conf()
    # Load each source into root_node (with or without optional configs)
    for source in sources:
        try:
            if opts.meta or opts.no_opts:
                config_loader.load(source, root_node)
            else:
                config_loader.load_with_opts(source, root_node)
        except (ConfigSyntaxError, IOError) as exc:
            report(exc)
            sys.exit(1)
        if source is sys.stdin:
            source.close()
    if opts.quietness:
        # Quiet mode: exit status only — 0 if the setting exists, 1 if not
        sys.exit(root_node.get(args, opts.no_ignore) is None)
    if opts.keys_mode:
        # --keys: print sorted sub-keys of the requested node
        try:
            keys = list(root_node.get(args, opts.no_ignore).value)
        except AttributeError:
            # Node missing or is a leaf (value has no keys)
            sys.exit(1)
        keys.sort()
        for key in keys:
            print(key)
        sys.exit()
    conf_dump = ConfigDumper()
    if len(args) == 0:
        # No keys requested: dump the whole configuration
        conf_dump(root_node, concat_mode=opts.print_conf_mode)
        sys.exit()
    node = root_node.get(args, opts.no_ignore)
    if node is not None and isinstance(node.value, dict):
        # Requested node is a section
        if opts.print_conf_mode:
            conf_dump(ConfigNode().set(args, node.value), concat_mode=True)
            sys.exit()
        keys = list(node.value)
        keys.sort()
        for key in keys:
            node_of_key = node.get([key], opts.no_ignore)
            if node_of_key:
                value = node_of_key.value
                state = node_of_key.state
                string = "%s%s=%s" % (state, key, value)
                lines = string.splitlines()
                print(lines[0])
                # Align continuation lines under the value column
                i_equal = len(state + key) + 1
                for line in lines[1:]:
                    print(" " * i_equal + line)
        sys.exit()
    # Requested node is a leaf setting (or missing)
    if node is None:
        if opts.default is None:
            sys.exit(1)
        value = opts.default
    elif opts.env_var_process_mode:
        # Substitute environment variables in the value before printing
        value = env_var_process(node.value)
    else:
        value = node.value
    if opts.print_conf_mode:
        conf_dump(ConfigNode().set(args, value), concat_mode=True)
    else:
        print(value)
    sys.exit()
def generate_info_config(self, from_id=None, prefix=None, project=None):
    """Generate a metomi.rose.config.ConfigNode for a rose-suite.info.

    This is suitable for passing into the create method of this class.
    If from_id is defined, copy items from it.
    Return the metomi.rose.config.ConfigNode instance.
    """
    from_project = None
    from_title = None
    if from_id is not None:
        # Fetch the rose-suite.info of the source suite from Subversion
        from_info_url = "%s/%s/rose-suite.info@%s" % (
            from_id.to_origin(),
            from_id.branch,
            from_id.revision,
        )
        out_data = self.popen("svn", "cat", from_info_url)[0]
        from_config = metomi.rose.config.load(StringIO(out_data))
    res_loc = ResourceLocator.default()
    older_config = None
    info_config = metomi.rose.config.ConfigNode()
    # Determine project if given as a command-line option on create
    if from_id is None and project is not None:
        info_config.set(["project"], project)
    # Set the compulsory fields and use the project and metadata if
    # available.
    meta_config = load_meta_config(
        info_config, config_type=metomi.rose.INFO_CONFIG_NAME)
    if from_id is None and project is not None:
        for node_keys, node in meta_config.walk(no_ignore=True):
            if isinstance(node.value, dict):
                continue
            sect, key = node_keys
            value = node.value
            # Strip "=" from the section name. The original used the
            # Python 2 str.translate(None, "=") signature, which raises
            # TypeError on Python 3.
            sect = sect.replace("=", "")
            if key == "compulsory" and value == "true":
                info_config.set([sect], "")
        info_config.set(["project"], project)
    else:
        if from_project is None:
            info_config.set(["project"], "")
        if from_title is None:
            info_config.set(["title"], "")
    # Determine prefix
    if prefix is None:
        if from_id is None:
            prefix = SuiteId.get_prefix_default()
        else:
            prefix = from_id.prefix
    # Determine owner:
    # 1. From user configuration [rosie-id]prefix-username
    # 2. From username of a matching group in [groups] in
    #    ~/.subversion/servers
    # 3. Current user ID
    owner = res_loc.get_conf().get_value(
        ["rosie-id", "prefix-username." + prefix])
    if not owner and self.subversion_servers_conf:
        servers_conf = metomi.rose.config.load(
            self.subversion_servers_conf)
        groups_node = servers_conf.get(["groups"])
        if groups_node is not None:
            prefix_loc = SuiteId.get_prefix_location(prefix)
            prefix_host = urlparse(prefix_loc).hostname
            for key, node in groups_node.value.items():
                if fnmatch(prefix_host, node.value):
                    owner = servers_conf.get_value([key, "username"])
                    break
    if not owner:
        owner = pwd.getpwuid(os.getuid())[0]
    info_config.set(["owner"], owner)
    # Copy description (AttributeError when from_id is None)
    try:
        from_id.to_string_with_version()
        info_config.set(
            ["description"],
            "Copy of %s" % (from_id.to_string_with_version()),
        )
    except AttributeError:
        pass
    # Copy fields provided by the user
    # (UnboundLocalError when from_config was never loaded, i.e.
    # from_id is None)
    try:
        from_config.walk(no_ignore=False)
        for node_keys, node in from_config.walk(no_ignore=False):
            if isinstance(node.value, dict):
                continue
            sect, key = node_keys
            value = node.value
            if key in [
                "description", "owner", "access-list"
            ] or (key == "project" and from_project is not None):
                pass
            else:
                info_config.set([key], value)
    except UnboundLocalError:
        pass
    # Determine access list
    access_list_str = res_loc.get_conf().get_value(
        ["rosie-vc", "access-list-default"])
    if access_list_str:
        info_config.set(["access-list"], access_list_str)
    if from_id is None and project is not None:
        for node_keys, node in meta_config.walk(no_ignore=True):
            if isinstance(node.value, dict):
                continue
            sect, key = node_keys
            value = node.value
            # Python 3 replacement for str.translate(None, "=") (see above)
            sect = sect.replace("=", "")
            if key == "value-hints" or key == "values":
                reminder = ("please remove all commented hints/lines "
                            + "in the main/top section before saving.")
                info_config.set(
                    [sect],
                    metomi.rose.variable.array_split(value)[0],
                    comments=[value, reminder],
                )
    if older_config is not None:
        for node_keys, node in older_config.walk(no_ignore=True):
            if isinstance(node.value, dict):
                continue
            sect, key = node_keys
            value = node.value
            info_config.set([key], value)
    return info_config