def _poll_file_list(conf_tree, option):
    """Return the shlex-split file list from [poll]<option>, or [].

    Raise ConfigValueError if the setting references an unbound
    environment variable.
    """
    value = conf_tree.node.get_value(["poll", option])
    if not value:
        return []
    try:
        return shlex.split(env_var_process(value))
    except UnboundEnvironmentVariableError as exc:
        raise ConfigValueError(["poll", option], value, exc)


def _get_tests(conf_tree):
    """Return the poll tests configuration.

    Return a 4-tuple (poll_test, poll_file_test, poll_all_files,
    poll_any_files) read from the [poll] section of "conf_tree".
    "poll_file_test" is only read when at least one file list is set.

    Raise ConfigValueError on an unbound environment variable or a
    file-test command without a "{}" file-name placeholder.
    """
    poll_test = conf_tree.node.get_value(["poll", "test"])
    # The all-files and any-files options share the same parsing logic.
    poll_all_files = _poll_file_list(conf_tree, "all-files")
    poll_any_files = _poll_file_list(conf_tree, "any-files")
    poll_file_test = None
    if poll_all_files or poll_any_files:
        poll_file_test = conf_tree.node.get_value(["poll", "file-test"])
        # "{}" is the placeholder substituted with each file name.
        if poll_file_test and "{}" not in poll_file_test:
            raise ConfigValueError(
                ["poll", "file-test"], poll_file_test,
                ConfigValueError.SYNTAX)
    return poll_test, poll_file_test, poll_all_files, poll_any_files
def process(self, config, item, orig_keys=None, orig_value=None, **kwargs):
    """Return Fortran namelist text generated from "item" in "config".

    "item" is either an exact section name, or a name ending in "(:)"
    which selects every section whose name starts with the prefix.
    Raises ConfigProcessError if no matching section exists, or if a
    value references an unbound environment variable.
    """
    if item.endswith("(:)"):
        # Prefix match: keep "name(" as the prefix (only ":)" dropped).
        name = item[0:-2]
        sections = filter(lambda key: key.startswith(name), config.value.keys())
    else:
        sections = filter(lambda key: key == item, config.value.keys())
    if not sections:
        e = UnknownContentError(item)
        raise ConfigProcessError(orig_keys, orig_value, e)
    if item.endswith("(:)"):
        # NOTE(review): Python 2 semantics — filter() returns a list and
        # sort() takes a cmp function here.
        sections.sort(rose.config.sort_settings)
    ret = ""
    for section in sections:
        section_node = config.get([section], no_ignore=True)
        # Skip ignored sections (non-empty state).
        if section_node.state:
            continue
        group = RE_NAMELIST_GROUP.match(section).group(1)
        nlg = "&" + group + "\n"
        for key, node in sorted(section_node.value.items()):
            try:
                value = env_var_process(node.value)
            except UnboundEnvironmentVariableError as e:
                raise ConfigProcessError([section, key], node.value, e)
            else:
                nlg += "%s=%s,\n" % (key, value)
        nlg += "/" + "\n"
        # Report each generated namelist group as an event.
        self.manager.event_handler(ConfigProcessNamelistEvent(nlg))
        ret += nlg
    return ret
def _run(self, dao, app_runner, config):
    """Transform and archive suite files.

    This application is designed to work under "rose task-run" in a
    suite.

    Returns the number of targets whose final status is ST_BAD.
    """
    compress_manager = SchemeHandlersManager(
        [os.path.dirname(os.path.dirname(sys.modules["rose"].__file__))],
        "rose.apps.rose_arch_compressions", ["compress_sources"],
        None, app_runner)
    # Set up the targets
    s_key_tails = set()
    targets = []
    for t_key, t_node in sorted(config.value.items()):
        # Only [SECTION:target] sections are relevant.
        if t_node.is_ignored() or ":" not in t_key:
            continue
        s_key_head, s_key_tail = t_key.split(":", 1)
        if s_key_head != self.SECTION or not s_key_tail:
            continue
        # Determine target path.
        s_key_tail = t_key.split(":", 1)[1]
        try:
            s_key_tail = env_var_process(s_key_tail)
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError([t_key, ""], "", exc)
        # If parenthesised target is optional.
        is_compulsory_target = True
        if s_key_tail.startswith("(") and s_key_tail.endswith(")"):
            s_key_tail = s_key_tail[1:-1]
            is_compulsory_target = False
        # Don't permit duplicate targets.
        if s_key_tail in s_key_tails:
            raise RoseArchDuplicateError([t_key], '', s_key_tail)
        else:
            s_key_tails.add(s_key_tail)
        target = self._run_target_setup(
            app_runner, compress_manager, config, t_key, s_key_tail,
            t_node, is_compulsory_target)
        # A changed (or new) target invalidates any stored record;
        # an unchanged one is marked old so it can be skipped.
        old_target = dao.select(target.name)
        if old_target is None or old_target != target:
            dao.delete(target)
        else:
            target.status = target.ST_OLD
        targets.append(target)
    targets.sort(key=lambda target: target.name)
    # Delete from database items that are no longer relevant
    dao.delete_all(filter_targets=targets)
    # Update the targets
    for target in targets:
        self._run_target_update(dao, app_runner, compress_manager, target)
    return [target.status for target in targets].count(RoseArchTarget.ST_BAD)
def load_tasks(self):
    """Loads AnalysisTasks from files.

    Given a list of files, return AnalysisTasks generated from those
    files. This also expands environment variables in filenames, but
    saves the original contents for use when writing out config files.

    Returns:
        The list of AnalysisTask objects (also stored on self.tasks).
    """
    tasks = []
    for task in self.config.value.keys():
        # BUG FIX: was "task is 'env'" — identity comparison with a
        # string literal is not guaranteed to match equal strings.
        if task == "env":
            continue
        if task.startswith("file:"):
            continue
        newtask = AnalysisTask()
        newtask.name = task
        value = self.config.get_value([task, "resultfile"])
        # If the task is ignored, this will be None, so continue
        # on to the next task
        if value is None:
            continue
        # "{}" in a filename is replaced by the first command argument.
        if "{}" in value:
            newtask.resultfile = value.replace("{}", self.args[0])
        else:
            newtask.resultfile = value
        newtask = self._find_file("result", newtask)
        newtask.extract = self.config.get_value([task, "extract"])
        # An extract of the form "method:subextract" is split in two.
        result = re.search(r":", newtask.extract)
        if result:
            newtask.subextract = ":".join(newtask.extract.split(":")[1:])
            newtask.extract = newtask.extract.split(":")[0]
        newtask.comparison = self.config.get_value([task, "comparison"])
        newtask.tolerance = env_var_process(self.config.get_value(
            [task, "tolerance"]))
        newtask.warnonfail = (
            self.config.get_value([task, "warnonfail"]) in ["yes", "true"])
        # Allow for multiple KGO, e.g. kgo1file, kgo2file, for
        # statistical comparisons of results
        newtask.numkgofiles = 0
        for i in range(1, MAX_KGO_FILES):
            kgovar = "kgo" + str(i)
            kgofilevar = kgovar + "file"
            if self.config.get([task, kgofilevar]):
                value = self.config.get([task, kgofilevar])[:]
                if "{}" in value:
                    setattr(
                        newtask, kgofilevar,
                        value.replace("{}", self.args[0]))
                else:
                    setattr(newtask, kgofilevar, value)
                newtask.numkgofiles += 1
                newtask = self._find_file(kgovar, newtask)
            else:
                # KGO files are numbered consecutively; stop at a gap.
                break
        tasks.append(newtask)
    self.tasks = tasks
    return tasks
def process(self, conf_tree, item, orig_keys=None, orig_value=None, **kwargs):
    """Export environment variables in an [env] in "conf_tree.node".

    Returns a dict of the exported variables, or None if "item" is
    missing or ignored. Raises ConfigProcessError on an unbound
    environment variable.
    """
    env_node = conf_tree.node.get([item], no_ignore=True)
    if env_node is None:
        return
    # IDIOM FIX: dict.has_key() is deprecated; use the "in" operator.
    if "UNDEF" in os.environ:
        os.environ.pop("UNDEF")
    environ = {}
    if env_node and not env_node.state:
        for key, node in env_node.value.iteritems():
            # Skip ignored settings (non-empty state).
            if node.state:
                continue
            try:
                environ[key] = env_var_process(node.value)
            except UnboundEnvironmentVariableError as e:
                raise ConfigProcessError([item, key], node.value, e)
            environ[key] = os.path.expanduser(environ[key])  # ~ expansion
    for key, value in sorted(environ.items()):
        env_export(key, value, self.manager.event_handler)
    return environ
def _run_init_dir_work(self, opts, suite_name, name, conf_tree=None,
                       r_opts=None, locs_conf=None):
    """Create a named suite's directory.

    If a "root-dir-<name>" setting exists, the real directory is
    created under that root and symlinked to "name"; otherwise "name"
    is created directly. opts.new_mode deletes any existing directory
    first.
    """
    item_path = os.path.realpath(name)
    item_path_source = item_path
    key = "root-dir-" + name
    item_root = self._run_conf(key, conf_tree=conf_tree, r_opts=r_opts)
    if item_root is not None:
        if locs_conf is not None:
            # Record the chosen root for the localhost location config.
            locs_conf.set(["localhost", key], item_root)
        item_root = env_var_process(item_root)
        suite_dir_rel = self._suite_dir_rel(suite_name)
        item_path_source = os.path.join(item_root, suite_dir_rel, name)
        item_path_source = os.path.realpath(item_path_source)
    if item_path == item_path_source:
        # Root resolves to the working location: no symlink needed.
        if opts.new_mode:
            self.fs_util.delete(name)
        self.fs_util.makedirs(name)
    else:
        if opts.new_mode:
            self.fs_util.delete(item_path_source)
        self.fs_util.makedirs(item_path_source)
        self.fs_util.symlink(item_path_source, name, opts.no_overwrite_mode)
def _run_init_dir(self, opts, suite_name, conf_tree=None, r_opts=None,
                  locs_conf=None):
    """Create the suite's directory.

    When a "root-dir" setting points outside the home directory, the
    real directory is created under that root and the home-relative
    suite directory becomes a symlink to it.
    """
    rel_path = self._suite_dir_rel(suite_name)
    home_path = os.path.expanduser("~")
    root_value = self._run_conf(
        "root-dir", conf_tree=conf_tree, r_opts=r_opts)
    if root_value:
        if locs_conf is not None:
            locs_conf.set(["localhost", "root-dir"], root_value)
        root_value = env_var_process(root_value)
    home_suite_path = os.path.join(home_path, rel_path)
    use_root = bool(
        root_value and
        os.path.realpath(home_path) != os.path.realpath(root_value))
    if use_root:
        real_suite_path = os.path.join(root_value, rel_path)
        self.fs_util.makedirs(real_suite_path)
        self.fs_util.symlink(
            real_suite_path, home_suite_path, opts.no_overwrite_mode)
    else:
        self.fs_util.makedirs(home_suite_path)
def main():
    """Implement "rose env-cat"."""
    parser = RoseOptionParser()
    parser.add_my_options("unbound")
    opts, args = parser.parse_args()
    args = args or ["-"]
    for arg in args:
        handle = sys.stdin if arg == "-" else open(arg)
        for line_num, line in enumerate(handle, 1):
            try:
                sys.stdout.write(env_var_process(line, opts.unbound))
            except UnboundEnvironmentVariableError as exc:
                # Report file name (or <STDIN>), line number and error.
                display_name = "<STDIN>" if arg == "-" else arg
                sys.exit("%s:%s: %s" % (display_name, line_num, str(exc)))
        handle.close()
def start(is_main=False):
    """Create the server.

    If is_main, invoke cherrypy.quickstart.
    Otherwise, return a cherrypy.Application instance.
    """
    # Environment variables (not normally defined in WSGI mode)
    if os.getenv("ROSE_HOME") is None:
        # Walk up from this file until a "lib" directory is found; its
        # parent is taken as ROSE_HOME.
        path = os.path.abspath(__file__)
        while os.path.dirname(path) != path:  # not root
            if os.path.basename(path) == "lib":
                os.environ["ROSE_HOME"] = os.path.dirname(path)
                break
            path = os.path.dirname(path)
    for key, value in [("ROSE_NS", "rosa"), ("ROSE_UTIL", "ws")]:
        if os.getenv(key) is None:
            os.environ[key] = value
    # CherryPy quick server configuration
    rose_conf = ResourceLocator.default().get_conf()
    if is_main and rose_conf.get_value(["rosie-ws", "log-dir"]) is not None:
        # File logging is only configured for the stand-alone server.
        log_dir_value = rose_conf.get_value(["rosie-ws", "log-dir"])
        log_dir = env_var_process(os.path.expanduser(log_dir_value))
        if not os.path.isdir(log_dir):
            os.makedirs(log_dir)
        log_file = os.path.join(log_dir, "server.log")
        log_error_file = os.path.join(log_dir, "server.err.log")
        cherrypy.config["log.error_file"] = log_error_file
        cherrypy.config["log.access_file"] = log_file
        cherrypy.config["request.error_response"] = _handle_error
    cherrypy.config["log.screen"] = False
    # Configuration for dynamic pages
    db_url_map = {}
    for key, node in rose_conf.get(["rosie-db"]).value.items():
        # "db.<prefix>" settings map a prefix name to a database URL.
        if key.startswith("db.") and key[3:]:
            db_url_map[key[3:]] = node.value
    res_loc = ResourceLocator.default()
    html_lib = res_loc.get_util_home("lib", "html")
    icon_path = res_loc.locate("images/rosie-icon-trim.png")
    tmpl_loader = jinja2.FileSystemLoader(os.path.join(html_lib, "rosie-ws"))
    root = Root(jinja2.Environment(loader=tmpl_loader), db_url_map)
    # Configuration for static pages
    config = {"/etc": {
                  "tools.staticdir.dir": os.path.join(html_lib, "external"),
                  "tools.staticdir.on": True},
              "/favicon.ico": {
                  "tools.staticfile.on": True,
                  "tools.staticfile.filename": icon_path}}
    if is_main:
        # Stand-alone server: bind on all interfaces at the configured
        # port (default 8080).
        port = int(rose_conf.get_value(["rosie-ws", "port"], 8080))
        config.update({"global": {"server.socket_host": "0.0.0.0",
                                  "server.socket_port": port}})
    # Start server or return WSGI application
    if is_main:
        return cherrypy.quickstart(root, "/", config=config)
    else:
        return cherrypy.Application(root, script_name=None, config=config)
def main():
    """Implement "rose env-cat"."""
    parser = RoseOptionParser()
    parser.add_my_options("match_mode", "output_file", "unbound")
    opts, args = parser.parse_args()
    args = args or ["-"]
    if not opts.output_file or opts.output_file == "-":
        out_handle = sys.stdout
    else:
        out_handle = open(opts.output_file, "wb")
    for arg in args:
        in_handle = sys.stdin if arg == "-" else open(arg)
        for line_num, line in enumerate(in_handle, 1):
            try:
                out_handle.write(
                    env_var_process(line, opts.unbound, opts.match_mode))
            except UnboundEnvironmentVariableError as exc:
                # Report file name (or <STDIN>), line number and error.
                display_name = "<STDIN>" if arg == "-" else arg
                sys.exit("%s:%s: %s" % (display_name, line_num, str(exc)))
        in_handle.close()
    out_handle.close()
def load_tasks(self):
    """Loads AnalysisTasks from files.

    Given a list of files, return AnalysisTasks generated from those
    files. This also expands environment variables in filenames, but
    saves the original contents for use when writing out config files.

    Returns:
        The list of AnalysisTask objects (also stored on self.tasks).
    """
    tasks = []
    for task in self.config.value.keys():
        # BUG FIX: was "task is 'env'" — identity comparison with a
        # string literal is not guaranteed to match equal strings.
        if task == "env":
            continue
        if task.startswith("file:"):
            continue
        newtask = AnalysisTask()
        newtask.name = task
        value = self.config.get_value([task, "resultfile"])
        # If the task is ignored, this will be None, so continue
        # on to the next task
        if value is None:
            continue
        # "{}" in a filename is replaced by the first command argument.
        if "{}" in value:
            newtask.resultfile = value.replace("{}", self.args[0])
        else:
            newtask.resultfile = value
        newtask = self._find_file("result", newtask)
        newtask.extract = self.config.get_value([task, "extract"])
        # An extract of the form "method:subextract" is split in two.
        result = re.search(r":", newtask.extract)
        if result:
            newtask.subextract = ":".join(newtask.extract.split(":")[1:])
            newtask.extract = newtask.extract.split(":")[0]
        newtask.comparison = self.config.get_value([task, "comparison"])
        newtask.tolerance = env_var_process(
            self.config.get_value([task, "tolerance"]))
        newtask.warnonfail = (self.config.get_value([task, "warnonfail"])
                              in ["yes", "true"])
        # Allow for multiple KGO, e.g. kgo1file, kgo2file, for
        # statistical comparisons of results
        newtask.numkgofiles = 0
        for i in range(1, MAX_KGO_FILES):
            kgovar = "kgo" + str(i)
            kgofilevar = kgovar + "file"
            if self.config.get([task, kgofilevar]):
                value = self.config.get([task, kgofilevar])[:]
                if "{}" in value:
                    setattr(newtask, kgofilevar,
                            value.replace("{}", self.args[0]))
                else:
                    setattr(newtask, kgofilevar, value)
                newtask.numkgofiles += 1
                newtask = self._find_file(kgovar, newtask)
            else:
                # KGO files are numbered consecutively; stop at a gap.
                break
        tasks.append(newtask)
    self.tasks = tasks
    return tasks
def _get_conf(self, conf_tree, key, max_args=0):
    """Get a list of cycles from a configuration setting.

    key -- An option key in self.SECTION to locate the setting.
    max_args -- Maximum number of extra arguments for an item in the
                list.

    The value of the setting is expected to be split by shlex.split
    into a list of items. If max_args == 0, an item should be a string
    representing a cycle or an cycle offset. If max_args > 0, the cycle
    or cycle offset string can, optionally, have arguments. The
    arguments are delimited by colons ":". E.g.:

    prune-remote-logs-at=-6h -12h
    prune-server-logs-at=-7d
    prune-datac-at=-6h:foo/* -12h:'bar/* baz/*' -1d
    prune-work-at=-6h:t1*:*.tar -12h:t1*: -12h:*.gz -1d

    If max_args == 0, return a list of cycles.
    If max_args > 0, return a list of (cycle, [arg, ...])
    """
    items_str = conf_tree.node.get_value([self.SECTION, key])
    if items_str is None:
        return []
    try:
        items_str = env_var_process(items_str)
    except UnboundEnvironmentVariableError as exc:
        raise ConfigValueError([self.SECTION, key], items_str, exc)
    items = []
    # Reference cycle comes from the task-cycle-time environment
    # variable, when set.
    ref_time_point = os.getenv(
        RoseDateTimeOperator.TASK_CYCLE_TIME_MODE_ENV)
    date_time_oper = RoseDateTimeOperator(ref_time_point=ref_time_point)
    for item_str in shlex.split(items_str):
        args = item_str.split(":", max_args)
        offset = args.pop(0)
        cycle = offset
        if ref_time_point:
            if os.getenv("ROSE_CYCLING_MODE") == "integer":
                # Integer cycling: strip the "P" duration prefix and
                # add the offset numerically; a non-numeric item is
                # kept verbatim.
                try:
                    cycle = str(int(ref_time_point) +
                                int(offset.replace("P", "")))
                except ValueError:
                    pass
            else:
                # Date-time cycling: shift the reference time point by
                # the offset; a bad offset leaves the item verbatim.
                # NOTE(review): date_parse() is called with no argument
                # — presumably it parses ref_time_point; confirm against
                # RoseDateTimeOperator.
                try:
                    time_point, parse_format = date_time_oper.date_parse()
                    time_point = date_time_oper.date_shift(
                        time_point, offset)
                    cycle = date_time_oper.date_format(
                        parse_format, time_point)
                except ValueError:
                    pass
        if max_args:
            items.append((cycle, args))
        else:
            items.append(cycle)
    return items
def _get_conf_value(self, name, default=None):
    """Return the value of a named conf setting for this prefix."""
    option = "prefix-%s.%s" % (name, self.prefix)
    conf = ResourceLocator.default().get_conf()
    raw_value = conf.get_value(["rosie-id", option], default=default)
    if not raw_value:
        return raw_value
    # Expand any environment variable references in the value.
    return env_var_process(raw_value)
def _get_prune_globs(self, app_runner, conf_tree):
    """Return prune globs.

    Build a list of "head/glob" paths to prune from the prune-* and
    prune{...} settings, substituting cycle strings (and any
    cycle-format{...} variants) into the glob tails.
    """
    globs = []
    nodes = conf_tree.node.get_value([self.SECTION])
    if nodes is None:
        return []
    # First pass: collect and validate cycle-format{NAME} settings.
    cycle_formats = {}
    for key, node in nodes.items():
        if node.is_ignored():
            continue
        if key.startswith("cycle-format{") and key.endswith("}"):
            fmt = key[len("cycle-format{"):-1]
            try:
                cycle_formats[fmt] = env_var_process(node.value)
                # Check formats are valid
                if self._get_cycling_mode() == "integer":
                    cycle_formats[fmt] % 0
                else:
                    app_runner.date_time_oper.date_format(
                        cycle_formats[fmt])
            except (UnboundEnvironmentVariableError, ValueError) as exc:
                raise ConfigValueError([self.SECTION, key], node.value, exc)
    # Second pass: expand each prune setting into globs.
    for key, node in sorted(nodes.items()):
        if node.is_ignored():
            continue
        if key == "prune-datac-at":  # backward compat
            head = "share/cycle"
        elif key == "prune-work-at":  # backward compat
            head = "work"
        elif key.startswith("prune{") and key.endswith("}"):
            head = key[len("prune{"):-1].strip()  # remove "prune{" and "}"
        else:
            continue
        for cycle, cycle_args in self._get_conf(app_runner, conf_tree,
                                                key, max_args=1):
            if cycle_args:
                cycle_strs = {"cycle": cycle}
                # NOTE(review): this inner loop reuses (shadows) the
                # outer "key" variable; harmless here since "key" is
                # not read again afterwards in this iteration.
                for key, cycle_format in cycle_formats.items():
                    if self._get_cycling_mode() == "integer":
                        cycle_strs[key] = cycle_format % int(cycle)
                    else:  # date time cycling
                        cycle_point = (
                            app_runner.date_time_oper.date_parse(cycle)[0])
                        cycle_strs[key] = (
                            app_runner.date_time_oper.date_format(
                                cycle_format, cycle_point))
                for tail_glob in shlex.split(cycle_args.pop()):
                    # %-substitute cycle strings into the glob; if
                    # nothing was substituted, nest the glob under the
                    # cycle directory instead.
                    glob_ = tail_glob % cycle_strs
                    if glob_ == tail_glob:  # no substitution
                        glob_ = os.path.join(cycle, tail_glob)
                    globs.append(os.path.join(head, glob_))
            else:
                globs.append(os.path.join(head, cycle))
    return globs
def _conf_value(conf_tree, keys, default=None):
    """Return conf setting value, with env var processed."""
    raw = conf_tree.node.get_value(keys, default)
    if raw is None:
        return None
    try:
        processed = env_var_process(raw)
    except UnboundEnvironmentVariableError as exc:
        # Report the offending keys and raw value to the caller.
        raise ConfigValueError(keys, raw, exc)
    return processed
def _get_prune_globs(self, app_runner, conf_tree):
    """Return prune globs.

    Build a list of "head/glob" paths to prune from the prune-* and
    prune{...} settings, substituting cycle strings (and any
    cycle-format{...} variants) into the glob tails.
    """
    globs = []
    nodes = conf_tree.node.get_value([self.SECTION])
    if nodes is None:
        return []
    # First pass: collect and validate cycle-format{NAME} settings.
    cycle_formats = {}
    for key, node in nodes.items():
        if node.is_ignored():
            continue
        if key.startswith("cycle-format{") and key.endswith("}"):
            fmt = key[len("cycle-format{"):-1]
            try:
                cycle_formats[fmt] = env_var_process(node.value)
                # Check formats are valid
                if self._get_cycling_mode() == "integer":
                    cycle_formats[fmt] % 0
                else:
                    app_runner.date_time_oper.date_format(
                        cycle_formats[fmt])
            except (UnboundEnvironmentVariableError, ValueError) as exc:
                raise ConfigValueError(
                    [self.SECTION, key], node.value, exc)
    # Second pass: expand each prune setting into globs.
    for key, node in sorted(nodes.items()):
        if node.is_ignored():
            continue
        if key == "prune-datac-at":  # backward compat
            head = "share/cycle"
        elif key == "prune-work-at":  # backward compat
            head = "work"
        elif key.startswith("prune{") and key.endswith("}"):
            head = key[len("prune{"):-1].strip()  # remove "prune{" and "}"
        else:
            continue
        for cycle, cycle_args in self._get_conf(
                app_runner, conf_tree, key, max_args=1):
            if cycle_args:
                cycle_strs = {"cycle": cycle}
                # NOTE(review): this inner loop reuses (shadows) the
                # outer "key" variable; harmless here since "key" is
                # not read again afterwards in this iteration.
                for key, cycle_format in cycle_formats.items():
                    if self._get_cycling_mode() == "integer":
                        cycle_strs[key] = cycle_format % int(cycle)
                    else:  # date time cycling
                        cycle_point = (
                            app_runner.date_time_oper.date_parse(cycle)[0])
                        cycle_strs[key] = (
                            app_runner.date_time_oper.date_format(
                                cycle_format, cycle_point))
                for tail_glob in shlex.split(cycle_args.pop()):
                    # %-substitute cycle strings into the glob; if
                    # nothing was substituted, nest the glob under the
                    # cycle directory instead.
                    glob_ = tail_glob % cycle_strs
                    if glob_ == tail_glob:  # no substitution
                        glob_ = os.path.join(cycle, tail_glob)
                    globs.append(os.path.join(head, glob_))
            else:
                globs.append(os.path.join(head, cycle))
    return globs
def process(self, conf_tree, item, orig_keys=None, orig_value=None, **_):
    """Process [jinja2:*] in "conf_tree.node".

    For each non-ignored [jinja2:TARGET] section, rewrite TARGET with a
    generated header (scheme line, assignments from the section's
    settings) replacing any previously inserted header, leaving the
    rest of the source file untouched. The target is only rewritten
    when its content actually changes.
    """
    for s_key, s_node in sorted(conf_tree.node.value.items()):
        if (s_node.is_ignored() or not s_key.startswith(self.PREFIX) or
                not s_node.value):
            continue
        target = s_key[len(self.PREFIX):]
        source = os.path.join(conf_tree.files[target], target)
        if not os.access(source, os.F_OK | os.R_OK):
            continue
        scheme_ln = self.SCHEME_TEMPL % self.SCHEME
        msg_init_ln = self.COMMENT_TEMPL % self.MSG_INIT
        msg_done_ln = self.COMMENT_TEMPL % self.MSG_DONE
        # Build the new content in a temporary file first.
        tmp_file = NamedTemporaryFile()
        tmp_file.write(scheme_ln)
        tmp_file.write(msg_init_ln)
        for key, node in sorted(s_node.value.items()):
            if node.is_ignored():
                continue
            try:
                value = env_var_process(node.value)
            except UnboundEnvironmentVariableError as exc:
                raise ConfigProcessError([s_key, key], node.value, exc)
            tmp_file.write(self.ASSIGN_TEMPL % (key, value))
        tmp_file.write(msg_done_ln)
        # Copy the source, skipping any previously inserted header
        # (scheme line on line 1, then the init...done block).
        line_n = 0
        is_in_old_insert = False
        for line in open(source):
            line_n += 1
            if line_n == 1 and line.strip().lower() == scheme_ln.strip():
                continue
            elif line_n == 2 and line == msg_init_ln:
                is_in_old_insert = True
                continue
            elif is_in_old_insert and line == msg_done_ln:
                is_in_old_insert = False
                continue
            elif is_in_old_insert:
                continue
            tmp_file.write(line)
        tmp_file.seek(0)
        if os.access(target, os.F_OK | os.R_OK):
            if filecmp.cmp(target, tmp_file.name):  # identical
                tmp_file.close()
                continue
            else:
                self.manager.fs_util.delete(target)
        # Write content to target
        target_file = open(target, "w")
        for line in tmp_file:
            target_file.write(line)
        event = FileSystemEvent(FileSystemEvent.INSTALL, target)
        self.manager.handle_event(event)
        tmp_file.close()
def _get_conf(self, r_node, t_node, key, compulsory=False, default=None):
    """Return the value of a configuration.

    Look up "key" in "t_node", falling back to [SECTION]key in
    "r_node", then "default". Raise CompulsoryConfigValueError if
    compulsory and no (truthy) value is found; raise ConfigValueError
    if the value references an unbound environment variable.
    """
    value = t_node.get_value(
        [key], r_node.get_value([self.SECTION, key], default=default))
    if compulsory and not value:
        raise CompulsoryConfigValueError([key], None, KeyError(key))
    if value:
        try:
            value = env_var_process(value)
        except UnboundEnvironmentVariableError as e:
            raise ConfigValueError([key], value, e)
    return value
def _get_conf(self, r_node, t_node, key, compulsory=False, default=None):
    """Return the value of a configuration."""
    # Target node setting wins; fall back to the section default.
    fallback = r_node.get_value([self.SECTION, key], default=default)
    raw = t_node.get_value([key], fallback)
    if compulsory and not raw:
        raise CompulsoryConfigValueError([key], None, KeyError(key))
    if not raw:
        return raw
    try:
        return env_var_process(raw)
    except UnboundEnvironmentVariableError as exc:
        raise ConfigValueError([key], raw, exc)
def _find_file(self, var, task):
    """Finds a file given a variable name containing the filename.

    Given a variable name and task object, this returns the filename it
    points to, including expanding any * characters with glob. The
    original (unexpanded) setting is saved on "<var>fileconfig" for use
    when writing out config files. If the task has no "<var>file"
    attribute, or the glob matches nothing, the task is returned
    unchanged.
    """
    filevar = var + "file"
    if hasattr(task, filevar):
        configvar = var + "fileconfig"
        # Preserve the original setting before expansion.
        setattr(task, configvar, getattr(task, filevar))
        filenames = glob.glob(env_var_process(getattr(task, filevar)))
        # IDIOM FIX: truthiness test instead of len(...) > 0.
        # NOTE(review): glob order is arbitrary, so with multiple
        # matches the chosen file is unspecified — confirm whether the
        # first match should be sorted.
        if filenames:
            setattr(task, filevar, os.path.abspath(filenames[0]))
    return task
def process(self, conf_tree, item, orig_keys=None, orig_value=None, **kwargs): """Install files according to [file:*] in conf_tree. kwargs["no_overwrite_mode"]: fail if a target file already exists. """ # Find all the "file:*" nodes. nodes = {} if item == self.SCHEME: for key, node in conf_tree.node.value.items(): if node.is_ignored() or not key.startswith(self.PREFIX): continue nodes[key] = node else: node = conf_tree.node.get([item], no_ignore=True) if node is None: raise ConfigProcessError(orig_keys, item) nodes[item] = node if not nodes: return # Create database to store information for incremental updates, # if it does not already exist. loc_dao = LocDAO() loc_dao.create() cwd = os.getcwd() file_install_root = conf_tree.node.get_value( ["file-install-root"], os.getenv("ROSE_FILE_INSTALL_ROOT", None)) if file_install_root: file_install_root = env_var_process(file_install_root) self.manager.fs_util.makedirs(file_install_root) self.manager.fs_util.chdir(file_install_root) try: self._process(conf_tree, nodes, loc_dao, **kwargs) finally: if cwd != os.getcwd(): self.manager.fs_util.chdir(cwd)
def _get_conf(self, conf_tree, key, max_args=0):
    """Get a list of cycles from a configuration setting.

    key -- An option key in self.SECTION to locate the setting.
    max_args -- Maximum number of extra arguments for an item in the
                list.

    The value of the setting is expected to be split by shlex.split
    into a list of items. If max_args == 0, an item should be a string
    representing a cycle or an cycle offset. If max_args > 0, the cycle
    or cycle offset string can, optionally, have arguments. The
    arguments are delimited by colons ":". E.g.:

    prune-remote-logs-at=-6h -12h
    prune-datac-at=-6h:foo/* -12h:'bar/* baz/*' -1d
    prune-work-at=-6h:t1*:*.tar -12h:t1*: -12h:*.gz -1d

    If max_args == 0, return a list of cycles.
    If max_args > 0, return a list of (cycle, [arg, ...])
    """
    items_str = conf_tree.node.get_value([self.SECTION, key])
    if items_str is None:
        return []
    try:
        items_str = env_var_process(items_str)
    except UnboundEnvironmentVariableError as exc:
        raise ConfigValueError([self.SECTION, key], items_str, exc)
    items = []
    dshift = RoseDateShifter(task_cycle_time_mode=True)
    for item_str in shlex.split(items_str):
        args = item_str.split(":", max_args)
        item = args.pop(0)
        cycle = item
        if dshift.is_task_cycle_time_mode():
            # Try to resolve the item as an offset from the task cycle
            # time; a non-offset item is kept verbatim.
            try:
                cycle = dshift.date_shift(offset=item)
            except ValueError:
                pass
        if max_args:
            items.append((cycle, args))
        else:
            items.append(cycle)
    return items
def _get_conf(self, conf_tree, key, arg_ok=False):
    """Get a list of cycles from a configuration setting.

    key -- An option key in self.SECTION to locate the setting.
    arg_ok -- A boolean to indicate whether an item in the list can
              have extra arguments or not.

    The value of the setting is expected to be split by shlex.split
    into a list of items. If arg_ok is False, an item should be a
    string representing a cycle or an cycle offset. If arg_ok is True,
    the cycle or cycle offset string can, optionally, have an argument
    after a colon. E.g.:

    prune-remote-logs-at=-6h -12h
    prune-datac-at=-6h:foo/* -12h:'bar/* baz/*' -1d

    If arg_ok is False, return a list of cycles.
    If arg_ok is True, return a list of (cycle, arg)
    """
    items_str = conf_tree.node.get_value([self.SECTION, key])
    if items_str is None:
        return []
    try:
        items_str = env_var_process(items_str)
    except UnboundEnvironmentVariableError as e:
        raise ConfigValueError([self.SECTION, key], items_str, e)
    items = []
    ds = RoseDateShifter(task_cycle_time_mode=True)
    for item_str in shlex.split(items_str):
        # Split "cycle:arg" only when arguments are permitted.
        if arg_ok and ":" in item_str:
            item, arg = item_str.split(":", 1)
        else:
            item, arg = (item_str, None)
        # Resolve recognised offsets against the task cycle time.
        if ds.is_task_cycle_time_mode() and ds.is_offset(item):
            cycle = ds.date_shift(offset=item)
        else:
            cycle = item
        if arg_ok:
            items.append((cycle, arg))
        else:
            items.append(cycle)
    return items
def process(self, config, item, orig_keys=None, orig_value=None, **kwargs):
    """Export environment variables in an env section in "config".

    Returns a dict of the exported variables, or None if "item" is
    missing or ignored. Raises ConfigProcessError on an unbound
    environment variable.
    """
    # Hoisted the duplicate config.get([item], no_ignore=True) lookup.
    env_node = config.get([item], no_ignore=True)
    if env_node is None:
        return
    # IDIOM FIX: dict.has_key() is deprecated; use the "in" operator.
    if "UNDEF" in os.environ:
        os.environ.pop("UNDEF")
    environ = {}
    if env_node and not env_node.state:
        for key, node in env_node.value.iteritems():
            # Skip ignored settings (non-empty state).
            if node.state:
                continue
            try:
                environ[key] = env_var_process(node.value)
            except UnboundEnvironmentVariableError as e:
                raise ConfigProcessError([item, key], node.value, e)
            environ[key] = os.path.expanduser(environ[key])  # ~ expansion
    for key, value in sorted(environ.items()):
        env_export(key, value, self.manager.event_handler)
    return environ
def _run_init_dir_work(self, opts, suite_name, name, conf_tree=None,
                       r_opts=None, locs_conf=None):
    """Create a named suite's directory.

    The root is looked up as "root-dir{name}", falling back to the
    older "root-dir-name" form. An absolute root hosts the real
    directory (with a symlink at "name"); a relative root is used
    directly as the source path.
    """
    item_path = os.path.realpath(name)
    item_path_source = item_path
    key = "root-dir{" + name + "}"
    item_root = self._run_conf(key, conf_tree=conf_tree, r_opts=r_opts)
    if item_root is None:  # backward compat
        item_root = self._run_conf(
            "root-dir-" + name, conf_tree=conf_tree, r_opts=r_opts)
    if item_root:
        if locs_conf is not None:
            # Record the chosen root for the localhost location config.
            locs_conf.set(["localhost", key], item_root)
        item_root = env_var_process(item_root)
        suite_dir_rel = self._suite_dir_rel(suite_name)
        if os.path.isabs(item_root):
            item_path_source = os.path.join(item_root, suite_dir_rel, name)
        else:
            item_path_source = item_root
        item_path_source = os.path.realpath(item_path_source)
    if item_path == item_path_source:
        # Root resolves to the working location: no symlink needed.
        if opts.new_mode:
            self.fs_util.delete(name)
        self.fs_util.makedirs(name)
    else:
        if opts.new_mode:
            self.fs_util.delete(item_path_source)
        self.fs_util.makedirs(item_path_source)
        # Compute a relative symlink source where it is shorter than
        # the absolute one.
        if os.sep in name:
            dirname_of_name = os.path.dirname(name)
            self.fs_util.makedirs(dirname_of_name)
            item_path_source_rel = os.path.relpath(
                item_path_source, os.path.realpath(dirname_of_name))
        else:
            item_path_source_rel = os.path.relpath(item_path_source)
        if len(item_path_source_rel) < len(item_path_source):
            self.fs_util.symlink(
                item_path_source_rel, name, opts.no_overwrite_mode)
        else:
            self.fs_util.symlink(
                item_path_source, name, opts.no_overwrite_mode)
def process(self, conf_tree, item, orig_keys=None, orig_value=None,
            **kwargs):
    """Process [jinja2:*] in "conf_tree.node".

    For each non-ignored [jinja2:TARGET] section, rewrite TARGET in
    place with a generated header ("#!scheme" line plus "{% set %}"
    assignments) replacing any previously inserted header.
    """
    for key, node in sorted(conf_tree.node.value.items()):
        if (node.is_ignored() or not key.startswith(self.PREFIX) or
                not node.value):
            continue
        target = key[len(self.PREFIX):]
        # Target must exist and be readable/writable.
        if not os.access(target, os.F_OK | os.R_OK | os.W_OK):
            continue
        # Build the new content in a temporary file first.
        f = TemporaryFile()
        f.write("#!" + self.SCHEME + "\n")
        f.write(self.MSG_INIT)
        for k, n in sorted(node.value.items()):
            if n.is_ignored():
                continue
            try:
                value = env_var_process(n.value)
            except UnboundEnvironmentVariableError as e:
                raise ConfigProcessError([key, k], n.value, e)
            f.write("{%% set %s=%s %%}\n" % (k, value))
        f.write(self.MSG_DONE)
        # Copy the target, skipping any previously inserted header
        # (scheme line on line 1, then the init...done block).
        line_n = 0
        is_in_old_insert = False
        for line in open(target):
            line_n += 1
            if line_n == 1 and line.rstrip().lower() == "#!" + self.SCHEME:
                continue
            elif line_n == 2 and line == self.MSG_INIT:
                is_in_old_insert = True
                continue
            elif is_in_old_insert and line == self.MSG_DONE:
                is_in_old_insert = False
                continue
            elif is_in_old_insert:
                continue
            f.write(line)
        f.seek(0)
        # NOTE(review): the open() handles here are not explicitly
        # closed; this relies on CPython reference-count finalisation.
        open(target, "w").write(f.read())
        f.close()
def _run_init_dir_work(self, opts, suite_name, name, config=None,
                       r_opts=None):
    """Create a named suite's directory.

    If a "root-dir-<name>" setting exists, the real directory is
    created under that root and symlinked to "name"; otherwise "name"
    is created directly. opts.new_mode deletes any existing directory
    first.
    """
    local_path = os.path.realpath(name)
    source_path = local_path
    conf_key = "root-dir-" + name
    root_value = self._run_conf(conf_key, config=config, r_opts=r_opts)
    if root_value is not None:
        expanded_root = env_var_process(root_value)
        rel_path = self._suite_dir_rel(suite_name)
        source_path = os.path.realpath(
            os.path.join(expanded_root, rel_path, name))
    if local_path == source_path:
        # Root resolves to the working location: no symlink needed.
        if opts.new_mode:
            self.fs_util.delete(name)
        self.fs_util.makedirs(name)
        return
    if opts.new_mode:
        self.fs_util.delete(source_path)
    self.fs_util.makedirs(source_path)
    self.fs_util.symlink(source_path, name, opts.no_overwrite_mode)
def pull(self, loc, conf_tree):
    """Write namelist to loc.cache.

    Raises ConfigProcessError if a value references an unbound
    environment variable.
    """
    sections = self.parse(loc, conf_tree)
    if loc.name.endswith("(:)"):
        # Python 2 cmp-function sort of the matched section names.
        sections.sort(rose.config.sort_settings)
    # LEAK FIX: use a "with" block so the cache file is closed even if
    # ConfigProcessError is raised mid-write (the later version of this
    # method already does this).
    with open(loc.cache, "wb") as handle:
        for section in sections:
            section_value = conf_tree.node.get_value([section])
            group = RE_NAMELIST_GROUP.match(section).group(1)
            nlg = "&" + group + "\n"
            for key, node in sorted(section_value.items()):
                # Skip ignored settings (non-empty state).
                if node.state:
                    continue
                try:
                    value = env_var_process(node.value)
                except UnboundEnvironmentVariableError as exc:
                    raise ConfigProcessError(
                        [section, key], node.value, exc)
                nlg += "%s=%s,\n" % (key, value)
            nlg += "/" + "\n"
            handle.write(nlg)
            self.manager.handle_event(NamelistEvent(nlg))
def pull(self, loc, conf_tree):
    """Write namelist to loc.cache.

    Raises ConfigProcessError if a value references an unbound
    environment variable.
    """
    sections = self.parse(loc, conf_tree)
    if loc.name.endswith("(:)"):
        # Python 3: list.sort takes a key function, so wrap the legacy
        # cmp-style sort_settings with cmp_to_key.
        sections.sort(key=cmp_to_key(rose.config.sort_settings))
    with open(loc.cache, "wb") as handle:
        for section in sections:
            section_value = conf_tree.node.get_value([section])
            group = RE_NAMELIST_GROUP.match(section).group(1)
            nlg = "&" + group + "\n"
            for key, node in sorted(section_value.items()):
                # Skip ignored settings (non-empty state).
                if node.state:
                    continue
                try:
                    value = env_var_process(node.value)
                except UnboundEnvironmentVariableError as exc:
                    raise ConfigProcessError(
                        [section, key], node.value, exc)
                nlg += "%s=%s,\n" % (key, value)
            nlg += "/" + "\n"
            # Cache file is opened binary, so encode explicitly.
            handle.write(nlg.encode('UTF-8'))
            self.manager.handle_event(NamelistEvent(nlg))
def process(self, conf_tree, item, orig_keys=None, orig_value=None, **kwargs):
    """Process [jinja2:*] in "conf_tree.node".

    Arguments:
        conf_tree:
            The relevant rose.config_tree.ConfigTree object with the
            full configuration.
        item: The current configuration item to process.
        orig_keys: The keys for locating the originating setting in
                   conf_tree in a recursive processing. None implies a
                   top level call.
        orig_value: The value of orig_keys in conf_tree.
        **kwargs:
            environ (dict): suite level environment variables.

    Fix: close the target and source file handles deterministically
    (previously "open(target, ...)" and "open(source)" were never
    closed).
    """
    for s_key, s_node in sorted(conf_tree.node.value.items()):
        if (s_node.is_ignored() or not s_key.startswith(self.PREFIX) or
                not s_node.value):
            continue
        target = s_key[len(self.PREFIX):]
        source = os.path.join(conf_tree.files[target], target)
        if not os.access(source, os.F_OK | os.R_OK):
            continue
        scheme_ln = self.SCHEME_TEMPL % self.SCHEME
        msg_init_ln = self.COMMENT_TEMPL % self.MSG_INIT
        msg_done_ln = self.COMMENT_TEMPL % self.MSG_DONE
        # Build the new header (scheme line + delimited assign block) in
        # a temporary file, then append the original file body with any
        # previously-inserted block stripped out.
        tmp_file = NamedTemporaryFile()
        tmp_file.write(scheme_ln)
        tmp_file.write(msg_init_ln)
        for key, node in sorted(s_node.value.items()):
            if node.is_ignored():
                continue
            try:
                value = env_var_process(node.value)
            except UnboundEnvironmentVariableError as exc:
                raise ConfigProcessError([s_key, key], node.value, exc)
            tmp_file.write(self.ASSIGN_TEMPL % (key, value))
        environ = kwargs.get("environ")
        if environ:
            tmp_file.write('[cylc]\n')
            tmp_file.write(' [[environment]]\n')
            for key, value in sorted(environ.items()):
                tmp_file.write(' %s=%s\n' % (key, value))
        tmp_file.write(msg_done_ln)
        line_n = 0
        is_in_old_insert = False
        with open(source) as source_file:  # fix: close the source handle
            for line in source_file:
                line_n += 1
                if line_n == 1 and line.strip().lower() == scheme_ln.strip():
                    continue
                elif line_n == 2 and line == msg_init_ln:
                    is_in_old_insert = True
                    continue
                elif is_in_old_insert and line == msg_done_ln:
                    is_in_old_insert = False
                    continue
                elif is_in_old_insert:
                    continue
                tmp_file.write(line)
        tmp_file.seek(0)
        if os.access(target, os.F_OK | os.R_OK):
            if filecmp.cmp(target, tmp_file.name):  # identical
                tmp_file.close()
                continue
            else:
                self.manager.fs_util.delete(target)
        # Write content to target
        with open(target, "w") as target_file:  # fix: close the target file
            for line in tmp_file:
                target_file.write(line)
        event = FileSystemEvent(FileSystemEvent.INSTALL, target)
        self.manager.handle_event(event)
        tmp_file.close()
def _run_target_setup(
        self, app_runner, compress_manager, config, t_key, t_node):
    """Helper for _run. Set up a target.

    Reads the target's settings from "t_node", gathers and checksums its
    source files, picks a compression scheme and applies any
    rename-format, returning a configured RoseArchTarget.  Configuration
    errors on optional settings mark the target ST_BAD via events;
    errors on parse-time settings raise.
    """
    target_prefix = self._get_conf(
        config, t_node, "target-prefix", default="")
    s_key_tail = t_key.split(":", 1)[1]
    try:
        s_key_tail = env_var_process(s_key_tail)
    except UnboundEnvironmentVariableError as exc:
        raise ConfigValueError([t_key, ""], "", exc)
    # A "(...)"-wrapped target name marks the whole target optional.
    is_compulsory_target = True
    if s_key_tail.startswith("(") and s_key_tail.endswith(")"):
        s_key_tail = s_key_tail[1:-1]
        is_compulsory_target = False
    target = RoseArchTarget(target_prefix + s_key_tail)
    target.command_format = self._get_conf(
        config, t_node, "command-format", compulsory=True)
    try:
        # Dry-run the format string to catch bad placeholders early.
        target.command_format % {"sources": "", "target": ""}
    except KeyError as exc:
        target.status = target.ST_BAD
        app_runner.handle_event(
            RoseArchValueError(
                target.name,
                "command-format",
                target.command_format,
                type(exc).__name__,
                exc
            )
        )
    target.source_edit_format = self._get_conf(
        config, t_node, "source-edit-format", default="")
    try:
        target.source_edit_format % {"in": "", "out": ""}
    except KeyError as exc:
        target.status = target.ST_BAD
        app_runner.handle_event(
            RoseArchValueError(
                target.name,
                "source-edit-format",
                target.source_edit_format,
                type(exc).__name__,
                exc
            )
        )
    update_check_str = self._get_conf(config, t_node, "update-check")
    try:
        # Checksum function used to detect source changes between runs.
        checksum_func = get_checksum_func(update_check_str)
    except ValueError as exc:
        raise RoseArchValueError(
            target.name, "update-check", update_check_str,
            type(exc).__name__, exc)
    source_prefix = self._get_conf(
        config, t_node, "source-prefix", default="")
    for source_glob in shlex.split(
            self._get_conf(config, t_node, "source", compulsory=True)):
        # A "(...)"-wrapped glob marks that source optional.
        is_compulsory_source = is_compulsory_target
        if source_glob.startswith("(") and source_glob.endswith(")"):
            source_glob = source_glob[1:-1]
            is_compulsory_source = False
        paths = glob(source_prefix + source_glob)
        if not paths:
            exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
                          source_prefix + source_glob)
            app_runner.handle_event(ConfigValueError(
                [t_key, "source"], source_glob, exc))
            if is_compulsory_source:
                target.status = target.ST_BAD
            continue
        for path in paths:
            # N.B. source_prefix may not be a directory
            name = path[len(source_prefix):]
            for path_, checksum, _ in get_checksum(path, checksum_func):
                if checksum is None:  # is directory
                    continue
                if path_:
                    target.sources[checksum] = RoseArchSource(
                        checksum,
                        os.path.join(name, path_),
                        os.path.join(path, path_))
                else:  # path is a file
                    target.sources[checksum] = RoseArchSource(
                        checksum, name, path)
    if not target.sources:
        # No sources at all: bad if compulsory, otherwise a null target.
        if is_compulsory_target:
            target.status = target.ST_BAD
        else:
            target.status = target.ST_NULL
    target.compress_scheme = self._get_conf(config, t_node, "compress")
    if not target.compress_scheme:
        # No explicit scheme: infer one from the target name's extension.
        target_base = target.name
        if "/" in target.name:
            target_base = target.name.rsplit("/", 1)[1]
        if "." in target_base:
            tail = target_base.split(".", 1)[1]
            if compress_manager.get_handler(tail):
                target.compress_scheme = tail
    elif compress_manager.get_handler(target.compress_scheme) is None:
        app_runner.handle_event(ConfigValueError(
            [t_key, "compress"],
            target.compress_scheme,
            KeyError(target.compress_scheme)))
        target.status = target.ST_BAD
    rename_format = self._get_conf(config, t_node, "rename-format")
    if rename_format:
        rename_parser_str = self._get_conf(config, t_node, "rename-parser")
        if rename_parser_str:
            try:
                rename_parser = re.compile(rename_parser_str)
            except re.error as exc:
                raise RoseArchValueError(
                    target.name, "rename-parser", rename_parser_str,
                    type(exc).__name__, exc)
        else:
            rename_parser = None
        for source in target.sources.values():
            # Substitutions available to rename-format, optionally
            # augmented by named groups from rename-parser.
            dict_ = {
                "cycle": os.getenv("ROSE_TASK_CYCLE_TIME"),
                "name": source.name}
            if rename_parser:
                match = rename_parser.match(source.name)
                if match:
                    dict_.update(match.groupdict())
            try:
                source.name = rename_format % dict_
            except (KeyError, ValueError) as exc:
                raise RoseArchValueError(
                    target.name, "rename-format", rename_format,
                    type(exc).__name__, exc)
    return target
def _get_conf(self, app_runner, conf_tree, key, max_args=0):
    """Get a list of cycles from a configuration setting.

    key -- An option key in self.SECTION to locate the setting.
    max_args -- Maximum number of extra arguments for an item in the
                list.

    The value of the setting is expected to be split by shlex.split into
    a list of items. If max_args == 0, an item should be a string
    representing a cycle or an cycle offset. If max_args > 0, the cycle
    or cycle offset string can, optionally, have arguments. The
    arguments are delimited by colons ":".
    E.g.:

    prune-remote-logs-at=-PT6H -PT12H
    prune-server-logs-at=-P7D
    prune-datac-at=-PT6H:foo/* -PT12H:'bar/* baz/*' -P1D
    prune-work-at=-PT6H:t1*:*.tar -PT12H:t1*: -PT12H:*.gz -P1D

    If max_args == 0, return a list of cycles.
    If max_args > 0, return a list of (cycle, [arg, ...])
    """
    items_str = conf_tree.node.get_value([self.SECTION, key])
    if items_str is None:
        return []
    try:
        items_str = env_var_process(items_str)
    except UnboundEnvironmentVariableError as exc:
        raise ConfigValueError([self.SECTION, key], items_str, exc)
    items = []
    # Reference cycle point of the current task, from the environment.
    ref_point_str = os.getenv(RoseDateTimeOperator.TASK_CYCLE_TIME_ENV)
    try:
        ref_point = None
        ref_fmt = None  # parsed lazily, once, on first date-time item
        for item_str in shlex.split(items_str):
            args = item_str.split(":", max_args)
            when = args.pop(0)
            cycle = when
            if ref_point_str is not None:
                if self._get_cycling_mode() == "integer":
                    # Integer cycling
                    if "P" in when:  # "when" is an offset
                        cycle = str(
                            int(ref_point_str) +
                            int(when.replace("P", "")))
                    else:  # "when" is a cycle point
                        cycle = str(when)
                else:
                    # Date-time cycling
                    if ref_fmt is None:
                        ref_point, ref_fmt = (
                            app_runner.date_time_oper.date_parse(
                                ref_point_str))
                    try:
                        # "when" may be an absolute date-time...
                        time_point = app_runner.date_time_oper.date_parse(
                            when)[0]
                    except ValueError:
                        # ...or an offset from the reference point.
                        time_point = app_runner.date_time_oper.date_shift(
                            ref_point, when)
                    cycle = app_runner.date_time_oper.date_format(
                        ref_fmt, time_point)
            if max_args:
                items.append((cycle, args))
            else:
                items.append(cycle)
    except ValueError as exc:
        raise ConfigValueError([self.SECTION, key], items_str, exc)
    return items
def _clean(self, suite_name, only_items=None):
    """Perform the cleaning operations.

    Delete the suite's cleanable locations on every host recorded in the
    suite's "rose-suite-run.locs" file (localhost directly; remote hosts
    via a login-shell ssh command).

    Fixes:
    * sorted() no longer accepts a bare comparison function on Python 3;
      wrap self._auth_node_cmp with functools.cmp_to_key.
    * Copy "only_items" before sorting, so the caller's list is not
      mutated as a side effect.
    """
    from functools import cmp_to_key

    engine = self.suite_engine_proc
    suite_dir_rel = engine.get_suite_dir_rel(suite_name)
    locs_file_path = engine.get_suite_dir(suite_name, "log",
                                          "rose-suite-run.locs")
    # Always clean localhost, plus whatever hosts the locs file records.
    locs_conf = ConfigNode().set(["localhost"], {})
    try:
        ConfigLoader().load(locs_file_path, locs_conf)
    except IOError:
        pass
    if only_items:
        items = list(only_items)  # copy: don't mutate the caller's list
    else:
        items = self.CLEANABLE_ROOTS + [""]
    items.sort()
    uuid_str = str(uuid4())
    for auth, node in sorted(locs_conf.value.items(),
                             key=cmp_to_key(self._auth_node_cmp)):
        locs = []
        for item in items:
            if item:
                locs.append(os.path.join(suite_dir_rel, item))
            else:
                locs.append(suite_dir_rel)
            # Items may also live under an alternate configured root.
            if item and os.path.normpath(item) in self.CLEANABLE_ROOTS:
                conf_key = "root-dir-" + item
            elif item == "":
                conf_key = "root-dir"
            else:
                continue
            item_root = node.get_value([conf_key])
            if item_root:
                loc_rel = suite_dir_rel
                if item:
                    loc_rel = os.path.join(suite_dir_rel, item)
                locs.append(os.path.join(item_root, loc_rel))
        if auth == "localhost":
            for loc in locs:
                loc = os.path.abspath(env_var_process(loc))
                for name in sorted(glob(loc)):
                    engine.fs_util.delete(name)
        else:
            # Invoke bash as a login shell. The root location of a path
            # may be in $DIR syntax, which can only be expanded correctly
            # in a login shell. However, profile scripts invoked on login
            # to the remote host may print lots of junks. Hence we use a
            # UUID here as a delimiter. Only output after the UUID lines
            # are desirable lines.
            command = engine.popen.get_cmd("ssh", auth, "bash", "-l", "-c")
            command += [
                "'echo %(uuid)s; ls -d %(locs)s|sort; rm -rf %(locs)s'" % {
                    "locs": engine.popen.list_to_shell_str(locs),
                    "uuid": uuid_str,
                },
            ]
            is_after_uuid_str = False
            for line in engine.popen(*command)[0].splitlines():
                if is_after_uuid_str:
                    engine.handle_event(
                        FileSystemEvent(FileSystemEvent.DELETE,
                                        auth + ":" + line.strip()))
                elif line == uuid_str:
                    is_after_uuid_str = True
def _run(self, dao, app_runner, config):
    """Transform and archive suite files.

    This application is designed to work under "rose task-run" in a
    suite.

    Builds one RoseArchTarget per [SECTION:target] configuration section
    via _run_target_setup, de-duplicates them against the database, then
    updates each via _run_target_update.  Returns the number of bad
    targets.
    """
    compress_manager = SchemeHandlersManager(
        [os.path.dirname(os.path.dirname(sys.modules["rose"].__file__))],
        "rose.apps.rose_arch_compressions", ["compress_sources"],
        None, app_runner)
    # Set up the targets
    s_key_tails = set()
    targets = []
    for t_key, t_node in sorted(config.value.items()):
        if t_node.is_ignored() or ":" not in t_key:
            continue
        s_key_head, s_key_tail = t_key.split(":", 1)
        if s_key_head != self.SECTION or not s_key_tail:
            continue
        # Determine target path.
        s_key_tail = t_key.split(":", 1)[1]
        try:
            s_key_tail = env_var_process(s_key_tail)
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError([t_key, ""], "", exc)
        # If parenthesised target is optional.
        is_compulsory_target = True
        if s_key_tail.startswith("(") and s_key_tail.endswith(")"):
            s_key_tail = s_key_tail[1:-1]
            is_compulsory_target = False
        # Don't permit duplicate targets.
        if s_key_tail in s_key_tails:
            raise RoseArchDuplicateError([t_key], '', s_key_tail)
        else:
            s_key_tails.add(s_key_tail)
        # NOTE(review): this call passes more arguments than the
        # _run_target_setup signature seen elsewhere in this file —
        # confirm against the actual helper definition.
        target = self._run_target_setup(
            app_runner, compress_manager, config, t_key, s_key_tail,
            t_node, is_compulsory_target)
        # ST_OLD: unchanged since last run, so no re-archiving needed.
        old_target = dao.select(target.name)
        if old_target is None or old_target != target:
            dao.delete(target)
        else:
            target.status = target.ST_OLD
        targets.append(target)
    targets.sort(key=lambda target: target.name)
    # Delete from database items that are no longer relevant
    dao.delete_all(filter_targets=targets)
    # Update the targets
    for target in targets:
        self._run_target_update(dao, app_runner, compress_manager, target)
    return [target.status for target in targets].count(
        RoseArchTarget.ST_BAD)
def _poll(self, conf_tree):
    """Poll for prerequisites of applications.

    Reads [poll] test / all-files / any-files / file-test / delays from
    the configuration, then repeatedly checks the conditions, sleeping
    between attempts, until all are satisfied or the delay list is
    exhausted (in which case PollTimeoutError is raised).
    """
    # Poll configuration
    poll_test = conf_tree.node.get_value(["poll", "test"])
    poll_all_files_value = conf_tree.node.get_value(["poll", "all-files"])
    poll_all_files = []
    if poll_all_files_value:
        try:
            poll_all_files = shlex.split(
                env_var_process(poll_all_files_value))
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError(["poll", "all-files"],
                                   poll_all_files_value, exc)
    poll_any_files_value = conf_tree.node.get_value(["poll", "any-files"])
    poll_any_files = []
    if poll_any_files_value:
        try:
            poll_any_files = shlex.split(
                env_var_process(poll_any_files_value))
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError(["poll", "any-files"],
                                   poll_any_files_value, exc)
    # file-test is a per-file shell command template; "{}" is replaced
    # by the file name, so it must be present.
    poll_file_test = None
    if poll_all_files or poll_any_files:
        poll_file_test = conf_tree.node.get_value(["poll", "file-test"])
        if poll_file_test and "{}" not in poll_file_test:
            raise ConfigValueError(["poll", "file-test"], poll_file_test,
                                   ConfigValueError.SYNTAX)
    poll_delays = []
    if poll_test or poll_all_files or poll_any_files:
        # Parse something like this: delays=10,4*30s,2.5m,2*1h
        # No unit or s: seconds
        # m: minutes
        # h: hours
        # N*: repeat the value N times
        poll_delays_value = conf_tree.node.get_value(["poll", "delays"],
                                                     default="")
        poll_delays_value = poll_delays_value.strip()
        units = {"h": 3600, "m": 60, "s": 1}
        if poll_delays_value:
            for item in poll_delays_value.split(","):
                value = item.strip()
                repeat = 1
                if "*" in value:
                    repeat, value = value.split("*", 1)
                    try:
                        repeat = int(repeat)
                    except ValueError as exc:
                        raise ConfigValueError(["poll", "delays"],
                                               poll_delays_value,
                                               ConfigValueError.SYNTAX)
                unit = None
                if value[-1].lower() in units.keys():
                    unit = units[value[-1]]
                    value = value[:-1]
                try:
                    value = float(value)
                except ValueError as exc:
                    raise ConfigValueError(["poll", "delays"],
                                           poll_delays_value,
                                           ConfigValueError.SYNTAX)
                if unit:
                    value *= unit
                poll_delays += [value] * repeat
        else:
            poll_delays = [0]  # poll once without a delay
    # Poll
    t_init = time()
    while poll_delays and (poll_test or poll_any_files or poll_all_files):
        poll_delay = poll_delays.pop(0)
        if poll_delay:
            sleep(poll_delay)
        if poll_test:
            ret_code = self.popen.run(
                poll_test, shell=True,
                stdout=sys.stdout, stderr=sys.stderr)[0]
            self.handle_event(PollEvent(time(), poll_test, ret_code == 0))
            if ret_code == 0:
                # Test satisfied; stop running it.
                poll_test = None
        # any-files: one success satisfies the whole condition.
        any_files = list(poll_any_files)
        for file_ in any_files:
            if self._poll_file(file_, poll_file_test):
                self.handle_event(PollEvent(time(), "any-files", True))
                poll_any_files = []
                break
        # all-files: drop each file as it becomes available.
        all_files = list(poll_all_files)
        for file_ in all_files:
            if self._poll_file(file_, poll_file_test):
                poll_all_files.remove(file_)
        if all_files and not poll_all_files:
            self.handle_event(PollEvent(time(), "all-files", True))
    # Anything still unsatisfied here means the poll timed out.
    failed_items = []
    if poll_test:
        failed_items.append("test")
    if poll_any_files:
        failed_items.append("any-files")
    if poll_all_files:
        failed_items.append(
            "all-files:" + self.popen.list_to_shell_str(poll_all_files))
    if failed_items:
        now = time()
        raise PollTimeoutError(now, now - t_init, failed_items)
def _run(self, dao, app_runner, config):
    """Transform and archive suite files.

    This application is designed to work under "rose task-run" in a
    suite.

    Fix: the archive command's stdout was reported twice (duplicated
    "app_runner.handle_event(out)" call); it is now reported once, as in
    the later version of this function.
    """
    path = os.path.dirname(os.path.dirname(sys.modules["rose"].__file__))
    compress_manager = SchemeHandlersManager(
        [path], "rose.apps.rose_arch_compressions", ["compress_sources"],
        None, app_runner)
    # Set up the targets
    cycle = os.getenv("ROSE_TASK_CYCLE_TIME")
    targets = []
    for t_key, t_node in sorted(config.value.items()):
        if t_node.is_ignored() or ":" not in t_key:
            continue
        s_key_head, s_key_tail = t_key.split(":", 1)
        if s_key_head != self.SECTION or not s_key_tail:
            continue
        target_prefix = self._get_conf(
            config, t_node, "target-prefix", default="")
        try:
            s_key_tail = env_var_process(s_key_tail)
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError([t_key, ""], "", exc)
        target_name = target_prefix + s_key_tail
        target = RoseArchTarget(target_name)
        target.command_format = self._get_conf(
            config, t_node, "command-format", compulsory=True)
        try:
            # Dry-run the format string to catch bad placeholders early.
            target.command_format % {"sources": "", "target": ""}
        except KeyError as exc:
            target.status = target.ST_BAD
            app_runner.handle_event(
                RoseArchValueError(
                    target.name,
                    "command-format",
                    target.command_format,
                    type(exc).__name__,
                    exc
                )
            )
        source_str = self._get_conf(
            config, t_node, "source", compulsory=True)
        source_prefix = self._get_conf(
            config, t_node, "source-prefix", default="")
        target.source_edit_format = self._get_conf(
            config, t_node, "source-edit-format", default="")
        try:
            target.source_edit_format % {"in": "", "out": ""}
        except KeyError as exc:
            target.status = target.ST_BAD
            app_runner.handle_event(
                RoseArchValueError(
                    target.name,
                    "source-edit-format",
                    target.source_edit_format,
                    type(exc).__name__,
                    exc
                )
            )
        update_check_str = self._get_conf(
            config, t_node, "update-check", default="md5sum")
        try:
            # Checksum function used to detect source changes.
            checksum_func = get_checksum_func(update_check_str)
        except KeyError as exc:
            raise RoseArchValueError(
                target.name, "update-check", update_check_str,
                type(exc).__name__, exc
            )
        for source_glob in shlex.split(source_str):
            paths = glob(source_prefix + source_glob)
            if not paths:
                exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
                              source_glob)
                app_runner.handle_event(ConfigValueError(
                    [t_key, "source"], source_glob, exc))
                target.status = target.ST_BAD
                continue
            for path in paths:
                # N.B. source_prefix may not be a directory
                name = path[len(source_prefix):]
                for path_, checksum, _ in get_checksum(
                        path, checksum_func):
                    if checksum is None:  # is directory
                        continue
                    if path_:
                        target.sources[checksum] = RoseArchSource(
                            checksum,
                            os.path.join(name, path_),
                            os.path.join(path, path_))
                    else:  # path is a file
                        target.sources[checksum] = RoseArchSource(
                            checksum, name, path)
        target.compress_scheme = self._get_conf(config, t_node, "compress")
        if target.compress_scheme:
            if (compress_manager.get_handler(target.compress_scheme)
                    is None):
                app_runner.handle_event(ConfigValueError(
                    [t_key, "compress"],
                    target.compress_scheme,
                    KeyError(target.compress_scheme)))
                target.status = target.ST_BAD
        else:
            # No explicit scheme: infer one from the name's extension.
            target_base = target.name
            if "/" in target.name:
                target_base = target.name.rsplit("/", 1)[1]
            if "." in target_base:
                tail = target_base.split(".", 1)[1]
                if compress_manager.get_handler(tail):
                    target.compress_scheme = tail
        rename_format = self._get_conf(config, t_node, "rename-format")
        if rename_format:
            rename_parser_str = self._get_conf(config, t_node,
                                               "rename-parser")
            if rename_parser_str:
                try:
                    rename_parser = re.compile(rename_parser_str)
                except re.error as exc:
                    raise RoseArchValueError(
                        target.name, "rename-parser", rename_parser_str,
                        type(exc).__name__, exc
                    )
            else:
                rename_parser = None
            for source in target.sources.values():
                dict_ = {"cycle": cycle, "name": source.name}
                if rename_parser:
                    match = rename_parser.match(source.name)
                    if match:
                        dict_.update(match.groupdict())
                try:
                    source.name = rename_format % dict_
                except (KeyError, ValueError) as exc:
                    raise RoseArchValueError(
                        target.name, "rename-format", rename_format,
                        type(exc).__name__, exc
                    )
        # ST_OLD: unchanged since last run, so no re-archiving needed.
        old_target = dao.select(target.name)
        if old_target is None or old_target != target:
            dao.delete(target)
        else:
            target.status = target.ST_OLD
        targets.append(target)
    # Delete from database items that are no longer relevant
    dao.delete_all(filter_targets=targets)
    # Update the targets
    for target in targets:
        if target.status == target.ST_OLD:
            app_runner.handle_event(RoseArchEvent(target))
            continue
        target.command_rc = 1
        dao.insert(target)
        if target.status == target.ST_BAD:
            app_runner.handle_event(RoseArchEvent(target))
            continue
        work_dir = mkdtemp()
        t_init = time()
        t_tran, t_arch = t_init, t_init
        ret_code = None
        try:
            # Rename/edit sources
            target.status = target.ST_BAD
            rename_required = False
            for source in target.sources.values():
                if source.name != source.orig_name:
                    rename_required = True
                    break
            if rename_required or target.source_edit_format:
                for source in target.sources.values():
                    source.path = os.path.join(work_dir, source.name)
                    source_path_d = os.path.dirname(source.path)
                    app_runner.fs_util.makedirs(source_path_d)
                    if target.source_edit_format:
                        fmt_args = {"in": source.orig_path,
                                    "out": source.path}
                        command = target.source_edit_format % fmt_args
                        app_runner.popen.run_ok(command, shell=True)
                    else:
                        app_runner.fs_util.symlink(source.orig_path,
                                                   source.path)
            # Compress sources
            if target.compress_scheme:
                handler = compress_manager.get_handler(
                    target.compress_scheme)
                handler.compress_sources(target, work_dir)
            t_tran = time()
            # Run archive command
            sources = []
            if target.work_source_path:
                sources = [target.work_source_path]
            else:
                for source in target.sources.values():
                    sources.append(source.path)
            sources_str = app_runner.popen.list_to_shell_str(sources)
            target_str = app_runner.popen.list_to_shell_str([target.name])
            command = target.command_format % {"sources": sources_str,
                                               "target": target_str}
            ret_code, out, err = app_runner.popen.run(command, shell=True)
            t_arch = time()
            if ret_code:
                app_runner.handle_event(
                    RosePopenError([command], ret_code, out, err))
            else:
                target.status = target.ST_NEW
                app_runner.handle_event(err, kind=Event.KIND_ERR)
            # Report command stdout exactly once (fix: was duplicated).
            app_runner.handle_event(out)
            target.command_rc = ret_code
            dao.update_command_rc(target)
        finally:
            app_runner.fs_util.delete(work_dir)
            app_runner.handle_event(
                RoseArchEvent(target, [t_init, t_tran, t_arch], ret_code))
    return [target.status for target in targets].count(
        RoseArchTarget.ST_BAD)
def _run(self, dao, app_runner, config):
    """Transform and archive suite files.

    This application is designed to work under "rose task-run" in a
    suite.

    For each [SECTION:target] configuration section: build a
    RoseArchTarget (sources, checksums, compression, renaming), skip it
    if unchanged since the last run (ST_OLD), otherwise edit/compress
    the sources in a temporary work directory and run the configured
    archive command.  Returns the number of bad targets.
    """
    path = os.path.dirname(os.path.dirname(sys.modules["rose"].__file__))
    compress_manager = SchemeHandlersManager(
        [path], "rose.apps.rose_arch_compressions", ["compress_sources"],
        None, app_runner)
    # Set up the targets
    cycle = os.getenv("ROSE_TASK_CYCLE_TIME")
    targets = []
    for t_key, t_node in sorted(config.value.items()):
        if t_node.is_ignored() or ":" not in t_key:
            continue
        s_key_head, s_key_tail = t_key.split(":", 1)
        if s_key_head != self.SECTION or not s_key_tail:
            continue
        target_prefix = self._get_conf(config, t_node, "target-prefix",
                                       default="")
        try:
            s_key_tail = env_var_process(s_key_tail)
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError([t_key, ""], "", exc)
        target_name = target_prefix + s_key_tail
        target = RoseArchTarget(target_name)
        target.command_format = self._get_conf(config, t_node,
                                               "command-format",
                                               compulsory=True)
        try:
            # Dry-run the format string to catch bad placeholders early.
            target.command_format % {"sources": "", "target": ""}
        except KeyError as exc:
            target.status = target.ST_BAD
            app_runner.handle_event(
                RoseArchValueError(target.name, "command-format",
                                   target.command_format,
                                   type(exc).__name__, exc))
        source_str = self._get_conf(config, t_node, "source",
                                    compulsory=True)
        source_prefix = self._get_conf(config, t_node, "source-prefix",
                                       default="")
        target.source_edit_format = self._get_conf(config, t_node,
                                                   "source-edit-format",
                                                   default="")
        try:
            target.source_edit_format % {"in": "", "out": ""}
        except KeyError as exc:
            target.status = target.ST_BAD
            app_runner.handle_event(
                RoseArchValueError(target.name, "source-edit-format",
                                   target.source_edit_format,
                                   type(exc).__name__, exc))
        update_check_str = self._get_conf(config, t_node, "update-check",
                                          default="md5sum")
        try:
            # Checksum function used to detect source changes.
            checksum_func = get_checksum_func(update_check_str)
        except KeyError as exc:
            raise RoseArchValueError(target.name, "update-check",
                                     update_check_str,
                                     type(exc).__name__, exc)
        for source_glob in shlex.split(source_str):
            paths = glob(source_prefix + source_glob)
            if not paths:
                exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
                              source_glob)
                app_runner.handle_event(
                    ConfigValueError([t_key, "source"], source_glob, exc))
                target.status = target.ST_BAD
                continue
            for path in paths:
                # N.B. source_prefix may not be a directory
                name = path[len(source_prefix):]
                for path_, checksum, _ in get_checksum(
                        path, checksum_func):
                    if checksum is None:  # is directory
                        continue
                    if path_:
                        target.sources[checksum] = RoseArchSource(
                            checksum,
                            os.path.join(name, path_),
                            os.path.join(path, path_))
                    else:  # path is a file
                        target.sources[checksum] = RoseArchSource(
                            checksum, name, path)
        target.compress_scheme = self._get_conf(config, t_node, "compress")
        if target.compress_scheme:
            if (compress_manager.get_handler(target.compress_scheme)
                    is None):
                app_runner.handle_event(
                    ConfigValueError([t_key, "compress"],
                                     target.compress_scheme,
                                     KeyError(target.compress_scheme)))
                target.status = target.ST_BAD
        else:
            # No explicit scheme: infer one from the name's extension.
            target_base = target.name
            if "/" in target.name:
                target_base = target.name.rsplit("/", 1)[1]
            if "." in target_base:
                tail = target_base.split(".", 1)[1]
                if compress_manager.get_handler(tail):
                    target.compress_scheme = tail
        rename_format = self._get_conf(config, t_node, "rename-format")
        if rename_format:
            rename_parser_str = self._get_conf(config, t_node,
                                               "rename-parser")
            if rename_parser_str:
                try:
                    rename_parser = re.compile(rename_parser_str)
                except re.error as exc:
                    raise RoseArchValueError(target.name, "rename-parser",
                                             rename_parser_str,
                                             type(exc).__name__, exc)
            else:
                rename_parser = None
            for source in target.sources.values():
                # Substitutions available to rename-format, optionally
                # augmented by named groups from rename-parser.
                dict_ = {"cycle": cycle, "name": source.name}
                if rename_parser:
                    match = rename_parser.match(source.name)
                    if match:
                        dict_.update(match.groupdict())
                try:
                    source.name = rename_format % dict_
                except (KeyError, ValueError) as exc:
                    raise RoseArchValueError(target.name, "rename-format",
                                             rename_format,
                                             type(exc).__name__, exc)
        # ST_OLD: unchanged since last run, so no re-archiving needed.
        old_target = dao.select(target.name)
        if old_target is None or old_target != target:
            dao.delete(target)
        else:
            target.status = target.ST_OLD
        targets.append(target)
    # Delete from database items that are no longer relevant
    dao.delete_all(filter_targets=targets)
    # Update the targets
    for target in targets:
        if target.status == target.ST_OLD:
            app_runner.handle_event(RoseArchEvent(target))
            continue
        target.command_rc = 1
        dao.insert(target)
        if target.status == target.ST_BAD:
            app_runner.handle_event(RoseArchEvent(target))
            continue
        work_dir = mkdtemp()
        t_init = time()
        t_tran, t_arch = t_init, t_init
        ret_code = None
        try:
            # Rename/edit sources
            target.status = target.ST_BAD
            rename_required = False
            for source in target.sources.values():
                if source.name != source.orig_name:
                    rename_required = True
                    break
            if rename_required or target.source_edit_format:
                # Stage (renamed/edited) copies under the work directory.
                for source in target.sources.values():
                    source.path = os.path.join(work_dir, source.name)
                    source_path_d = os.path.dirname(source.path)
                    app_runner.fs_util.makedirs(source_path_d)
                    if target.source_edit_format:
                        fmt_args = {
                            "in": source.orig_path,
                            "out": source.path
                        }
                        command = target.source_edit_format % fmt_args
                        app_runner.popen.run_ok(command, shell=True)
                    else:
                        app_runner.fs_util.symlink(source.orig_path,
                                                   source.path)
            # Compress sources
            if target.compress_scheme:
                handler = compress_manager.get_handler(
                    target.compress_scheme)
                handler.compress_sources(target, work_dir)
            t_tran = time()
            # Run archive command
            sources = []
            if target.work_source_path:
                sources = [target.work_source_path]
            else:
                for source in target.sources.values():
                    sources.append(source.path)
            sources_str = app_runner.popen.list_to_shell_str(sources)
            target_str = app_runner.popen.list_to_shell_str([target.name])
            command = target.command_format % {
                "sources": sources_str,
                "target": target_str
            }
            ret_code, out, err = app_runner.popen.run(command, shell=True)
            t_arch = time()
            if ret_code:
                app_runner.handle_event(
                    RosePopenError([command], ret_code, out, err))
            else:
                target.status = target.ST_NEW
                app_runner.handle_event(err, kind=Event.KIND_ERR)
            app_runner.handle_event(out)
            target.command_rc = ret_code
            dao.update_command_rc(target)
        finally:
            # Always clean the work directory and report the target.
            app_runner.fs_util.delete(work_dir)
            app_runner.handle_event(
                RoseArchEvent(target, [t_init, t_tran, t_arch], ret_code))
    return [target.status for target in targets].count(
        RoseArchTarget.ST_BAD)
def _poll(self, conf_tree):
    """Poll for prerequisites of applications.

    Reads [poll] test / all-files / any-files / file-test / delays from
    the configuration, then repeatedly checks the conditions, sleeping
    between attempts, until all are satisfied or the delay list is
    exhausted (in which case PollTimeoutError is raised).  Delays accept
    ISO8601 durations or the legacy "nnnU" syntax, but not a mixture.
    """
    # Poll configuration
    poll_test = conf_tree.node.get_value(["poll", "test"])
    poll_all_files_value = conf_tree.node.get_value(["poll", "all-files"])
    poll_all_files = []
    if poll_all_files_value:
        try:
            poll_all_files = shlex.split(
                env_var_process(poll_all_files_value))
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError(["poll", "all-files"],
                                   poll_all_files_value, exc)
    poll_any_files_value = conf_tree.node.get_value(["poll", "any-files"])
    poll_any_files = []
    if poll_any_files_value:
        try:
            poll_any_files = shlex.split(
                env_var_process(poll_any_files_value))
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError(["poll", "any-files"],
                                   poll_any_files_value, exc)
    # file-test is a per-file shell command template; "{}" is replaced
    # by the file name, so it must be present.
    poll_file_test = None
    if poll_all_files or poll_any_files:
        poll_file_test = conf_tree.node.get_value(["poll", "file-test"])
        if poll_file_test and "{}" not in poll_file_test:
            raise ConfigValueError(["poll", "file-test"], poll_file_test,
                                   ConfigValueError.SYNTAX)
    poll_delays = []
    if poll_test or poll_all_files or poll_any_files:
        # Parse something like this: delays=10,4*PT30S,PT2M30S,2*PT1H
        # R*DURATION: repeat the value R times
        conf_keys = ["poll", "delays"]
        poll_delays_value = conf_tree.node.get_value(
            conf_keys, default="").strip()
        if poll_delays_value:
            # is_legacy0 remembers the syntax of the first item so a
            # mixture of legacy and ISO8601 delays can be rejected.
            is_legacy0 = None
            for item in poll_delays_value.split(","):
                value = item.strip()
                repeat = 1
                if "*" in value:
                    repeat, value = value.split("*", 1)
                    try:
                        repeat = int(repeat)
                    except ValueError as exc:
                        raise ConfigValueError(conf_keys,
                                               poll_delays_value,
                                               ConfigValueError.SYNTAX)
                try:
                    value = self.duration_parser.parse(value).get_seconds()
                    is_legacy = False
                except ISO8601SyntaxError:
                    # Legacy mode: nnnU
                    # nnn is a float, U is the unit
                    # No unit or s: seconds
                    # m: minutes
                    # h: hours
                    unit = None
                    if value[-1].lower() in self.OLD_DURATION_UNITS:
                        unit = self.OLD_DURATION_UNITS[value[-1].lower()]
                        value = value[:-1]
                    try:
                        value = float(value)
                    except ValueError as exc:
                        raise ConfigValueError(conf_keys,
                                               poll_delays_value,
                                               ConfigValueError.SYNTAX)
                    if unit:
                        value *= unit
                    is_legacy = True
                if is_legacy0 is None:
                    is_legacy0 = is_legacy
                elif is_legacy0 != is_legacy:
                    raise ConfigValueError(
                        conf_keys, poll_delays_value,
                        ConfigValueError.DURATION_LEGACY_MIX)
                poll_delays += [value] * repeat
        else:
            poll_delays = [0]  # poll once without a delay
    # Poll
    t_init = get_timepoint_for_now()
    while poll_delays and (poll_test or poll_any_files or poll_all_files):
        poll_delay = poll_delays.pop(0)
        if poll_delay:
            sleep(poll_delay)
        if poll_test:
            ret_code = self.popen.run(
                poll_test, shell=True,
                stdout=sys.stdout, stderr=sys.stderr)[0]
            self.handle_event(PollEvent(time(), poll_test, ret_code == 0))
            if ret_code == 0:
                # Test satisfied; stop running it.
                poll_test = None
        # any-files: one success satisfies the whole condition.
        any_files = list(poll_any_files)
        for file_ in any_files:
            if self._poll_file(file_, poll_file_test):
                self.handle_event(PollEvent(time(), "any-files", True))
                poll_any_files = []
                break
        # all-files: drop each file as it becomes available.
        all_files = list(poll_all_files)
        for file_ in all_files:
            if self._poll_file(file_, poll_file_test):
                poll_all_files.remove(file_)
        if all_files and not poll_all_files:
            self.handle_event(PollEvent(time(), "all-files", True))
    # Anything still unsatisfied here means the poll timed out.
    failed_items = []
    if poll_test:
        failed_items.append("test")
    if poll_any_files:
        failed_items.append("any-files")
    if poll_all_files:
        failed_items.append(
            "all-files:" + self.popen.list_to_shell_str(poll_all_files))
    if failed_items:
        now = get_timepoint_for_now()
        raise PollTimeoutError(now, now - t_init, failed_items)
def _load_tasks(self):
    """Populate the list of analysis tasks from the app config.

    Walk the application configuration for "[ana:...]" sections and
    collect each task's options: values are environment-variable
    processed, split on newlines, stripped of surrounding quotes, and
    any "{}" placeholder is expanded once per argument passed to
    rose_ana.  An "[ana:config]" section is instead merged into
    self.ana_config.  Finally instantiate one analysis object per
    task into self.analysis_tasks.

    Raise ValueError if a task name does not parse or its analysis
    type is not among the loaded methods.
    """
    # Fill a dictionary of tasks and extract their options and values
    # - skipping any which are user/trigger-ignored
    _tasks = {}
    for keys, node in self.config.walk(no_ignore=True):
        task = keys[0]
        if task.startswith("ana:"):
            # Capture the options only and save them to the tasks dict
            task = task.split(":", 1)[1]
            if len(keys) == 2:
                # The app may define a section containing rose_ana config
                # settings; add these to the config dictionary (if any of
                # the names match existing config options from the global
                # config it will be overwritten)
                if task == "config":
                    self.ana_config[keys[1]] = node.value
                    continue
                _tasks.setdefault(task, {})
                # If the value contains newlines, split it into a list
                # and either way remove any quotation marks and process
                # any environment variables
                value = env_var_process(node.value)
                values = value.split("\n")
                for ival, value in enumerate(values):
                    values[ival] = (
                        re.sub(r"^((?:'|\")*)(.*)(\1)$", r"\2", value))
                # If the user passed a blank curled-braces expression
                # it should be expanded to contain each of the arguments
                # passed to rose_ana
                new_values = []
                for value in values:
                    if "{}" in value:
                        if self.args is not None and len(self.args) > 0:
                            for arg in self.args:
                                new_values.append(value.replace("{}", arg))
                        else:
                            new_values.append(value)
                    else:
                        new_values.append(value)
                values = new_values
                # A single-element list collapses to a scalar option.
                if len(values) == 1:
                    values = values[0]
                _tasks[task][keys[1]] = values

    # Can now populate the output task list with analysis objects
    self.analysis_tasks = []
    for name in sorted(_tasks.keys()):
        options = _tasks[name]
        options["full_task_name"] = name
        # The given name starts with the comparison type and then
        # optionally a name/description; extract these here
        match = re.match(r"(?P<atype>[\w\.]+)(?:\((?P<descr>.*)\)|)", name)
        if match is None:
            # Previously an unmatched name fell through to a NameError
            # on "atype" below; fail with an explicit error instead.
            raise ValueError(
                "Unrecognised analysis task name: {0}".format(name))
        options["description"] = match.group("descr")
        atype = match.group("atype")
        # Assuming this analysis task has been loaded by the app, create
        # an instance of the task, passing the options to it
        if atype in self.methods:
            self.analysis_tasks.append(self.methods[atype](self, options))
        else:
            msg = "Unrecognised analysis type: {0}"
            raise ValueError(msg.format(atype))
def _load_tasks(self):
    """Populate the list of analysis tasks from the app config.

    Walk the application configuration for "[ana:...]" sections and
    collect each task's options: values are environment-variable
    processed, split on newlines, stripped of surrounding quotes, and
    any "{}" placeholder is expanded once per argument passed to
    rose_ana.  An "[ana:config]" section is instead merged into
    self.ana_config.  Finally instantiate one analysis object per
    task into self.analysis_tasks; an unrecognised analysis type is
    reported as failed and replaced by a placeholder task.
    """
    # Fill a dictionary of tasks and extract their options and values
    # - skipping any which are user/trigger-ignored
    _tasks = {}
    for keys, node in self.config.walk(no_ignore=True):
        task = keys[0]
        if task.startswith("ana:"):
            # Capture the options only and save them to the tasks dict
            task = task.split(":", 1)[1]
            if len(keys) == 2:
                # The app may define a section containing rose_ana config
                # settings; add these to the config dictionary (if any of
                # the names match existing config options from the global
                # config it will be overwritten)
                if task == "config":
                    self.ana_config[keys[1]] = node.value
                    continue
                _tasks.setdefault(task, {})
                # If the value contains newlines, split it into a list
                # and either way remove any quotation marks and process
                # any environment variables
                value = env_var_process(node.value)
                values = value.split("\n")
                for ival, value in enumerate(values):
                    values[ival] = (
                        re.sub(r"^((?:'|\")*)(.*)(\1)$", r"\2", value))
                # If the user passed a blank curled-braces expression
                # it should be expanded to contain each of the arguments
                # passed to rose_ana
                new_values = []
                for value in values:
                    if "{}" in value:
                        if self.args is not None and len(self.args) > 0:
                            for arg in self.args:
                                new_values.append(value.replace("{}", arg))
                        else:
                            new_values.append(value)
                    else:
                        new_values.append(value)
                values = new_values
                # A single-element list collapses to a scalar option.
                if len(values) == 1:
                    values = values[0]
                _tasks[task][keys[1]] = values

    # Can now populate the output task list with analysis objects
    self.analysis_tasks = []
    for name in sorted(_tasks.keys()):
        options = _tasks[name]
        options["full_task_name"] = name
        # Create an analysis object for each task, passing through
        # all options given to the section in the app, the given name
        # starts with the comparison type and then optionally a
        # name/description, extract this here
        match = re.match(r"(?P<atype>[\w\.]+)(?:\((?P<descr>.*)\)|)", name)
        # NOTE(review): if the name fails to match this pattern,
        # "atype" below is referenced before assignment (NameError) -
        # confirm section names are always validated upstream.
        if match:
            options["description"] = match.group("descr")
            atype = match.group("atype")
        # Assuming this analysis task has been loaded by the app, create
        # an instance of the task, passing the options to it
        if atype in self.methods:
            self.analysis_tasks.append(self.methods[atype](self, options))
        else:
            # If the analysis type isn't matched by one of the loaded
            # methods, report the error and return a placeholder
            # in its place (so that this tasks' main method can show
            # the task as "failed")
            msg = "Unrecognised analysis type: {0}"
            self.reporter(msg.format(atype), prefix="[FAIL] ")

            # Create a simple object to return - when the run_analysis
            # method is called by the main loop it will simply raise
            # an exception, triggering the "error" trap
            class Dummy(AnalysisTask):
                def run_analysis(self):
                    raise ImportError(msg.format(atype))
            self.analysis_tasks.append(Dummy(self, options))
def _clean(self, suite_name, only_items=None):
    """Perform the cleaning operations.

    Remove the cleanable items of suite "suite_name" (all of
    self.CLEANABLE_PATHS plus the suite directory itself, or just
    "only_items" if given) from every host/location recorded in the
    suite's "rose-suite-run.locs" file - directly on the local host,
    or via an ssh login shell for remote hosts.

    NOTE(review): when "only_items" is given, "items.sort()" sorts
    the caller's list in place - confirm callers do not rely on its
    original order.
    """
    engine = self.suite_engine_proc
    suite_dir_rel = engine.get_suite_dir_rel(suite_name)
    # Locations file written at suite-run time; absent file is fine
    # (locs_conf then only contains the "localhost" default).
    locs_file_path = engine.get_suite_dir(suite_name, "log",
                                          "rose-suite-run.locs")
    locs_conf = ConfigNode().set(["localhost"], {})
    try:
        ConfigLoader().load(locs_file_path, locs_conf)
    except IOError:
        pass
    items = self.CLEANABLE_PATHS + [""]
    if only_items:
        items = only_items
    items.sort()
    # UUID delimits useful output from login-shell noise (see below).
    uuid_str = str(uuid4())
    for auth, node in sorted(locs_conf.value.items(), self._auth_node_cmp):
        locs = []
        roots = set([""])
        for item in items:
            if item:
                locs.append(os.path.join(suite_dir_rel, item))
            else:
                locs.append(suite_dir_rel)
            # An item may live under an alternate root directory,
            # configured per-item ("root-dir{item}", or the older
            # "root-dir-item" form) or for the whole suite
            # ("root-dir").
            if item and os.path.normpath(item) in self.CLEANABLE_PATHS:
                item_root = node.get_value(["root-dir{" + item + "}"])
                if item_root is None:  # backward compat
                    item_root = node.get_value(["root-dir-" + item])
            elif item == "":
                item_root = node.get_value(["root-dir"])
            else:
                continue
            if item_root:
                loc_rel = suite_dir_rel
                if item:
                    loc_rel = os.path.join(suite_dir_rel, item)
                locs.append(os.path.join(item_root, loc_rel))
                roots.add(item_root)
        if self.host_selector.is_local_host(auth):
            # Clean relevant items
            for loc in locs:
                loc = os.path.abspath(env_var_process(loc))
                for name in sorted(glob(loc)):
                    engine.fs_util.delete(name)
            # Clean empty directories
            # Change directory to root level to avoid cleaning them as well
            # For cylc suites, e.g. it can clean up to an empty "cylc-run/"
            # directory.
            for root in sorted(roots):
                cwd = os.getcwd()
                if root:
                    try:
                        os.chdir(env_var_process(root))
                    except OSError:
                        continue
                # Reverse sort to ensure that e.g. "share/cycle/" is
                # cleaned before "share/"
                for name in sorted(self.CLEANABLE_PATHS, reverse=True):
                    try:
                        os.removedirs(os.path.join(suite_dir_rel, name))
                    except OSError:
                        pass
                try:
                    os.removedirs(suite_dir_rel)
                except OSError:
                    pass
                if root:
                    os.chdir(cwd)
        else:
            # Invoke bash as a login shell. The root location of a path may
            # be in $DIR syntax, which can only be expanded correctly in a
            # login shell. However, profile scripts invoked on login to the
            # remote host may print lots of junks. Hence we use a UUID here
            # as a delimiter. Only output after the UUID lines are
            # desirable lines.
            command = engine.popen.get_cmd("ssh", auth, "bash", "-l", "-c")
            sh_command = (
                "echo %(uuid)s; ls -d %(locs)s|sort; rm -fr %(locs)s") % {
                "locs": engine.popen.list_to_shell_str(locs),
                "uuid": uuid_str,
            }
            # Clean empty directories
            # Change directory to root level to avoid cleaning them as well
            # For cylc suites, e.g. it can clean up to an empty "cylc-run/"
            # directory.
            for root in roots:
                names = []
                # Reverse sort to ensure that e.g. "share/cycle/" is
                # cleaned before "share/"
                for name in sorted(self.CLEANABLE_PATHS, reverse=True):
                    names.append(os.path.join(suite_dir_rel, name))
                sh_command += (
                    "; " +
                    "(cd %(root)s; rmdir -p %(names)s 2>/dev/null || true)"
                ) % {
                    "root": root,
                    "names": engine.popen.list_to_shell_str(names),
                }
            command.append(quote(sh_command))
            # Report each deleted path (the "ls -d" lines after the
            # UUID marker) as a file-system DELETE event.
            is_after_uuid_str = False
            for line in engine.popen(*command)[0].splitlines():
                if is_after_uuid_str:
                    engine.handle_event(
                        FileSystemEvent(FileSystemEvent.DELETE,
                                        auth + ":" + line.strip()))
                elif line == uuid_str:
                    is_after_uuid_str = True
def _run_target_setup(self, app_runner, compress_manager, config, t_key,
                      t_node):
    """Helper for _run. Set up a target.

    Build and return a RoseArchTarget from the archive setting
    "t_node" (section key "t_key") of an application config: resolve
    the target name (a "(...)"-wrapped name marks the target and its
    sources as optional), validate the "command-format" and
    "source-edit-format" templates, gather the sources matched by the
    "source" globs (with checksums for update checking), determine
    the compression scheme (explicit, or guessed from the target name
    suffix) and apply any "rename-format"/"rename-parser" rules.

    Invalid settings either mark the target ST_BAD (recoverable,
    reported via app_runner.handle_event) or raise
    RoseArchValueError/ConfigValueError.
    """
    target_prefix = self._get_conf(
        config, t_node, "target-prefix", default="")
    # Target name is everything after the first ":" in the section
    # key, with environment variables expanded.
    s_key_tail = t_key.split(":", 1)[1]
    try:
        s_key_tail = env_var_process(s_key_tail)
    except UnboundEnvironmentVariableError as exc:
        raise ConfigValueError([t_key, ""], "", exc)
    is_compulsory_target = True
    if s_key_tail.startswith("(") and s_key_tail.endswith(")"):
        s_key_tail = s_key_tail[1:-1]
        is_compulsory_target = False
    target = RoseArchTarget(target_prefix + s_key_tail)
    target.command_format = self._get_conf(
        config, t_node, "command-format", compulsory=True)
    # Dry-run the %-format templates to catch bad placeholders early.
    try:
        target.command_format % {"sources": "", "target": ""}
    except KeyError as exc:
        target.status = target.ST_BAD
        app_runner.handle_event(
            RoseArchValueError(target.name, "command-format",
                               target.command_format,
                               type(exc).__name__, exc))
    target.source_edit_format = self._get_conf(
        config, t_node, "source-edit-format", default="")
    try:
        target.source_edit_format % {"in": "", "out": ""}
    except KeyError as exc:
        target.status = target.ST_BAD
        app_runner.handle_event(
            RoseArchValueError(target.name, "source-edit-format",
                               target.source_edit_format,
                               type(exc).__name__, exc))
    update_check_str = self._get_conf(config, t_node, "update-check")
    try:
        checksum_func = get_checksum_func(update_check_str)
    except ValueError as exc:
        raise RoseArchValueError(target.name, "update-check",
                                 update_check_str,
                                 type(exc).__name__, exc)
    source_prefix = self._get_conf(
        config, t_node, "source-prefix", default="")
    for source_glob in shlex.split(
            self._get_conf(config, t_node, "source", compulsory=True)):
        # A "(...)"-wrapped glob marks an optional source; an optional
        # target makes all its sources optional too.
        is_compulsory_source = is_compulsory_target
        if source_glob.startswith("(") and source_glob.endswith(")"):
            source_glob = source_glob[1:-1]
            is_compulsory_source = False
        paths = glob(source_prefix + source_glob)
        if not paths:
            # Report a missing source; only a compulsory one makes
            # the whole target bad.
            exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
                          source_prefix + source_glob)
            app_runner.handle_event(
                ConfigValueError([t_key, "source"], source_glob, exc))
            if is_compulsory_source:
                target.status = target.ST_BAD
            continue
        for path in paths:
            # N.B. source_prefix may not be a directory
            name = path[len(source_prefix):]
            for path_, checksum, _ in get_checksum(path, checksum_func):
                if checksum is None:  # is directory
                    continue
                if path_:
                    target.sources[checksum] = RoseArchSource(
                        checksum,
                        os.path.join(name, path_),
                        os.path.join(path, path_))
                else:  # path is a file
                    target.sources[checksum] = RoseArchSource(
                        checksum, name, path)
    if not target.sources:
        # No sources at all: bad if compulsory, null (skip) otherwise.
        if is_compulsory_target:
            target.status = target.ST_BAD
        else:
            target.status = target.ST_NULL
    target.compress_scheme = self._get_conf(config, t_node, "compress")
    if not target.compress_scheme:
        # No explicit scheme: guess from the target base name suffix.
        target_base = target.name
        if "/" in target.name:
            target_base = target.name.rsplit("/", 1)[1]
        if "." in target_base:
            tail = target_base.split(".", 1)[1]
            if compress_manager.get_handler(tail):
                target.compress_scheme = tail
    elif compress_manager.get_handler(target.compress_scheme) is None:
        app_runner.handle_event(
            ConfigValueError([t_key, "compress"],
                             target.compress_scheme,
                             KeyError(target.compress_scheme)))
        target.status = target.ST_BAD
    rename_format = self._get_conf(config, t_node, "rename-format")
    if rename_format:
        rename_parser_str = self._get_conf(config, t_node, "rename-parser")
        if rename_parser_str:
            try:
                rename_parser = re.compile(rename_parser_str)
            except re.error as exc:
                raise RoseArchValueError(target.name, "rename-parser",
                                         rename_parser_str,
                                         type(exc).__name__, exc)
        else:
            rename_parser = None
        # Rename each source using the format, fed with the task
        # cycle time, the source name and any named groups captured
        # by the parser.
        for source in target.sources.values():
            dict_ = {
                "cycle": os.getenv("ROSE_TASK_CYCLE_TIME"),
                "name": source.name}
            if rename_parser:
                match = rename_parser.match(source.name)
                if match:
                    dict_.update(match.groupdict())
            try:
                source.name = rename_format % dict_
            except (KeyError, ValueError) as exc:
                raise RoseArchValueError(target.name, "rename-format",
                                         rename_format,
                                         type(exc).__name__, exc)
    return target
def _process(self, conf_tree, nodes, loc_dao, **kwargs):
    """Helper for self.process.

    Install the file targets described by "nodes" (a map of config
    section keys to nodes).  Sources and targets are modelled as Loc
    objects; "loc_dao" holds the state of the previous install so
    unchanged targets can be skipped.  Out-of-date targets are
    installed directly (symlink, mkdir, plain install) or via
    parallel jobs when they have source dependencies, then any
    configured expected checksums are verified.

    kwargs: "no_overwrite_mode" - if true, raise rather than
    overwrite an existing target.

    Raise ConfigProcessError on invalid settings, unresolvable
    sources or checksum mismatches.
    """
    # Ensure that everything is overwritable
    # Ensure that container directories exist
    for key, node in sorted(nodes.items()):
        try:
            name = env_var_process(key[len(self.PREFIX):])
        except UnboundEnvironmentVariableError as exc:
            raise ConfigProcessError([key], key, exc)
        if os.path.exists(name) and kwargs.get("no_overwrite_mode"):
            exc = FileOverwriteError(name)
            raise ConfigProcessError([key], None, exc)
        self.manager.fs_util.makedirs(self.manager.fs_util.dirname(name))

    # Gets a list of sources and targets
    sources = {}
    targets = {}
    for key, node in sorted(nodes.items()):
        # N.B. no need to catch UnboundEnvironmentVariableError here
        # because any exception should been caught earlier.
        name = env_var_process(key[len(self.PREFIX):])
        targets[name] = Loc(name)
        targets[name].action_key = Loc.A_INSTALL
        targets[name].mode = node.get_value(["mode"])
        if targets[name].mode and targets[name].mode not in Loc.MODES:
            raise ConfigProcessError([key, "mode"], targets[name].mode)
        target_sources = []
        for k in ["content", "source"]:
            source_str = node.get_value([k])
            if source_str is None:
                continue
            try:
                source_str = env_var_process(source_str)
            except UnboundEnvironmentVariableError as exc:
                raise ConfigProcessError([key, k], source_str, exc)
            # Expand each glob; "(...)"-wrapped globs are optional.
            # A glob with no match is kept verbatim (it may be a
            # non-filesystem location handled by a loc handler).
            source_names = []
            for raw_source_glob in shlex.split(source_str):
                source_glob = raw_source_glob
                if (raw_source_glob.startswith("(") and
                        raw_source_glob.endswith(")")):
                    source_glob = raw_source_glob[1:-1]
                names = glob(source_glob)
                if names:
                    source_names += sorted(names)
                else:
                    source_names.append(raw_source_glob)
            for raw_source_name in source_names:
                source_name = raw_source_name
                is_optional = (raw_source_name.startswith("(") and
                               raw_source_name.endswith(")"))
                if is_optional:
                    source_name = raw_source_name[1:-1]
                if source_name.startswith("~"):
                    source_name = os.path.expanduser(source_name)
                if targets[name].mode == targets[name].MODE_SYMLINK:
                    if targets[name].real_name:
                        # Symlink mode can only have 1 source
                        raise ConfigProcessError([key, k], source_str)
                    targets[name].real_name = source_name
                else:
                    # Sources are shared between targets that use them.
                    if source_name not in sources:
                        sources[source_name] = Loc(source_name)
                        sources[source_name].action_key = Loc.A_SOURCE
                        sources[source_name].is_optional = is_optional
                    sources[source_name].used_by_names.append(name)
                    target_sources.append(sources[source_name])
        targets[name].dep_locs = target_sources
        if (targets[name].mode == targets[name].MODE_SYMLINK and
                not targets[name].real_name):
            raise ConfigProcessError([key, "source"], None)

    # Determine the scheme of the location from configuration.
    config_schemes_str = conf_tree.node.get_value(["schemes"])
    config_schemes = []  # [(pattern, scheme), ...]
    if config_schemes_str:
        for line in config_schemes_str.splitlines():
            pattern, scheme = line.split("=", 1)
            pattern = pattern.strip()
            scheme = scheme.strip()
            config_schemes.append((pattern, scheme))

    # Where applicable, determine for each source:
    # * Its real name.
    # * The checksums of its paths.
    # * Whether it can be considered unchanged.
    for source in sources.values():
        try:
            for pattern, scheme in config_schemes:
                if fnmatch(source.name, pattern):
                    source.scheme = scheme
                    break
            self.loc_handlers_manager.parse(source, conf_tree)
        except ValueError as exc:
            # Unresolvable source: skip silently if optional,
            # otherwise fail the first target that uses it.
            if source.is_optional:
                sources.pop(source.name)
                for name in source.used_by_names:
                    targets[name].dep_locs.remove(source)
                    event = SourceSkipEvent(name, source.name)
                    self.handle_event(event)
                continue
            else:
                raise ConfigProcessError(
                    ["file:" + source.used_by_names[0], "source"],
                    source.name)
        prev_source = loc_dao.select(source.name)
        source.is_out_of_date = (
            not prev_source or
            (not source.key and not source.paths) or
            prev_source.scheme != source.scheme or
            prev_source.loc_type != source.loc_type or
            prev_source.key != source.key or
            sorted(prev_source.paths) != sorted(source.paths))

    # Inspect each target to see if it is out of date:
    # * Target does not already exist.
    # * Target exists, but does not have a database entry.
    # * Target exists, but does not match settings in database.
    # * Target exists, but a source cannot be considered unchanged.
    for target in targets.values():
        if target.real_name:
            target.is_out_of_date = (
                not os.path.islink(target.name) or
                target.real_name != os.readlink(target.name))
        elif target.mode == target.MODE_MKDIR:
            target.is_out_of_date = (os.path.islink(target.name) or
                                     not os.path.isdir(target.name))
        else:
            # See if target is modified compared with previous record
            if (os.path.exists(target.name) and
                    not os.path.islink(target.name)):
                for path, checksum, access_mode in get_checksum(
                        target.name):
                    target.add_path(path, checksum, access_mode)
                target.paths.sort()
            prev_target = loc_dao.select(target.name)
            target.is_out_of_date = (
                os.path.islink(target.name) or
                not os.path.exists(target.name) or
                prev_target is None or
                prev_target.mode != target.mode or
                len(prev_target.paths) != len(target.paths))
            if not target.is_out_of_date:
                for prev_path, path in zip(prev_target.paths,
                                           target.paths):
                    if prev_path != path:
                        target.is_out_of_date = True
                        break
            # See if any sources out of date
            if not target.is_out_of_date:
                for dep_loc in target.dep_locs:
                    if dep_loc.is_out_of_date:
                        target.is_out_of_date = True
                        break
        if target.is_out_of_date:
            target.paths = None
            loc_dao.delete(target)

    # Set up jobs for rebuilding all out-of-date targets.
    jobs = {}
    for name, target in sorted(targets.items()):
        if not target.is_out_of_date:
            self.handle_event(FileUnchangedEvent(target, level=Event.V))
            continue
        if target.mode == target.MODE_SYMLINK:
            self.manager.fs_util.symlink(target.real_name, target.name)
            loc_dao.update(target)
        elif target.mode == target.MODE_MKDIR:
            if os.path.islink(target.name):
                self.manager.fs_util.delete(target.name)
            self.manager.fs_util.makedirs(target.name)
            loc_dao.update(target)
            target.loc_type = target.TYPE_TREE
            target.add_path(target.BLOB, None, None)
        elif target.dep_locs:
            # Target built from sources: queue a job, pending on its
            # source jobs and on any ancestor-directory target job.
            if os.path.islink(target.name):
                self.manager.fs_util.delete(target.name)
            jobs[target.name] = JobProxy(target)
            for source in target.dep_locs:
                if source.name not in jobs:
                    jobs[source.name] = JobProxy(source)
                    jobs[source.name].event_level = Event.V
                job = jobs[source.name]
                jobs[target.name].pending_for[source.name] = job
            p_name = target.name
            while (os.path.dirname(p_name) and
                   os.path.dirname(p_name) != p_name):
                p_name = os.path.dirname(p_name)
                if p_name in jobs:
                    jobs[target.name].pending_for[p_name] = jobs[p_name]
        else:
            # No sources: install an empty target.
            self.manager.fs_util.install(target.name)
            target.loc_type = target.TYPE_BLOB
            for path, checksum, access_mode in get_checksum(target.name):
                target.add_path(path, checksum, access_mode)
            loc_dao.update(target)

    # If relevant, use the job runner to get sources and build targets.
    if jobs:
        work_dir = mkdtemp()
        try:
            nproc_keys = ["rose.config_processors.fileinstall", "nproc"]
            nproc_str = conf_tree.node.get_value(nproc_keys)
            nproc = None
            if nproc_str is not None:
                nproc = int(nproc_str)
            job_runner = JobRunner(self, nproc)
            job_runner(JobManager(jobs), conf_tree, loc_dao, work_dir)
        except ValueError as exc:
            # A ValueError naming a source job is reported as a
            # config error on the first file section using it.
            if exc.args and exc.args[0] in jobs:
                job = jobs[exc.args[0]]
                if job.context.action_key == Loc.A_SOURCE:
                    source = job.context
                    keys = [
                        self.PREFIX + source.used_by_names[0],
                        "source"
                    ]
                    raise ConfigProcessError(keys, source.name)
            raise exc
        finally:
            rmtree(work_dir)

    # Target checksum compare and report
    for target in targets.values():
        if (not target.is_out_of_date or
                target.loc_type == target.TYPE_TREE):
            continue
        keys = [self.PREFIX + target.name, "checksum"]
        checksum_expected = conf_tree.node.get_value(keys)
        if checksum_expected is None:
            continue
        checksum = target.paths[0].checksum
        if checksum_expected:
            # Length mismatch may just mean a different algorithm;
            # re-checksum with the guessed one before comparing.
            if len(checksum_expected) != len(checksum):
                algorithm = guess_checksum_algorithm(checksum_expected)
                if algorithm:
                    checksum = get_checksum_func(algorithm)(target.name)
            if checksum_expected != checksum:
                exc = ChecksumError(checksum_expected, checksum)
                raise ConfigProcessError(keys, checksum_expected, exc)
        event = ChecksumEvent(target.name, target.paths[0].checksum)
        self.handle_event(event)
def _poll(self, conf_tree):
    """Poll for prerequisites of applications.

    Read the [poll] section of "conf_tree": an optional shell "test"
    command, "all-files"/"any-files" file lists, an optional
    "file-test" command template (must contain a "{}" placeholder for
    the file name) and a "delays" schedule.  Repeat the polls
    following the delay schedule until every item is satisfied or the
    schedule runs out, in which case PollTimeoutError is raised
    listing the unsatisfied items.

    Raise ConfigValueError on any malformed [poll] setting.
    """
    # Poll configuration
    poll_test = conf_tree.node.get_value(["poll", "test"])
    # "all-files": every listed file must pass; "any-files": one is
    # enough.  Both values are shell-split after environment variable
    # substitution.
    poll_all_files_value = conf_tree.node.get_value(["poll", "all-files"])
    poll_all_files = []
    if poll_all_files_value:
        try:
            poll_all_files = shlex.split(
                env_var_process(poll_all_files_value))
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError(["poll", "all-files"],
                                   poll_all_files_value, exc)
    poll_any_files_value = conf_tree.node.get_value(["poll", "any-files"])
    poll_any_files = []
    if poll_any_files_value:
        try:
            poll_any_files = shlex.split(
                env_var_process(poll_any_files_value))
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError(["poll", "any-files"],
                                   poll_any_files_value, exc)
    # "file-test" is only meaningful when there are files to poll.
    poll_file_test = None
    if poll_all_files or poll_any_files:
        poll_file_test = conf_tree.node.get_value(["poll", "file-test"])
        if poll_file_test and "{}" not in poll_file_test:
            raise ConfigValueError(["poll", "file-test"], poll_file_test,
                                   ConfigValueError.SYNTAX)
    poll_delays = []
    if poll_test or poll_all_files or poll_any_files:
        # Parse something like this: delays=10,4*PT30S,PT2M30S,2*PT1H
        # R*DURATION: repeat the value R times
        conf_keys = ["poll", "delays"]
        poll_delays_value = conf_tree.node.get_value(conf_keys,
                                                     default="").strip()
        if poll_delays_value:
            # ISO 8601 durations and legacy "nnnU" items must not be
            # mixed within one "delays" value.
            is_legacy0 = None
            for item in poll_delays_value.split(","):
                value = item.strip()
                repeat = 1
                if "*" in value:
                    repeat, value = value.split("*", 1)
                    try:
                        repeat = int(repeat)
                    except ValueError as exc:
                        raise ConfigValueError(conf_keys, poll_delays_value,
                                               ConfigValueError.SYNTAX)
                try:
                    value = self.duration_parser.parse(value).get_seconds()
                    is_legacy = False
                except ISO8601SyntaxError:
                    # Legacy mode: nnnU
                    # nnn is a float, U is the unit
                    # No unit or s: seconds
                    # m: minutes
                    # h: hours
                    # NOTE(review): an empty item (e.g. trailing comma)
                    # would raise IndexError on value[-1] here - confirm
                    # upstream validation rules this out.
                    unit = None
                    if value[-1].lower() in self.OLD_DURATION_UNITS:
                        unit = self.OLD_DURATION_UNITS[value[-1].lower()]
                        value = value[:-1]
                    try:
                        value = float(value)
                    except ValueError as exc:
                        raise ConfigValueError(conf_keys, poll_delays_value,
                                               ConfigValueError.SYNTAX)
                    if unit:
                        value *= unit
                    is_legacy = True
                if is_legacy0 is None:
                    is_legacy0 = is_legacy
                elif is_legacy0 != is_legacy:
                    raise ConfigValueError(
                        conf_keys, poll_delays_value,
                        ConfigValueError.DURATION_LEGACY_MIX)
                poll_delays += [value] * repeat
        else:
            poll_delays = [0]  # poll once without a delay

    # Poll
    t_init = get_timepoint_for_now()
    while poll_delays and (poll_test or poll_any_files or poll_all_files):
        poll_delay = poll_delays.pop(0)
        if poll_delay:
            sleep(poll_delay)
        if poll_test:
            ret_code = self.popen.run(poll_test, shell=True,
                                      stdout=sys.stdout,
                                      stderr=sys.stderr)[0]
            self.handle_event(PollEvent(time(), poll_test, ret_code == 0))
            if ret_code == 0:
                # Test satisfied; stop re-running it.
                poll_test = None
        # "any-files" is satisfied by the first file that passes.
        any_files = list(poll_any_files)
        for file_ in any_files:
            if self._poll_file(file_, poll_file_test):
                self.handle_event(PollEvent(time(), "any-files", True))
                poll_any_files = []
                break
        # "all-files": iterate over a copy so satisfied files can be
        # removed from the pending list during iteration.
        all_files = list(poll_all_files)
        for file_ in all_files:
            if self._poll_file(file_, poll_file_test):
                poll_all_files.remove(file_)
        if all_files and not poll_all_files:
            self.handle_event(PollEvent(time(), "all-files", True))
    # Anything still pending at this point has timed out.
    failed_items = []
    if poll_test:
        failed_items.append("test")
    if poll_any_files:
        failed_items.append("any-files")
    if poll_all_files:
        failed_items.append(
            "all-files:" + self.popen.list_to_shell_str(poll_all_files))
    if failed_items:
        now = get_timepoint_for_now()
        raise PollTimeoutError(now, now - t_init, failed_items)
def start(is_main=False):
    """Create the Rosie web service server.

    If is_main, invoke cherrypy.quickstart to run a standalone
    server.  Otherwise, return a cherrypy.Application instance
    suitable for embedding under a WSGI host.
    """
    # Environment variables (not normally defined in WSGI mode).
    # Derive ROSE_HOME by walking up from this file to the "lib"
    # directory's parent.
    if os.getenv("ROSE_HOME") is None:
        location = os.path.abspath(__file__)
        while os.path.dirname(location) != location:  # stop at FS root
            if os.path.basename(location) == "lib":
                os.environ["ROSE_HOME"] = os.path.dirname(location)
                break
            location = os.path.dirname(location)
    for env_name, env_default in (("ROSE_NS", "rosa"), ("ROSE_UTIL", "ws")):
        if os.getenv(env_name) is None:
            os.environ[env_name] = env_default

    # CherryPy quick server configuration: log to files only when
    # running standalone and a log directory is configured.
    site_conf = ResourceLocator.default().get_conf()
    log_dir_value = site_conf.get_value(["rosie-ws", "log-dir"])
    if is_main and log_dir_value is not None:
        log_dir = env_var_process(os.path.expanduser(log_dir_value))
        if not os.path.isdir(log_dir):
            os.makedirs(log_dir)
        cherrypy.config["log.error_file"] = os.path.join(
            log_dir, "server.err.log")
        cherrypy.config["log.access_file"] = os.path.join(
            log_dir, "server.log")
    cherrypy.config["request.error_response"] = _handle_error
    cherrypy.config["log.screen"] = False

    # Configuration for dynamic pages: one database URL per
    # "db.<prefix>" entry in the [rosie-db] site config section.
    db_url_map = {}
    for db_key, db_node in site_conf.get(["rosie-db"]).value.items():
        if db_key.startswith("db.") and db_key[3:]:
            db_url_map[db_key[3:]] = db_node.value
    locator = ResourceLocator.default()
    html_lib = locator.get_util_home("lib", "html")
    icon_path = locator.locate("images/rosie-icon-trim.png")
    template_env = jinja2.Environment(loader=jinja2.FileSystemLoader(
        os.path.join(html_lib, "rosie-ws")))
    root = Root(template_env, db_url_map)

    # Configuration for static pages
    config = {}
    config["/etc"] = {
        "tools.staticdir.dir": os.path.join(html_lib, "external"),
        "tools.staticdir.on": True,
    }
    config["/favicon.ico"] = {
        "tools.staticfile.on": True,
        "tools.staticfile.filename": icon_path,
    }
    if is_main:
        port = int(site_conf.get_value(["rosie-ws", "port"], 8080))
        config["global"] = {
            "server.socket_host": "0.0.0.0",
            "server.socket_port": port,
        }

    # Start server or return WSGI application
    if is_main:
        return cherrypy.quickstart(root, "/", config=config)
    return cherrypy.Application(root, script_name=None, config=config)
def _process(self, conf_tree, nodes, loc_dao, **kwargs):
    """Helper for self.process.

    Install the file targets described by "nodes" (a map of config
    section keys to nodes).  Sources and targets are modelled as Loc
    objects; "loc_dao" holds the state of the previous install so
    unchanged targets can be skipped.  DAO writes are queued on
    loc_dao (delete_locs/update_locs) and flushed via
    execute_queued_items.  Out-of-date targets are installed directly
    (symlink, symlink+, mkdir, plain install) or via parallel jobs
    when they have source dependencies, then any configured expected
    checksums are verified.

    kwargs: "no_overwrite_mode" - if true, raise rather than
    overwrite an existing target.

    Raise ConfigProcessError on invalid settings, unresolvable
    sources or checksum mismatches.
    """
    # Ensure that everything is overwritable
    # Ensure that container directories exist
    for key, node in sorted(nodes.items()):
        try:
            name = env_var_process(key[len(self.PREFIX):])
        except UnboundEnvironmentVariableError as exc:
            raise ConfigProcessError([key], key, exc)
        if os.path.exists(name) and kwargs.get("no_overwrite_mode"):
            exc = FileOverwriteError(name)
            raise ConfigProcessError([key], None, exc)
        self.manager.fs_util.makedirs(self.manager.fs_util.dirname(name))

    # Gets a list of sources and targets
    sources = {}
    targets = {}
    for key, node in sorted(nodes.items()):
        # N.B. no need to catch UnboundEnvironmentVariableError here
        # because any exception should been caught earlier.
        name = env_var_process(key[len(self.PREFIX):])
        targets[name] = Loc(name)
        targets[name].action_key = Loc.A_INSTALL
        targets[name].mode = node.get_value(["mode"])
        if targets[name].mode and targets[name].mode not in Loc.MODES:
            raise ConfigProcessError([key, "mode"], targets[name].mode)
        target_sources = []
        for k in ["content", "source"]:  # "content" for back compat
            source_str = node.get_value([k])
            if source_str is None:
                continue
            try:
                source_str = env_var_process(source_str)
            except UnboundEnvironmentVariableError as exc:
                raise ConfigProcessError([key, k], source_str, exc)
            # Expand each glob; "(...)"-wrapped globs are optional.
            # A glob with no match is kept verbatim (it may be a
            # non-filesystem location handled by a loc handler).
            source_names = []
            for raw_source_glob in shlex.split(source_str):
                source_glob = raw_source_glob
                if (raw_source_glob.startswith("(") and
                        raw_source_glob.endswith(")")):
                    source_glob = raw_source_glob[1:-1]
                names = glob(source_glob)
                if names:
                    source_names += sorted(names)
                else:
                    source_names.append(raw_source_glob)
            for raw_source_name in source_names:
                source_name = raw_source_name
                is_optional = (raw_source_name.startswith("(") and
                               raw_source_name.endswith(")"))
                if is_optional:
                    source_name = raw_source_name[1:-1]
                if source_name.startswith("~"):
                    source_name = os.path.expanduser(source_name)
                if targets[name].mode in [
                        targets[name].MODE_SYMLINK,
                        targets[name].MODE_SYMLINK_PLUS]:
                    if targets[name].real_name:
                        # Symlink mode can only have 1 source
                        raise ConfigProcessError([key, k], source_str)
                    targets[name].real_name = source_name
                else:
                    # Sources are shared between targets that use them.
                    if source_name not in sources:
                        sources[source_name] = Loc(source_name)
                        sources[source_name].action_key = Loc.A_SOURCE
                        sources[source_name].is_optional = is_optional
                    sources[source_name].used_by_names.append(name)
                    target_sources.append(sources[source_name])
        targets[name].dep_locs = target_sources
        if not targets[name].real_name and targets[name].mode in [
                targets[name].MODE_SYMLINK,
                targets[name].MODE_SYMLINK_PLUS]:
            raise ConfigProcessError([key, "source"], None)

    # Determine the scheme of the location from configuration.
    config_schemes_str = conf_tree.node.get_value(["schemes"])
    config_schemes = []  # [(pattern, scheme), ...]
    if config_schemes_str:
        for line in config_schemes_str.splitlines():
            pattern, scheme = line.split("=", 1)
            pattern = pattern.strip()
            scheme = scheme.strip()
            config_schemes.append((pattern, scheme))

    # Where applicable, determine for each source:
    # * Its real name.
    # * The checksums of its paths.
    # * Whether it can be considered unchanged.
    for source in sources.values():
        try:
            for pattern, scheme in config_schemes:
                if fnmatch(source.name, pattern):
                    source.scheme = scheme
                    break
            self.loc_handlers_manager.parse(source, conf_tree)
        except ValueError as exc:
            # Unresolvable source: skip silently if optional,
            # otherwise fail the first target that uses it.
            if source.is_optional:
                sources.pop(source.name)
                for name in source.used_by_names:
                    targets[name].dep_locs.remove(source)
                    event = SourceSkipEvent(name, source.name)
                    self.handle_event(event)
                continue
            else:
                raise ConfigProcessError(
                    ["file:" + source.used_by_names[0], "source"],
                    source.name)
        prev_source = loc_dao.select(source.name)
        source.is_out_of_date = (
            not prev_source or
            (not source.key and not source.paths) or
            prev_source.scheme != source.scheme or
            prev_source.loc_type != source.loc_type or
            prev_source.key != source.key or
            sorted(prev_source.paths) != sorted(source.paths))

    # Inspect each target to see if it is out of date:
    # * Target does not already exist.
    # * Target exists, but does not have a database entry.
    # * Target exists, but does not match settings in database.
    # * Target exists, but a source cannot be considered unchanged.
    for target in targets.values():
        if target.real_name:
            target.is_out_of_date = (
                not os.path.islink(target.name) or
                target.real_name != os.readlink(target.name))
        elif target.mode == target.MODE_MKDIR:
            target.is_out_of_date = (
                os.path.islink(target.name) or
                not os.path.isdir(target.name))
        else:
            # See if target is modified compared with previous record
            if (os.path.exists(target.name) and
                    not os.path.islink(target.name)):
                for path, checksum, access_mode in get_checksum(
                        target.name):
                    target.add_path(path, checksum, access_mode)
                target.paths.sort()
            prev_target = loc_dao.select(target.name)
            target.is_out_of_date = (
                os.path.islink(target.name) or
                not os.path.exists(target.name) or
                prev_target is None or
                prev_target.mode != target.mode or
                len(prev_target.paths) != len(target.paths))
            if not target.is_out_of_date:
                for prev_path, path in zip(
                        prev_target.paths, target.paths):
                    if prev_path != path:
                        target.is_out_of_date = True
                        break
            # See if any sources out of date
            if not target.is_out_of_date:
                for dep_loc in target.dep_locs:
                    if dep_loc.is_out_of_date:
                        target.is_out_of_date = True
                        break
        if target.is_out_of_date:
            target.paths = None
            loc_dao.delete_locs.append(target)

    # Set up jobs for rebuilding all out-of-date targets.
    jobs = {}
    for name, target in sorted(targets.items()):
        if not target.is_out_of_date:
            self.handle_event(FileUnchangedEvent(target, level=Event.V))
            continue
        if target.mode in [target.MODE_SYMLINK, target.MODE_SYMLINK_PLUS]:
            # "symlink+" requires the link target to exist.
            if target.mode == target.MODE_SYMLINK_PLUS:
                try:
                    os.stat(target.real_name)
                except OSError as exc:
                    raise ConfigProcessError(
                        [self.PREFIX + target.name, "source"],
                        target.real_name, exc)
            self.manager.fs_util.symlink(target.real_name, target.name)
            loc_dao.update_locs.append(target)
        elif target.mode == target.MODE_MKDIR:
            if os.path.islink(target.name):
                self.manager.fs_util.delete(target.name)
            self.manager.fs_util.makedirs(target.name)
            loc_dao.update_locs.append(target)
            target.loc_type = target.TYPE_TREE
            target.add_path(target.BLOB, None, None)
        elif target.dep_locs:
            # Target built from sources: queue a job, pending on its
            # source jobs and on any ancestor-directory target job.
            if os.path.islink(target.name):
                self.manager.fs_util.delete(target.name)
            jobs[target.name] = JobProxy(target)
            for source in target.dep_locs:
                if source.name not in jobs:
                    jobs[source.name] = JobProxy(source)
                    jobs[source.name].event_level = Event.V
                job = jobs[source.name]
                jobs[target.name].pending_for[source.name] = job
            p_name = target.name
            while (os.path.dirname(p_name) and
                   os.path.dirname(p_name) != p_name):
                p_name = os.path.dirname(p_name)
                if p_name in jobs:
                    jobs[target.name].pending_for[p_name] = jobs[p_name]
        else:
            # No sources: install an empty target.
            self.manager.fs_util.install(target.name)
            target.loc_type = target.TYPE_BLOB
            for path, checksum, access_mode in get_checksum(target.name):
                target.add_path(path, checksum, access_mode)
            loc_dao.update_locs.append(target)
    # Flush queued DAO deletes/updates before running jobs.
    loc_dao.execute_queued_items()

    # If relevant, use job runner to get sources and build targets
    if jobs:
        work_dir = mkdtemp()
        try:
            nproc_keys = ["rose.config_processors.fileinstall", "nproc"]
            nproc_str = conf_tree.node.get_value(nproc_keys)
            nproc = None
            if nproc_str is not None:
                nproc = int(nproc_str)
            job_runner = JobRunner(self, nproc)
            job_runner(JobManager(jobs), conf_tree, loc_dao, work_dir)
        except ValueError as exc:
            # A ValueError naming a source job is reported as a
            # config error on the first file section using it.
            if exc.args and exc.args[0] in jobs:
                job = jobs[exc.args[0]]
                if job.context.action_key == Loc.A_SOURCE:
                    source = job.context
                    keys = [self.PREFIX + source.used_by_names[0],
                            "source"]
                    raise ConfigProcessError(keys, source.name)
            raise exc
        finally:
            # Persist whatever the jobs recorded, even on failure.
            loc_dao.execute_queued_items()
            rmtree(work_dir)

    # Target checksum compare and report
    for target in targets.values():
        if (not target.is_out_of_date or
                target.loc_type == target.TYPE_TREE):
            continue
        keys = [self.PREFIX + target.name, "checksum"]
        checksum_expected = conf_tree.node.get_value(keys)
        if checksum_expected is None:
            continue
        checksum = target.paths[0].checksum
        if checksum_expected:
            # Length mismatch may just mean a different algorithm;
            # re-checksum with the guessed one before comparing.
            if len(checksum_expected) != len(checksum):
                algorithm = guess_checksum_algorithm(checksum_expected)
                if algorithm:
                    checksum = get_checksum_func(algorithm)(target.name)
            if checksum_expected != checksum:
                exc = ChecksumError(checksum_expected, checksum)
                raise ConfigProcessError(keys, checksum_expected, exc)
        event = ChecksumEvent(target.name, target.paths[0].checksum)
        self.handle_event(event)
def _clean(self, suite_name, only_items=None):
    """Perform the cleaning operations.

    Delete the run directory content of "suite_name" on the local host
    and on each remote account listed in the suite's
    "log/rose-suite-run.locs" file, then prune any directories left
    empty.

    suite_name -- name of the suite whose directories are cleaned.
    only_items -- optional list of item sub-paths; when given, only
                  these items are cleaned instead of the default
                  self.CLEANABLE_PATHS plus the suite directory itself.
    """
    engine = self.suite_engine_proc
    suite_dir_rel = engine.get_suite_dir_rel(suite_name)
    # Locations config written at suite run time; records, per host
    # ("auth"), the root directories that hold suite items.
    locs_file_path = engine.get_suite_dir(
        suite_name, "log", "rose-suite-run.locs")
    locs_conf = ConfigNode().set(["localhost"], {})
    try:
        ConfigLoader().load(locs_file_path, locs_conf)
    except IOError:
        # No locs file (e.g. suite never ran): fall back to the
        # "localhost" entry seeded above.
        pass
    # "" stands for the suite directory itself.
    items = self.CLEANABLE_PATHS + [""]
    if only_items:
        items = only_items
    items.sort()
    # UUID used below as a delimiter in remote shell output.
    uuid_str = str(uuid4())
    # NOTE: Python 2 sorted(): second positional argument is a "cmp"
    # function (self._auth_node_cmp orders the host entries).
    for auth, node in sorted(locs_conf.value.items(), self._auth_node_cmp):
        locs = []
        roots = set([""])
        for item in items:
            # Path of the item under the standard suite directory.
            if item:
                locs.append(os.path.join(suite_dir_rel, item))
            else:
                locs.append(suite_dir_rel)
            # Look up an alternate root directory for this item, if
            # one was configured at run time.
            if item and os.path.normpath(item) in self.CLEANABLE_PATHS:
                item_root = node.get_value(["root-dir{" + item + "}"])
                if item_root is None:  # backward compat
                    item_root = node.get_value(["root-dir-" + item])
            elif item == "":
                item_root = node.get_value(["root-dir"])
            else:
                continue
            if item_root:
                loc_rel = suite_dir_rel
                if item:
                    loc_rel = os.path.join(suite_dir_rel, item)
                # Also clean the item's copy under the alternate root.
                locs.append(os.path.join(item_root, loc_rel))
                roots.add(item_root)
        if self.host_selector.is_local_host(auth):
            # Clean relevant items
            for loc in locs:
                # env_var_process presumably expands $VAR references in
                # the configured paths before globbing.
                loc = os.path.abspath(env_var_process(loc))
                for name in sorted(glob(loc)):
                    engine.fs_util.delete(name)
            # Clean empty directories
            # Change directory to root level to avoid cleaning them as well
            # For cylc suites, e.g. it can clean up to an empty "cylc-run/"
            # directory.
            for root in sorted(roots):
                cwd = os.getcwd()
                if root:
                    try:
                        os.chdir(env_var_process(root))
                    except OSError:
                        continue
                # Reverse sort to ensure that e.g. "share/cycle/" is
                # cleaned before "share/"
                for name in sorted(self.CLEANABLE_PATHS, reverse=True):
                    try:
                        # removedirs only removes empty directories, so
                        # OSError here just means "not empty": ignore.
                        os.removedirs(os.path.join(suite_dir_rel, name))
                    except OSError:
                        pass
                try:
                    os.removedirs(suite_dir_rel)
                except OSError:
                    pass
                if root:
                    # Restore working directory before the next root.
                    os.chdir(cwd)
        else:
            # Invoke bash as a login shell. The root location of a path may
            # be in $DIR syntax, which can only be expanded correctly in a
            # login shell. However, profile scripts invoked on login to the
            # remote host may print lots of junks. Hence we use a UUID here
            # as a delimiter. Only output after the UUID lines are
            # desirable lines.
            command = engine.popen.get_cmd("ssh", auth, "bash", "-l", "-c")
            # "ls -d" before "rm -fr" lists exactly what gets deleted so
            # the output can be reported as FileSystemEvents below.
            sh_command = (
                "echo %(uuid)s; ls -d %(locs)s|sort; rm -fr %(locs)s"
            ) % {
                "locs": engine.popen.list_to_shell_str(locs),
                "uuid": uuid_str,
            }
            # Clean empty directories
            # Change directory to root level to avoid cleaning them as well
            # For cylc suites, e.g. it can clean up to an empty "cylc-run/"
            # directory.
            for root in roots:
                names = []
                # Reverse sort to ensure that e.g. "share/cycle/" is
                # cleaned before "share/"
                for name in sorted(self.CLEANABLE_PATHS, reverse=True):
                    names.append(os.path.join(suite_dir_rel, name))
                # "rmdir -p" removes empty parents too; "|| true" keeps
                # the compound command's exit status clean.
                sh_command += (
                    "; " +
                    "(cd %(root)s; rmdir -p %(names)s 2>/dev/null || true)"
                ) % {
                    "root": root,
                    "names": engine.popen.list_to_shell_str(names),
                }
            command.append(quote(sh_command))
            # Report only the lines printed after the UUID delimiter:
            # anything before it is login-profile noise.
            is_after_uuid_str = False
            for line in engine.popen(*command)[0].splitlines():
                if is_after_uuid_str:
                    engine.handle_event(FileSystemEvent(
                        FileSystemEvent.DELETE, auth + ":" + line.strip()))
                elif line == uuid_str:
                    is_after_uuid_str = True
def main():
    """Implement the "rose config" command.

    Parse command line options, build the root configuration node from
    the requested sources (files, stdin, metadata, or the site/user
    configuration), then print the value/section addressed by the
    positional arguments.  Exits non-zero on load errors, on a missing
    key (unless --default is given), and -- in quiet mode -- when the
    key is absent.
    """
    opt_parser = RoseOptionParser()
    opt_parser.add_my_options("default", "env_var_process_mode", "files",
                              "keys", "meta", "meta_key", "no_ignore",
                              "no_opts", "print_conf_mode")
    opts, args = opt_parser.parse_args()
    report = Reporter(opts.verbosity - opts.quietness)
    rose.macro.add_meta_paths()
    # --meta-key implies metadata mode.
    if opts.meta_key:
        opts.meta = True
    if opts.files and opts.meta_key:
        report(Exception("Cannot specify both a file and meta key."))
        sys.exit(1)
    config_loader = ConfigLoader()
    # Work out the list of sources to load into root_node.
    sources = []
    if opts.files:
        root_node = ConfigNode()
        for fname in opts.files:
            if fname == "-":
                # "-" means read configuration from stdin.
                sources.append(sys.stdin)
            else:
                if opts.meta:
                    # Metadata mode: load the file only to locate its
                    # metadata directory, which becomes the source.
                    try:
                        root_node = config_loader.load(fname)
                    except ConfigSyntaxError as exc:
                        report(exc)
                        sys.exit(1)
                    rel_path = os.sep.join(fname.split(os.sep)[:-1])
                    fpath = get_meta_path(root_node, rel_path)
                    if fpath is None:
                        report(MetadataNotFoundEvent(fname))
                    else:
                        sources.append(fpath)
                else:
                    sources.append(fname)
    elif opts.meta:
        # Metadata mode with no explicit files: use --meta-key, or the
        # application configuration in the current working directory.
        root_node = ConfigNode()
        if opts.meta_key:
            root_node.set(["meta"], opts.meta_key)
        else:
            fname = os.path.join(os.getcwd(), rose.SUB_CONFIG_NAME)
            try:
                root_node = config_loader.load(fname)
            except ConfigSyntaxError as exc:
                report(exc)
                sys.exit(1)
        fpath = get_meta_path(root_node, meta_key=opts.meta_key)
        # The temporary "meta" setting must not leak into the output.
        root_node.unset(["meta"])
        if fpath is None:
            report(Exception("Metadata not found"))
            sys.exit(1)
        else:
            sources.append(fpath)
    else:
        # Default: the site/user configuration.
        root_node = ResourceLocator.default().get_conf()
    # Load every source into root_node (later sources override).
    for source in sources:
        try:
            if opts.meta or opts.no_opts:
                config_loader.load(source, root_node)
            else:
                # Normal mode honours optional configurations.
                config_loader.load_with_opts(source, root_node)
        except (ConfigSyntaxError, IOError) as exc:
            report(exc)
            sys.exit(1)
        if source is sys.stdin:
            source.close()
    # Quiet mode: exit status alone says whether the key exists
    # (exit 1 when root_node.get(...) is None).
    if opts.quietness:
        sys.exit(root_node.get(args, opts.no_ignore) is None)
    # --keys: print the sorted child keys of the addressed node.
    if opts.keys_mode:
        try:
            keys = root_node.get(args, opts.no_ignore).value.keys()
        except AttributeError:
            # Node missing or not a section: no keys to list.
            sys.exit(1)
        keys.sort()
        for key in keys:
            print key
        sys.exit()
    conf_dump = ConfigDumper()
    # No positional args: dump the whole configuration.
    if len(args) == 0:
        conf_dump(root_node, concat_mode=opts.print_conf_mode)
        sys.exit()
    node = root_node.get(args, opts.no_ignore)
    if node is not None and isinstance(node.value, dict):
        # Args address a section.
        if opts.print_conf_mode:
            conf_dump(ConfigNode().set(args, node.value), concat_mode=True)
            sys.exit()
        keys = node.value.keys()
        keys.sort()
        for key in keys:
            node_of_key = node.get([key], opts.no_ignore)
            if node_of_key:
                value = node_of_key.value
                state = node_of_key.state
                string = "%s%s=%s" % (state, key, value)
                lines = string.splitlines()
                print lines[0]
                # Indent continuation lines to line up after "=".
                i_equal = len(state + key) + 1
                for line in lines[1:]:
                    print " " * i_equal + line
        sys.exit()
    # Args address a single setting (or nothing).
    if node is None:
        if opts.default is None:
            sys.exit(1)
        value = opts.default
    elif opts.env_var_process_mode:
        # Expand environment variable references in the value.
        value = env_var_process(node.value)
    else:
        value = node.value
    if opts.print_conf_mode:
        conf_dump(ConfigNode().set(args, value), concat_mode=True)
    else:
        print value
    sys.exit()