def _target_install(self, target, conf_tree, work_dir):
    """Install target.

    Build target using its source(s).
    Calculate the checksum(s) of (paths in) target.

    A blob target is built by concatenating the cached content of all of
    its sources; a tree target is built by rsync-ing each source cache
    into the target directory.

    """
    handle = None       # write handle for a blob target, opened lazily
    mod_bits = None     # OR-ed st_mode bits of all blob sources
    is_first = True     # first source of a tree target creates the dir
    # Install target
    for source in target.dep_locs:
        # All sources of a target must agree on the location type.
        if target.loc_type is None:
            target.loc_type = source.loc_type
        elif target.loc_type != source.loc_type:
            raise LocTypeError(target.name, source.name, target.loc_type,
                               source.loc_type)
        if target.loc_type == target.TYPE_BLOB:
            if handle is None:
                # Remove a directory/symlink occupying the target path
                # before opening it as a regular file.
                if not os.path.isfile(target.name):
                    self.manager.fs_util.delete(target.name)
                handle = open(target.name, "wb")
            # Copy in filesystem-block-sized chunks.
            f_bsize = os.statvfs(source.cache).f_bsize
            source_handle = open(source.cache, 'rb')
            while True:
                bytes_ = source_handle.read(f_bsize)
                if not bytes_:
                    break
                # BUG FIX: the old "bytes_.encode()" py2-compat shim was
                # broken — bytes read in binary mode must be written as-is
                # (py3 bytes has no encode; py2 str.encode() would raise an
                # uncaught UnicodeDecodeError on non-ASCII data).
                handle.write(bytes_)
            source_handle.close()
            if mod_bits is None:
                mod_bits = os.stat(source.cache).st_mode
            else:
                mod_bits |= os.stat(source.cache).st_mode
        else:  # target.loc_type == target.TYPE_TREE
            args = []
            if is_first:
                self.manager.fs_util.makedirs(target.name)
            args.extend(["--checksum", source.cache + "/", target.name])
            cmd = self.manager.popen.get_cmd("rsync", *args)
            self.manager.popen(*cmd)
            is_first = False
    if handle is not None:
        handle.close()
    if mod_bits:
        # Give the target the union of its sources' permission bits.
        os.chmod(target.name, mod_bits)
    # TODO: auto decompression of tar, gzip, etc?
    # Calculate target checksum(s)
    for path, checksum, access_mode in get_checksum(target.name):
        target.add_path(path, checksum, access_mode)
def _target_install(self, target, conf_tree, work_dir):
    """Install target.

    Build target using its source(s).
    Calculate the checksum(s) of (paths in) target.

    A blob target is built by concatenating the cached content of all of
    its sources; a tree target is built by rsync-ing each source cache
    into the target directory.

    """
    handle = None       # write handle for a blob target, opened lazily
    mod_bits = None     # OR-ed st_mode bits of all blob sources
    is_first = True     # first source of a tree target creates the dir
    # Install target
    for source in target.dep_locs:
        # All sources of a target must agree on the location type.
        if target.loc_type is None:
            target.loc_type = source.loc_type
        elif target.loc_type != source.loc_type:
            raise LocTypeError(target.name, source.name, target.loc_type,
                               source.loc_type)
        if target.loc_type == target.TYPE_BLOB:
            if handle is None:
                # Remove a directory/symlink occupying the target path
                # before opening it as a regular file.
                if not os.path.isfile(target.name):
                    self.manager.fs_util.delete(target.name)
                handle = open(target.name, "wb")
            # Copy in filesystem-block-sized chunks.
            f_bsize = os.statvfs(source.cache).f_bsize
            # BUG FIX: the source must be opened in binary mode — the
            # target handle is "wb", and writing text-mode str chunks to
            # a binary handle raises TypeError (and would mangle bytes).
            source_handle = open(source.cache, "rb")
            while True:
                bytes_ = source_handle.read(f_bsize)
                if not bytes_:
                    break
                handle.write(bytes_)
            source_handle.close()
            if mod_bits is None:
                mod_bits = os.stat(source.cache).st_mode
            else:
                mod_bits |= os.stat(source.cache).st_mode
        else:  # target.loc_type == target.TYPE_TREE
            args = []
            if is_first:
                self.manager.fs_util.makedirs(target.name)
            args.extend(["--checksum", source.cache + "/", target.name])
            cmd = self.manager.popen.get_cmd("rsync", *args)
            self.manager.popen(*cmd)
            is_first = False
    if handle is not None:
        handle.close()
    if mod_bits:
        # Give the target the union of its sources' permission bits.
        os.chmod(target.name, mod_bits)
    # TODO: auto decompression of tar, gzip, etc?
    # Calculate target checksum(s)
    for path, checksum, access_mode in get_checksum(target.name):
        target.add_path(path, checksum, access_mode)
def parse(cls, loc, _):
    """Set loc.scheme, loc.loc_type, loc.paths."""
    loc.scheme = "fs"
    # Expand a leading "~" so the location can be checked on disk.
    real_name = os.path.expanduser(loc.name)
    if not os.path.exists(real_name):
        raise ValueError(loc.name)
    # Record the checksum of every path under the location.
    paths_and_checksums = get_checksum(real_name)
    for rel_path, checksum, access_mode in paths_and_checksums:
        loc.add_path(rel_path, checksum, access_mode)
    # A single entry with an empty relative path means a plain file.
    is_single_file = (
        len(paths_and_checksums) == 1 and paths_and_checksums[0][0] == "")
    loc.loc_type = loc.TYPE_BLOB if is_single_file else loc.TYPE_TREE
def _process(self, conf_tree, nodes, loc_dao, **kwargs):
    """Helper for self.process.

    Install the file targets described by the "file:*" sections in
    *nodes*: work out sources and targets, decide which targets are out
    of date, then install them (directly, or via a job runner for
    targets with sources), recording state through *loc_dao*.

    """
    # Ensure that everything is overwritable
    # Ensure that container directories exist
    for key, node in sorted(nodes.items()):
        try:
            name = env_var_process(key[len(self.PREFIX):])
        except UnboundEnvironmentVariableError as exc:
            raise ConfigProcessError([key], key, exc)
        if os.path.exists(name) and kwargs.get("no_overwrite_mode"):
            exc = FileOverwriteError(name)
            raise ConfigProcessError([key], None, exc)
        self.manager.fs_util.makedirs(self.manager.fs_util.dirname(name))
    # Gets a list of sources and targets
    sources = {}
    targets = {}
    for key, node in sorted(nodes.items()):
        # N.B. no need to catch UnboundEnvironmentVariableError here
        # because any exception should been caught earlier.
        name = env_var_process(key[len(self.PREFIX):])
        targets[name] = Loc(name)
        targets[name].action_key = Loc.A_INSTALL
        targets[name].mode = node.get_value(["mode"])
        if targets[name].mode and targets[name].mode not in Loc.MODES:
            raise ConfigProcessError([key, "mode"], targets[name].mode)
        target_sources = []
        for k in ["content", "source"]:
            source_str = node.get_value([k])
            if source_str is None:
                continue
            try:
                source_str = env_var_process(source_str)
            except UnboundEnvironmentVariableError as exc:
                raise ConfigProcessError([key, k], source_str, exc)
            source_names = []
            # Expand globs; a "(...)"-wrapped item is optional, so strip
            # the parentheses before globbing but keep the raw form if
            # the glob matches nothing.
            for raw_source_glob in shlex.split(source_str):
                source_glob = raw_source_glob
                if (raw_source_glob.startswith("(") and
                        raw_source_glob.endswith(")")):
                    source_glob = raw_source_glob[1:-1]
                names = glob(source_glob)
                if names:
                    source_names += sorted(names)
                else:
                    source_names.append(raw_source_glob)
            for raw_source_name in source_names:
                source_name = raw_source_name
                is_optional = (raw_source_name.startswith("(") and
                               raw_source_name.endswith(")"))
                if is_optional:
                    source_name = raw_source_name[1:-1]
                if source_name.startswith("~"):
                    source_name = os.path.expanduser(source_name)
                if targets[name].mode == targets[name].MODE_SYMLINK:
                    if targets[name].real_name:
                        # Symlink mode can only have 1 source
                        raise ConfigProcessError([key, k], source_str)
                    targets[name].real_name = source_name
                else:
                    # De-duplicate sources shared between targets.
                    if source_name not in sources:
                        sources[source_name] = Loc(source_name)
                        sources[source_name].action_key = Loc.A_SOURCE
                        sources[source_name].is_optional = is_optional
                    sources[source_name].used_by_names.append(name)
                    target_sources.append(sources[source_name])
        targets[name].dep_locs = target_sources
        if (targets[name].mode == targets[name].MODE_SYMLINK and
                not targets[name].real_name):
            raise ConfigProcessError([key, "source"], None)
    # Determine the scheme of the location from configuration.
    config_schemes_str = conf_tree.node.get_value(["schemes"])
    config_schemes = []  # [(pattern, scheme), ...]
    if config_schemes_str:
        for line in config_schemes_str.splitlines():
            pattern, scheme = line.split("=", 1)
            pattern = pattern.strip()
            scheme = scheme.strip()
            config_schemes.append((pattern, scheme))
    # Where applicable, determine for each source:
    # * Its real name.
    # * The checksums of its paths.
    # * Whether it can be considered unchanged.
    for source in sources.values():
        try:
            # First matching pattern wins.
            for pattern, scheme in config_schemes:
                if fnmatch(source.name, pattern):
                    source.scheme = scheme
                    break
            self.loc_handlers_manager.parse(source, conf_tree)
        except ValueError as exc:
            if source.is_optional:
                # Drop an unparseable optional source and detach it from
                # every target that referenced it.
                sources.pop(source.name)
                for name in source.used_by_names:
                    targets[name].dep_locs.remove(source)
                    event = SourceSkipEvent(name, source.name)
                    self.handle_event(event)
                continue
            else:
                raise ConfigProcessError(
                    ["file:" + source.used_by_names[0], "source"],
                    source.name)
        prev_source = loc_dao.select(source.name)
        source.is_out_of_date = (
            not prev_source or
            (not source.key and not source.paths) or
            prev_source.scheme != source.scheme or
            prev_source.loc_type != source.loc_type or
            prev_source.key != source.key or
            sorted(prev_source.paths) != sorted(source.paths))
    # Inspect each target to see if it is out of date:
    # * Target does not already exist.
    # * Target exists, but does not have a database entry.
    # * Target exists, but does not match settings in database.
    # * Target exists, but a source cannot be considered unchanged.
    for target in targets.values():
        if target.real_name:
            # Symlink target: up to date only if it is already a link
            # pointing at the requested real name.
            target.is_out_of_date = (
                not os.path.islink(target.name) or
                target.real_name != os.readlink(target.name))
        elif target.mode == target.MODE_MKDIR:
            target.is_out_of_date = (
                os.path.islink(target.name) or
                not os.path.isdir(target.name))
        else:
            # See if target is modified compared with previous record
            if (os.path.exists(target.name) and
                    not os.path.islink(target.name)):
                for path, checksum, access_mode in get_checksum(
                        target.name):
                    target.add_path(path, checksum, access_mode)
                target.paths.sort()
            prev_target = loc_dao.select(target.name)
            target.is_out_of_date = (
                os.path.islink(target.name) or
                not os.path.exists(target.name) or
                prev_target is None or
                prev_target.mode != target.mode or
                len(prev_target.paths) != len(target.paths))
            if not target.is_out_of_date:
                for prev_path, path in zip(
                        prev_target.paths, target.paths):
                    if prev_path != path:
                        target.is_out_of_date = True
                        break
        # See if any sources out of date
        if not target.is_out_of_date:
            for dep_loc in target.dep_locs:
                if dep_loc.is_out_of_date:
                    target.is_out_of_date = True
                    break
        if target.is_out_of_date:
            target.paths = None
            loc_dao.delete(target)
    # Set up jobs for rebuilding all out-of-date targets.
    jobs = {}
    for name, target in sorted(targets.items()):
        if not target.is_out_of_date:
            self.handle_event(FileUnchangedEvent(target, level=Event.V))
            continue
        if target.mode == target.MODE_SYMLINK:
            self.manager.fs_util.symlink(target.real_name, target.name)
            loc_dao.update(target)
        elif target.mode == target.MODE_MKDIR:
            if os.path.islink(target.name):
                self.manager.fs_util.delete(target.name)
            self.manager.fs_util.makedirs(target.name)
            loc_dao.update(target)
            target.loc_type = target.TYPE_TREE
            target.add_path(target.BLOB, None, None)
        elif target.dep_locs:
            # Target with sources: build a job plus one job per source,
            # with the target job pending on its sources.
            if os.path.islink(target.name):
                self.manager.fs_util.delete(target.name)
            jobs[target.name] = JobProxy(target)
            for source in target.dep_locs:
                if source.name not in jobs:
                    jobs[source.name] = JobProxy(source)
                    jobs[source.name].event_level = Event.V
                job = jobs[source.name]
                jobs[target.name].pending_for[source.name] = job
            # Also depend on any ancestor directory that is itself a job.
            p_name = target.name
            while (os.path.dirname(p_name) and
                    os.path.dirname(p_name) != p_name):
                p_name = os.path.dirname(p_name)
                if p_name in jobs:
                    jobs[target.name].pending_for[p_name] = jobs[p_name]
        else:
            # Sourceless target: install an empty file.
            self.manager.fs_util.install(target.name)
            target.loc_type = target.TYPE_BLOB
            for path, checksum, access_mode in get_checksum(target.name):
                target.add_path(path, checksum, access_mode)
            loc_dao.update(target)
    if jobs:
        work_dir = mkdtemp()
        try:
            nproc_keys = ["rose.config_processors.fileinstall", "nproc"]
            nproc_str = conf_tree.node.get_value(nproc_keys)
            nproc = None
            if nproc_str is not None:
                nproc = int(nproc_str)
            job_runner = JobRunner(self, nproc)
            job_runner(JobManager(jobs), conf_tree, loc_dao, work_dir)
        except ValueError as exc:
            # Map a job failure back to the config setting that caused it.
            if exc.args and exc.args[0] in jobs:
                job = jobs[exc.args[0]]
                if job.context.action_key == Loc.A_SOURCE:
                    source = job.context
                    keys = [
                        self.PREFIX + source.used_by_names[0],
                        "source"
                    ]
                    raise ConfigProcessError(keys, source.name)
            raise exc
        finally:
            rmtree(work_dir)
    # Target checksum compare and report
    for target in targets.values():
        if (not target.is_out_of_date or
                target.loc_type == target.TYPE_TREE):
            continue
        keys = [self.PREFIX + target.name, "checksum"]
        checksum_expected = conf_tree.node.get_value(keys)
        if checksum_expected is None:
            continue
        checksum = target.paths[0].checksum
        if checksum_expected:
            # Length mismatch suggests a different algorithm was
            # configured; recompute with the guessed one before comparing.
            if len(checksum_expected) != len(checksum):
                algorithm = guess_checksum_algorithm(checksum_expected)
                if algorithm:
                    checksum = get_checksum_func(algorithm)(target.name)
            if checksum_expected != checksum:
                exc = ChecksumError(checksum_expected, checksum)
                raise ConfigProcessError(keys, checksum_expected, exc)
        event = ChecksumEvent(target.name, target.paths[0].checksum)
        self.handle_event(event)
def _run(self, dao, app_runner, config):
    """Transform and archive suite files.

    This application is designed to work under "rose task-run" in a
    suite.

    Returns the number of targets that finished in the ST_BAD state.

    """
    path = os.path.dirname(os.path.dirname(sys.modules["rose"].__file__))
    compress_manager = SchemeHandlersManager(
        [path], "rose.apps.rose_arch_compressions", ["compress_sources"],
        None, app_runner)
    # Set up the targets
    cycle = os.getenv("ROSE_TASK_CYCLE_TIME")
    targets = []
    for t_key, t_node in sorted(config.value.items()):
        if t_node.is_ignored() or ":" not in t_key:
            continue
        s_key_head, s_key_tail = t_key.split(":", 1)
        if s_key_head != self.SECTION or not s_key_tail:
            continue
        target_prefix = self._get_conf(
            config, t_node, "target-prefix", default="")
        try:
            s_key_tail = env_var_process(s_key_tail)
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError([t_key, ""], "", exc)
        target_name = target_prefix + s_key_tail
        target = RoseArchTarget(target_name)
        target.command_format = self._get_conf(
            config, t_node, "command-format", compulsory=True)
        # Validate the format string early with dummy values.
        try:
            target.command_format % {"sources": "", "target": ""}
        except KeyError as exc:
            target.status = target.ST_BAD
            app_runner.handle_event(
                RoseArchValueError(
                    target.name,
                    "command-format",
                    target.command_format,
                    type(exc).__name__,
                    exc
                )
            )
        source_str = self._get_conf(
            config, t_node, "source", compulsory=True)
        source_prefix = self._get_conf(
            config, t_node, "source-prefix", default="")
        target.source_edit_format = self._get_conf(
            config, t_node, "source-edit-format", default="")
        try:
            target.source_edit_format % {"in": "", "out": ""}
        except KeyError as exc:
            target.status = target.ST_BAD
            app_runner.handle_event(
                RoseArchValueError(
                    target.name,
                    "source-edit-format",
                    target.source_edit_format,
                    type(exc).__name__,
                    exc
                )
            )
        update_check_str = self._get_conf(
            config, t_node, "update-check", default="md5sum")
        # CONSISTENCY FIX: the sibling _run_target_setup catches
        # ValueError from get_checksum_func; catch both so an unknown
        # algorithm is reported the same way in either code path.
        try:
            checksum_func = get_checksum_func(update_check_str)
        except (KeyError, ValueError) as exc:
            raise RoseArchValueError(
                target.name, "update-check", update_check_str,
                type(exc).__name__, exc
            )
        for source_glob in shlex.split(source_str):
            paths = glob(source_prefix + source_glob)
            if not paths:
                exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
                              source_glob)
                app_runner.handle_event(ConfigValueError(
                    [t_key, "source"], source_glob, exc))
                target.status = target.ST_BAD
                continue
            for path in paths:
                # N.B. source_prefix may not be a directory
                name = path[len(source_prefix):]
                for path_, checksum, _ in get_checksum(
                        path, checksum_func):
                    if checksum is None:  # is directory
                        continue
                    if path_:
                        target.sources[checksum] = RoseArchSource(
                            checksum,
                            os.path.join(name, path_),
                            os.path.join(path, path_))
                    else:  # path is a file
                        target.sources[checksum] = RoseArchSource(
                            checksum, name, path)
        target.compress_scheme = self._get_conf(config, t_node, "compress")
        if target.compress_scheme:
            if (compress_manager.get_handler(target.compress_scheme)
                    is None):
                app_runner.handle_event(ConfigValueError(
                    [t_key, "compress"], target.compress_scheme,
                    KeyError(target.compress_scheme)))
                target.status = target.ST_BAD
        else:
            # No explicit scheme: infer one from the target's file
            # extension, if a handler exists for it.
            target_base = target.name
            if "/" in target.name:
                target_base = target.name.rsplit("/", 1)[1]
            if "." in target_base:
                tail = target_base.split(".", 1)[1]
                if compress_manager.get_handler(tail):
                    target.compress_scheme = tail
        rename_format = self._get_conf(config, t_node, "rename-format")
        if rename_format:
            rename_parser_str = self._get_conf(
                config, t_node, "rename-parser")
            if rename_parser_str:
                try:
                    rename_parser = re.compile(rename_parser_str)
                except re.error as exc:
                    raise RoseArchValueError(
                        target.name, "rename-parser", rename_parser_str,
                        type(exc).__name__, exc
                    )
            else:
                rename_parser = None
            for source in target.sources.values():
                dict_ = {"cycle": cycle, "name": source.name}
                if rename_parser:
                    match = rename_parser.match(source.name)
                    if match:
                        dict_.update(match.groupdict())
                try:
                    source.name = rename_format % dict_
                except (KeyError, ValueError) as exc:
                    raise RoseArchValueError(
                        target.name, "rename-format", rename_format,
                        type(exc).__name__, exc
                    )
        old_target = dao.select(target.name)
        if old_target is None or old_target != target:
            dao.delete(target)
        else:
            target.status = target.ST_OLD
        targets.append(target)
    # Delete from database items that are no longer relevant
    dao.delete_all(filter_targets=targets)
    # Update the targets
    for target in targets:
        if target.status == target.ST_OLD:
            app_runner.handle_event(RoseArchEvent(target))
            continue
        target.command_rc = 1
        dao.insert(target)
        if target.status == target.ST_BAD:
            app_runner.handle_event(RoseArchEvent(target))
            continue
        work_dir = mkdtemp()
        t_init = time()
        t_tran, t_arch = t_init, t_init
        ret_code = None
        try:
            # Rename/edit sources
            target.status = target.ST_BAD
            rename_required = False
            for source in target.sources.values():
                if source.name != source.orig_name:
                    rename_required = True
                    break
            if rename_required or target.source_edit_format:
                for source in target.sources.values():
                    source.path = os.path.join(work_dir, source.name)
                    source_path_d = os.path.dirname(source.path)
                    app_runner.fs_util.makedirs(source_path_d)
                    if target.source_edit_format:
                        fmt_args = {"in": source.orig_path,
                                    "out": source.path}
                        command = target.source_edit_format % fmt_args
                        app_runner.popen.run_ok(command, shell=True)
                    else:
                        app_runner.fs_util.symlink(source.orig_path,
                                                   source.path)
            # Compress sources
            if target.compress_scheme:
                handler = compress_manager.get_handler(
                    target.compress_scheme)
                handler.compress_sources(target, work_dir)
            t_tran = time()
            # Run archive command
            sources = []
            if target.work_source_path:
                sources = [target.work_source_path]
            else:
                for source in target.sources.values():
                    sources.append(source.path)
            sources_str = app_runner.popen.list_to_shell_str(sources)
            target_str = app_runner.popen.list_to_shell_str([target.name])
            command = target.command_format % {"sources": sources_str,
                                               "target": target_str}
            ret_code, out, err = app_runner.popen.run(command, shell=True)
            t_arch = time()
            if ret_code:
                app_runner.handle_event(
                    RosePopenError([command], ret_code, out, err))
            else:
                target.status = target.ST_NEW
                app_runner.handle_event(err, kind=Event.KIND_ERR)
            # BUG FIX: stdout was emitted twice here; report it once.
            app_runner.handle_event(out)
            target.command_rc = ret_code
            dao.update_command_rc(target)
        finally:
            app_runner.fs_util.delete(work_dir)
            app_runner.handle_event(
                RoseArchEvent(target, [t_init, t_tran, t_arch], ret_code))
    return [target.status for target in targets].count(
        RoseArchTarget.ST_BAD)
def _run_target_setup(
        self, app_runner, compress_manager, config, t_key, t_node):
    """Helper for _run. Set up a target.

    Build and return a RoseArchTarget from the "[SECTION:name]" config
    section *t_node*: resolve the target name, validate format strings,
    collect and checksum the sources, pick a compress scheme and apply
    any rename rules. A bad setting marks the target ST_BAD (or raises
    RoseArchValueError) rather than silently continuing.

    """
    target_prefix = self._get_conf(
        config, t_node, "target-prefix", default="")
    s_key_tail = t_key.split(":", 1)[1]
    try:
        s_key_tail = env_var_process(s_key_tail)
    except UnboundEnvironmentVariableError as exc:
        raise ConfigValueError([t_key, ""], "", exc)
    # A "(name)" section denotes an optional target.
    is_compulsory_target = True
    if s_key_tail.startswith("(") and s_key_tail.endswith(")"):
        s_key_tail = s_key_tail[1:-1]
        is_compulsory_target = False
    target = RoseArchTarget(target_prefix + s_key_tail)
    target.command_format = self._get_conf(
        config, t_node, "command-format", compulsory=True)
    # Validate the format string early with dummy values.
    try:
        target.command_format % {"sources": "", "target": ""}
    except KeyError as exc:
        target.status = target.ST_BAD
        app_runner.handle_event(
            RoseArchValueError(
                target.name,
                "command-format",
                target.command_format,
                type(exc).__name__,
                exc
            )
        )
    target.source_edit_format = self._get_conf(
        config, t_node, "source-edit-format", default="")
    try:
        target.source_edit_format % {"in": "", "out": ""}
    except KeyError as exc:
        target.status = target.ST_BAD
        app_runner.handle_event(
            RoseArchValueError(
                target.name,
                "source-edit-format",
                target.source_edit_format,
                type(exc).__name__,
                exc
            )
        )
    update_check_str = self._get_conf(config, t_node, "update-check")
    try:
        checksum_func = get_checksum_func(update_check_str)
    except ValueError as exc:
        raise RoseArchValueError(
            target.name, "update-check", update_check_str,
            type(exc).__name__, exc)
    source_prefix = self._get_conf(
        config, t_node, "source-prefix", default="")
    for source_glob in shlex.split(
            self._get_conf(config, t_node, "source", compulsory=True)):
        # A "(glob)" item denotes an optional source; an optional target
        # makes all of its sources optional too.
        is_compulsory_source = is_compulsory_target
        if source_glob.startswith("(") and source_glob.endswith(")"):
            source_glob = source_glob[1:-1]
            is_compulsory_source = False
        paths = glob(source_prefix + source_glob)
        if not paths:
            exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
                          source_prefix + source_glob)
            app_runner.handle_event(ConfigValueError(
                [t_key, "source"], source_glob, exc))
            if is_compulsory_source:
                target.status = target.ST_BAD
            continue
        for path in paths:
            # N.B. source_prefix may not be a directory
            name = path[len(source_prefix):]
            for path_, checksum, _ in get_checksum(path, checksum_func):
                if checksum is None:  # is directory
                    continue
                if path_:
                    target.sources[checksum] = RoseArchSource(
                        checksum,
                        os.path.join(name, path_),
                        os.path.join(path, path_))
                else:  # path is a file
                    target.sources[checksum] = RoseArchSource(
                        checksum, name, path)
    if not target.sources:
        # No sources at all: bad if compulsory, otherwise a null target.
        if is_compulsory_target:
            target.status = target.ST_BAD
        else:
            target.status = target.ST_NULL
    target.compress_scheme = self._get_conf(config, t_node, "compress")
    if not target.compress_scheme:
        # No explicit scheme: infer one from the target's file
        # extension, if a handler exists for it.
        target_base = target.name
        if "/" in target.name:
            target_base = target.name.rsplit("/", 1)[1]
        if "." in target_base:
            tail = target_base.split(".", 1)[1]
            if compress_manager.get_handler(tail):
                target.compress_scheme = tail
    elif compress_manager.get_handler(target.compress_scheme) is None:
        app_runner.handle_event(ConfigValueError(
            [t_key, "compress"], target.compress_scheme,
            KeyError(target.compress_scheme)))
        target.status = target.ST_BAD
    rename_format = self._get_conf(config, t_node, "rename-format")
    if rename_format:
        rename_parser_str = self._get_conf(config, t_node, "rename-parser")
        if rename_parser_str:
            try:
                rename_parser = re.compile(rename_parser_str)
            except re.error as exc:
                raise RoseArchValueError(
                    target.name, "rename-parser", rename_parser_str,
                    type(exc).__name__, exc)
        else:
            rename_parser = None
        for source in target.sources.values():
            # Format keys: "cycle", "name", plus any named groups from
            # the rename-parser match on the source name.
            dict_ = {
                "cycle": os.getenv("ROSE_TASK_CYCLE_TIME"),
                "name": source.name}
            if rename_parser:
                match = rename_parser.match(source.name)
                if match:
                    dict_.update(match.groupdict())
            try:
                source.name = rename_format % dict_
            except (KeyError, ValueError) as exc:
                raise RoseArchValueError(
                    target.name, "rename-format", rename_format,
                    type(exc).__name__, exc)
    return target
def _process(self, conf_tree, nodes, loc_dao, **kwargs):
    """Helper for self.process.

    Install the file targets described by the "file:*" sections in
    *nodes*: work out sources and targets, decide which targets are out
    of date, then install them (directly, or via a job runner for
    targets with sources). Database writes are queued on *loc_dao* and
    flushed with execute_queued_items().

    """
    # Ensure that everything is overwritable
    # Ensure that container directories exist
    for key, node in sorted(nodes.items()):
        try:
            name = env_var_process(key[len(self.PREFIX):])
        except UnboundEnvironmentVariableError as exc:
            raise ConfigProcessError([key], key, exc)
        if os.path.exists(name) and kwargs.get("no_overwrite_mode"):
            exc = FileOverwriteError(name)
            raise ConfigProcessError([key], None, exc)
        self.manager.fs_util.makedirs(self.manager.fs_util.dirname(name))
    # Gets a list of sources and targets
    sources = {}
    targets = {}
    for key, node in sorted(nodes.items()):
        # N.B. no need to catch UnboundEnvironmentVariableError here
        # because any exception should been caught earlier.
        name = env_var_process(key[len(self.PREFIX):])
        targets[name] = Loc(name)
        targets[name].action_key = Loc.A_INSTALL
        targets[name].mode = node.get_value(["mode"])
        if targets[name].mode and targets[name].mode not in Loc.MODES:
            raise ConfigProcessError([key, "mode"], targets[name].mode)
        target_sources = []
        for k in ["content", "source"]:  # "content" for back compat
            source_str = node.get_value([k])
            if source_str is None:
                continue
            try:
                source_str = env_var_process(source_str)
            except UnboundEnvironmentVariableError as exc:
                raise ConfigProcessError([key, k], source_str, exc)
            source_names = []
            # Expand globs; a "(...)"-wrapped item is optional, so strip
            # the parentheses before globbing but keep the raw form if
            # the glob matches nothing.
            for raw_source_glob in shlex.split(source_str):
                source_glob = raw_source_glob
                if (raw_source_glob.startswith("(") and
                        raw_source_glob.endswith(")")):
                    source_glob = raw_source_glob[1:-1]
                names = glob(source_glob)
                if names:
                    source_names += sorted(names)
                else:
                    source_names.append(raw_source_glob)
            for raw_source_name in source_names:
                source_name = raw_source_name
                is_optional = (raw_source_name.startswith("(") and
                               raw_source_name.endswith(")"))
                if is_optional:
                    source_name = raw_source_name[1:-1]
                if source_name.startswith("~"):
                    source_name = os.path.expanduser(source_name)
                if targets[name].mode in [
                        targets[name].MODE_SYMLINK,
                        targets[name].MODE_SYMLINK_PLUS]:
                    if targets[name].real_name:
                        # Symlink mode can only have 1 source
                        raise ConfigProcessError([key, k], source_str)
                    targets[name].real_name = source_name
                else:
                    # De-duplicate sources shared between targets.
                    if source_name not in sources:
                        sources[source_name] = Loc(source_name)
                        sources[source_name].action_key = Loc.A_SOURCE
                        sources[source_name].is_optional = is_optional
                    sources[source_name].used_by_names.append(name)
                    target_sources.append(sources[source_name])
        targets[name].dep_locs = target_sources
        if not targets[name].real_name and targets[name].mode in [
                targets[name].MODE_SYMLINK,
                targets[name].MODE_SYMLINK_PLUS]:
            raise ConfigProcessError([key, "source"], None)
    # Determine the scheme of the location from configuration.
    config_schemes_str = conf_tree.node.get_value(["schemes"])
    config_schemes = []  # [(pattern, scheme), ...]
    if config_schemes_str:
        for line in config_schemes_str.splitlines():
            pattern, scheme = line.split("=", 1)
            pattern = pattern.strip()
            scheme = scheme.strip()
            config_schemes.append((pattern, scheme))
    # Where applicable, determine for each source:
    # * Its real name.
    # * The checksums of its paths.
    # * Whether it can be considered unchanged.
    for source in sources.values():
        try:
            # First matching pattern wins.
            for pattern, scheme in config_schemes:
                if fnmatch(source.name, pattern):
                    source.scheme = scheme
                    break
            self.loc_handlers_manager.parse(source, conf_tree)
        except ValueError as exc:
            if source.is_optional:
                # Drop an unparseable optional source and detach it from
                # every target that referenced it.
                sources.pop(source.name)
                for name in source.used_by_names:
                    targets[name].dep_locs.remove(source)
                    event = SourceSkipEvent(name, source.name)
                    self.handle_event(event)
                continue
            else:
                raise ConfigProcessError(
                    ["file:" + source.used_by_names[0], "source"],
                    source.name)
        prev_source = loc_dao.select(source.name)
        source.is_out_of_date = (
            not prev_source or
            (not source.key and not source.paths) or
            prev_source.scheme != source.scheme or
            prev_source.loc_type != source.loc_type or
            prev_source.key != source.key or
            sorted(prev_source.paths) != sorted(source.paths))
    # Inspect each target to see if it is out of date:
    # * Target does not already exist.
    # * Target exists, but does not have a database entry.
    # * Target exists, but does not match settings in database.
    # * Target exists, but a source cannot be considered unchanged.
    for target in targets.values():
        if target.real_name:
            # Symlink target: up to date only if it is already a link
            # pointing at the requested real name.
            target.is_out_of_date = (
                not os.path.islink(target.name) or
                target.real_name != os.readlink(target.name))
        elif target.mode == target.MODE_MKDIR:
            target.is_out_of_date = (
                os.path.islink(target.name) or
                not os.path.isdir(target.name))
        else:
            # See if target is modified compared with previous record
            if (os.path.exists(target.name) and
                    not os.path.islink(target.name)):
                for path, checksum, access_mode in get_checksum(
                        target.name):
                    target.add_path(path, checksum, access_mode)
                target.paths.sort()
            prev_target = loc_dao.select(target.name)
            target.is_out_of_date = (
                os.path.islink(target.name) or
                not os.path.exists(target.name) or
                prev_target is None or
                prev_target.mode != target.mode or
                len(prev_target.paths) != len(target.paths))
            if not target.is_out_of_date:
                for prev_path, path in zip(
                        prev_target.paths, target.paths):
                    if prev_path != path:
                        target.is_out_of_date = True
                        break
        # See if any sources out of date
        if not target.is_out_of_date:
            for dep_loc in target.dep_locs:
                if dep_loc.is_out_of_date:
                    target.is_out_of_date = True
                    break
        if target.is_out_of_date:
            target.paths = None
            loc_dao.delete_locs.append(target)
    # Set up jobs for rebuilding all out-of-date targets.
    jobs = {}
    for name, target in sorted(targets.items()):
        if not target.is_out_of_date:
            self.handle_event(FileUnchangedEvent(target, level=Event.V))
            continue
        if target.mode in [target.MODE_SYMLINK, target.MODE_SYMLINK_PLUS]:
            # symlink+ requires the link target to exist now.
            if target.mode == target.MODE_SYMLINK_PLUS:
                try:
                    os.stat(target.real_name)
                except OSError as exc:
                    raise ConfigProcessError(
                        [self.PREFIX + target.name, "source"],
                        target.real_name, exc)
            self.manager.fs_util.symlink(target.real_name, target.name)
            loc_dao.update_locs.append(target)
        elif target.mode == target.MODE_MKDIR:
            if os.path.islink(target.name):
                self.manager.fs_util.delete(target.name)
            self.manager.fs_util.makedirs(target.name)
            loc_dao.update_locs.append(target)
            target.loc_type = target.TYPE_TREE
            target.add_path(target.BLOB, None, None)
        elif target.dep_locs:
            # Target with sources: build a job plus one job per source,
            # with the target job pending on its sources.
            if os.path.islink(target.name):
                self.manager.fs_util.delete(target.name)
            jobs[target.name] = JobProxy(target)
            for source in target.dep_locs:
                if source.name not in jobs:
                    jobs[source.name] = JobProxy(source)
                    jobs[source.name].event_level = Event.V
                job = jobs[source.name]
                jobs[target.name].pending_for[source.name] = job
            # Also depend on any ancestor directory that is itself a job.
            p_name = target.name
            while (os.path.dirname(p_name) and
                    os.path.dirname(p_name) != p_name):
                p_name = os.path.dirname(p_name)
                if p_name in jobs:
                    jobs[target.name].pending_for[p_name] = jobs[p_name]
        else:
            # Sourceless target: install an empty file.
            self.manager.fs_util.install(target.name)
            target.loc_type = target.TYPE_BLOB
            for path, checksum, access_mode in get_checksum(target.name):
                target.add_path(path, checksum, access_mode)
            loc_dao.update_locs.append(target)
    loc_dao.execute_queued_items()
    # If relevant, use job runner to get sources and build targets
    if jobs:
        work_dir = mkdtemp()
        try:
            nproc_keys = ["rose.config_processors.fileinstall", "nproc"]
            nproc_str = conf_tree.node.get_value(nproc_keys)
            nproc = None
            if nproc_str is not None:
                nproc = int(nproc_str)
            job_runner = JobRunner(self, nproc)
            job_runner(JobManager(jobs), conf_tree, loc_dao, work_dir)
        except ValueError as exc:
            # Map a job failure back to the config setting that caused it.
            if exc.args and exc.args[0] in jobs:
                job = jobs[exc.args[0]]
                if job.context.action_key == Loc.A_SOURCE:
                    source = job.context
                    keys = [self.PREFIX + source.used_by_names[0],
                            "source"]
                    raise ConfigProcessError(keys, source.name)
            raise exc
        finally:
            loc_dao.execute_queued_items()
            rmtree(work_dir)
    # Target checksum compare and report
    for target in targets.values():
        if (not target.is_out_of_date or
                target.loc_type == target.TYPE_TREE):
            continue
        keys = [self.PREFIX + target.name, "checksum"]
        checksum_expected = conf_tree.node.get_value(keys)
        if checksum_expected is None:
            continue
        checksum = target.paths[0].checksum
        if checksum_expected:
            # Length mismatch suggests a different algorithm was
            # configured; recompute with the guessed one before comparing.
            if len(checksum_expected) != len(checksum):
                algorithm = guess_checksum_algorithm(checksum_expected)
                if algorithm:
                    checksum = get_checksum_func(algorithm)(target.name)
            if checksum_expected != checksum:
                exc = ChecksumError(checksum_expected, checksum)
                raise ConfigProcessError(keys, checksum_expected, exc)
        event = ChecksumEvent(target.name, target.paths[0].checksum)
        self.handle_event(event)
def _run_target_setup(self, app_runner, compress_manager, config, t_key,
                      t_node):
    """Helper for _run. Set up a target.

    Build and return a RoseArchTarget from the configuration section
    t_node (whose section key is t_key, e.g. "arch:NAME").  On a
    recoverable configuration problem the target is returned with
    target.status set to target.ST_BAD (or target.ST_NULL for an empty,
    optional target); unrecoverable problems raise RoseArchValueError or
    ConfigValueError.

    app_runner -- used for event reporting via handle_event.
    compress_manager -- scheme handler manager used to validate/guess the
        compression scheme.
    config -- the application configuration (passed through to _get_conf).
    """
    target_prefix = self._get_conf(config, t_node, "target-prefix",
                                   default="")
    s_key_tail = t_key.split(":", 1)[1]
    try:
        s_key_tail = env_var_process(s_key_tail)
    except UnboundEnvironmentVariableError as exc:
        raise ConfigValueError([t_key, ""], "", exc)
    # A section name wrapped in parentheses marks the whole target as
    # optional: missing sources then do not mark it ST_BAD.
    is_compulsory_target = True
    if s_key_tail.startswith("(") and s_key_tail.endswith(")"):
        s_key_tail = s_key_tail[1:-1]
        is_compulsory_target = False
    target = RoseArchTarget(target_prefix + s_key_tail)
    target.command_format = self._get_conf(config, t_node, "command-format",
                                           compulsory=True)
    # Dry-run the %-format with empty values to catch bad placeholders
    # early; a failure flags the target bad but setup continues so other
    # problems can also be reported.
    try:
        target.command_format % {"sources": "", "target": ""}
    except KeyError as exc:
        target.status = target.ST_BAD
        app_runner.handle_event(
            RoseArchValueError(target.name, "command-format",
                               target.command_format,
                               type(exc).__name__, exc))
    target.source_edit_format = self._get_conf(config, t_node,
                                               "source-edit-format",
                                               default="")
    # Same early validation for the optional source edit command format.
    try:
        target.source_edit_format % {"in": "", "out": ""}
    except KeyError as exc:
        target.status = target.ST_BAD
        app_runner.handle_event(
            RoseArchValueError(target.name, "source-edit-format",
                               target.source_edit_format,
                               type(exc).__name__, exc))
    update_check_str = self._get_conf(config, t_node, "update-check")
    try:
        checksum_func = get_checksum_func(update_check_str)
    except ValueError as exc:
        raise RoseArchValueError(target.name, "update-check",
                                 update_check_str, type(exc).__name__, exc)
    source_prefix = self._get_conf(config, t_node, "source-prefix",
                                   default="")
    # Resolve each source glob; sources are keyed by checksum so identical
    # content is stored once.
    for source_glob in shlex.split(
            self._get_conf(config, t_node, "source", compulsory=True)):
        # A glob wrapped in parentheses marks that source optional,
        # regardless of whether the target itself is compulsory.
        is_compulsory_source = is_compulsory_target
        if source_glob.startswith("(") and source_glob.endswith(")"):
            source_glob = source_glob[1:-1]
            is_compulsory_source = False
        paths = glob(source_prefix + source_glob)
        if not paths:
            # Report a missing source as ENOENT; only a compulsory source
            # makes the target bad.
            exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
                          source_prefix + source_glob)
            app_runner.handle_event(
                ConfigValueError([t_key, "source"], source_glob, exc))
            if is_compulsory_source:
                target.status = target.ST_BAD
            continue
        for path in paths:
            # N.B. source_prefix may not be a directory
            name = path[len(source_prefix):]
            for path_, checksum, _ in get_checksum(path, checksum_func):
                if checksum is None:  # is directory
                    continue
                if path_:
                    # path is a directory: path_ is a file under it.
                    target.sources[checksum] = RoseArchSource(
                        checksum, os.path.join(name, path_),
                        os.path.join(path, path_))
                else:  # path is a file
                    target.sources[checksum] = RoseArchSource(
                        checksum, name, path)
    if not target.sources:
        # No sources at all: bad if compulsory, otherwise a null target.
        if is_compulsory_target:
            target.status = target.ST_BAD
        else:
            target.status = target.ST_NULL
    target.compress_scheme = self._get_conf(config, t_node, "compress")
    if not target.compress_scheme:
        # No explicit scheme: guess one from the target name's extension
        # (everything after the first "." of its basename).
        target_base = target.name
        if "/" in target.name:
            target_base = target.name.rsplit("/", 1)[1]
        if "." in target_base:
            tail = target_base.split(".", 1)[1]
            if compress_manager.get_handler(tail):
                target.compress_scheme = tail
    elif compress_manager.get_handler(target.compress_scheme) is None:
        # Explicit but unknown scheme: report and flag bad.
        app_runner.handle_event(
            ConfigValueError([t_key, "compress"],
                             target.compress_scheme,
                             KeyError(target.compress_scheme)))
        target.status = target.ST_BAD
    rename_format = self._get_conf(config, t_node, "rename-format")
    if rename_format:
        rename_parser_str = self._get_conf(config, t_node, "rename-parser")
        if rename_parser_str:
            try:
                rename_parser = re.compile(rename_parser_str)
            except re.error as exc:
                raise RoseArchValueError(target.name, "rename-parser",
                                         rename_parser_str,
                                         type(exc).__name__, exc)
        else:
            rename_parser = None
        # Rename each source: the format sees "cycle", "name" and any
        # named groups matched by rename-parser against the source name.
        for source in target.sources.values():
            dict_ = {
                "cycle": os.getenv("ROSE_TASK_CYCLE_TIME"),
                "name": source.name
            }
            if rename_parser:
                match = rename_parser.match(source.name)
                if match:
                    dict_.update(match.groupdict())
            try:
                source.name = rename_format % dict_
            except (KeyError, ValueError) as exc:
                raise RoseArchValueError(target.name, "rename-format",
                                         rename_format,
                                         type(exc).__name__, exc)
    return target
def _run(self, dao, app_runner, config):
    """Transform and archive suite files.

    This application is designed to work under "rose task-run" in a suite.

    Phase 1 builds a RoseArchTarget per "SECTION:name" section of config
    and syncs it against the DAO: an unchanged target becomes ST_OLD,
    a changed one is deleted from the database.  Phase 2 runs each
    non-old, non-bad target: rename/edit sources into a temporary work
    directory, optionally compress them, then run the archive command.

    dao -- database access object (select/insert/delete/delete_all/
        update_command_rc).
    app_runner -- provides popen, fs_util and handle_event.
    config -- the application configuration node.

    Return the number of targets that finished in the ST_BAD state.
    """
    # Locate compression scheme handlers relative to the rose package.
    path = os.path.dirname(os.path.dirname(sys.modules["rose"].__file__))
    compress_manager = SchemeHandlersManager(
        [path], "rose.apps.rose_arch_compressions", ["compress_sources"],
        None, app_runner)
    # Set up the targets
    cycle = os.getenv("ROSE_TASK_CYCLE_TIME")
    targets = []
    for t_key, t_node in sorted(config.value.items()):
        # Only process "SECTION:tail" sections that are not ignored.
        if t_node.is_ignored() or ":" not in t_key:
            continue
        s_key_head, s_key_tail = t_key.split(":", 1)
        if s_key_head != self.SECTION or not s_key_tail:
            continue
        target_prefix = self._get_conf(config, t_node, "target-prefix",
                                       default="")
        try:
            s_key_tail = env_var_process(s_key_tail)
        except UnboundEnvironmentVariableError as exc:
            raise ConfigValueError([t_key, ""], "", exc)
        target_name = target_prefix + s_key_tail
        target = RoseArchTarget(target_name)
        target.command_format = self._get_conf(config, t_node,
                                               "command-format",
                                               compulsory=True)
        # Dry-run the %-format with empty values to catch bad
        # placeholders early; failure flags the target bad but setup
        # continues so other problems can also be reported.
        try:
            target.command_format % {"sources": "", "target": ""}
        except KeyError as exc:
            target.status = target.ST_BAD
            app_runner.handle_event(
                RoseArchValueError(target.name, "command-format",
                                   target.command_format,
                                   type(exc).__name__, exc))
        source_str = self._get_conf(config, t_node, "source",
                                    compulsory=True)
        source_prefix = self._get_conf(config, t_node, "source-prefix",
                                       default="")
        target.source_edit_format = self._get_conf(config, t_node,
                                                   "source-edit-format",
                                                   default="")
        # Same early validation for the optional source edit format.
        try:
            target.source_edit_format % {"in": "", "out": ""}
        except KeyError as exc:
            target.status = target.ST_BAD
            app_runner.handle_event(
                RoseArchValueError(target.name, "source-edit-format",
                                   target.source_edit_format,
                                   type(exc).__name__, exc))
        update_check_str = self._get_conf(config, t_node, "update-check",
                                          default="md5sum")
        # NOTE(review): _run_target_setup catches ValueError from
        # get_checksum_func for the same option, and hashlib-backed
        # checksum lookups raise ValueError for unknown algorithms —
        # confirm this KeyError catch is not dead.
        try:
            checksum_func = get_checksum_func(update_check_str)
        except KeyError as exc:
            raise RoseArchValueError(target.name, "update-check",
                                     update_check_str,
                                     type(exc).__name__, exc)
        # Resolve each source glob; sources are keyed by checksum so
        # identical content is stored once.
        for source_glob in shlex.split(source_str):
            paths = glob(source_prefix + source_glob)
            if not paths:
                exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
                              source_glob)
                app_runner.handle_event(
                    ConfigValueError([t_key, "source"], source_glob, exc))
                target.status = target.ST_BAD
                continue
            for path in paths:
                # N.B. source_prefix may not be a directory
                name = path[len(source_prefix):]
                for path_, checksum, _ in get_checksum(
                        path, checksum_func):
                    if checksum is None:  # is directory
                        continue
                    if path_:
                        # path is a directory: path_ is a file under it.
                        target.sources[checksum] = RoseArchSource(
                            checksum, os.path.join(name, path_),
                            os.path.join(path, path_))
                    else:  # path is a file
                        target.sources[checksum] = RoseArchSource(
                            checksum, name, path)
        target.compress_scheme = self._get_conf(config, t_node, "compress")
        if target.compress_scheme:
            # Explicit scheme must have a registered handler.
            if (compress_manager.get_handler(target.compress_scheme)
                    is None):
                app_runner.handle_event(
                    ConfigValueError([t_key, "compress"],
                                     target.compress_scheme,
                                     KeyError(target.compress_scheme)))
                target.status = target.ST_BAD
        else:
            # No explicit scheme: guess one from the target name's
            # extension (everything after the first "." of its basename).
            target_base = target.name
            if "/" in target.name:
                target_base = target.name.rsplit("/", 1)[1]
            if "." in target_base:
                tail = target_base.split(".", 1)[1]
                if compress_manager.get_handler(tail):
                    target.compress_scheme = tail
        rename_format = self._get_conf(config, t_node, "rename-format")
        if rename_format:
            rename_parser_str = self._get_conf(config, t_node,
                                               "rename-parser")
            if rename_parser_str:
                try:
                    rename_parser = re.compile(rename_parser_str)
                except re.error as exc:
                    raise RoseArchValueError(target.name, "rename-parser",
                                             rename_parser_str,
                                             type(exc).__name__, exc)
            else:
                rename_parser = None
            # Rename each source: the format sees "cycle", "name" and any
            # named groups matched by rename-parser against the name.
            for source in target.sources.values():
                dict_ = {"cycle": cycle, "name": source.name}
                if rename_parser:
                    match = rename_parser.match(source.name)
                    if match:
                        dict_.update(match.groupdict())
                try:
                    source.name = rename_format % dict_
                except (KeyError, ValueError) as exc:
                    raise RoseArchValueError(target.name, "rename-format",
                                             rename_format,
                                             type(exc).__name__, exc)
        # Sync against the database: unchanged targets become ST_OLD,
        # changed ones are removed so they will be re-archived.
        old_target = dao.select(target.name)
        if old_target is None or old_target != target:
            dao.delete(target)
        else:
            target.status = target.ST_OLD
        targets.append(target)
    # Delete from database items that are no longer relevant
    dao.delete_all(filter_targets=targets)
    # Update the targets
    for target in targets:
        if target.status == target.ST_OLD:
            app_runner.handle_event(RoseArchEvent(target))
            continue
        # Record the target first with a failing rc; a crash before the
        # command runs therefore leaves it marked unsuccessful.
        target.command_rc = 1
        dao.insert(target)
        if target.status == target.ST_BAD:
            # Bad targets are recorded but never executed.
            app_runner.handle_event(RoseArchEvent(target))
            continue
        work_dir = mkdtemp()
        t_init = time()
        # Default both timestamps so the event always has three times,
        # even if an early failure skips the later phases.
        t_tran, t_arch = t_init, t_init
        ret_code = None
        try:
            # Rename/edit sources
            # Assume failure until the archive command succeeds.
            target.status = target.ST_BAD
            rename_required = False
            for source in target.sources.values():
                if source.name != source.orig_name:
                    rename_required = True
                    break
            if rename_required or target.source_edit_format:
                # Materialise each source under work_dir, either via the
                # edit command or as a symlink to the original.
                for source in target.sources.values():
                    source.path = os.path.join(work_dir, source.name)
                    source_path_d = os.path.dirname(source.path)
                    app_runner.fs_util.makedirs(source_path_d)
                    if target.source_edit_format:
                        fmt_args = {
                            "in": source.orig_path,
                            "out": source.path
                        }
                        command = target.source_edit_format % fmt_args
                        app_runner.popen.run_ok(command, shell=True)
                    else:
                        app_runner.fs_util.symlink(source.orig_path,
                                                   source.path)
            # Compress sources
            if target.compress_scheme:
                handler = compress_manager.get_handler(
                    target.compress_scheme)
                handler.compress_sources(target, work_dir)
            t_tran = time()
            # Run archive command
            sources = []
            if target.work_source_path:
                # Compression produced a single combined source.
                sources = [target.work_source_path]
            else:
                for source in target.sources.values():
                    sources.append(source.path)
            sources_str = app_runner.popen.list_to_shell_str(sources)
            target_str = app_runner.popen.list_to_shell_str([target.name])
            command = target.command_format % {
                "sources": sources_str,
                "target": target_str
            }
            ret_code, out, err = app_runner.popen.run(command, shell=True)
            t_arch = time()
            if ret_code:
                app_runner.handle_event(
                    RosePopenError([command], ret_code, out, err))
            else:
                target.status = target.ST_NEW
                app_runner.handle_event(err, kind=Event.KIND_ERR)
            app_runner.handle_event(out)
            target.command_rc = ret_code
            dao.update_command_rc(target)
        finally:
            # Always remove the work directory and report the outcome
            # with the init/transform/archive timings.
            app_runner.fs_util.delete(work_dir)
            app_runner.handle_event(
                RoseArchEvent(target, [t_init, t_tran, t_arch], ret_code))
    return [target.status for target in targets].count(
        RoseArchTarget.ST_BAD)