Example No. 1
 async def _source_pull(self, source, conf_tree, work_dir):
     """Pulls a source to its cache in the work directory."""
     source.cache = os.path.join(
         work_dir,
         # Checksum the source name (not its contents).
         # This is a cheap way to provide a unique, repeatable and
         # filesystem-safe identifier for a source name (which could
         # be a URL).
         get_checksum_func()(BytesIO(source.name.encode())))
     return await self.loc_handlers_manager.pull(source, conf_tree)
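
Note that the cache file is named after a checksum of the source name rather than its contents, which yields a stable, filesystem-safe path even when the source is a URL and before anything has been fetched. A minimal standalone sketch of the same idea (hashlib.md5 is used purely for illustration; the actual checksum scheme behind Rose's get_checksum_func is not assumed here):

    import hashlib
    import os
    from io import BytesIO

    def cache_path_for(work_dir, source_name):
        """Return a repeatable, filesystem-safe cache path for a source name."""
        # Hash the name (e.g. a URL), not the contents, so the path is stable
        # and contains no characters that are unsafe on disk.
        handle = BytesIO(source_name.encode())  # mirrors the file-like call above
        return os.path.join(work_dir, hashlib.md5(handle.read()).hexdigest())

    # cache_path_for("/tmp/work", "https://example.com/data.tar.gz")
    # -> "/tmp/work/<32-character hex digest>"
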
Example No. 2
 def _run_target_setup(self,
                       app_runner,
                       compress_manager,
                       config,
                       t_key,
                       s_key_tail,
                       t_node,
                       is_compulsory_target=True):
     """Helper for _run. Set up a target."""
     target_prefix = self._get_conf(config,
                                    t_node,
                                    "target-prefix",
                                    default="")
     target = RoseArchTarget(target_prefix + s_key_tail)
     target.command_format = self._get_conf(config,
                                            t_node,
                                            "command-format",
                                            compulsory=True)
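     # Probe the format string with dummy values so that an unknown
     # placeholder is reported now rather than when the command runs.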
     try:
         target.command_format % {"sources": "", "target": ""}
     except KeyError as exc:
         target.status = target.ST_BAD
         app_runner.handle_event(
             RoseArchValueError(target.name, "command-format",
                                target.command_format,
                                type(exc).__name__, exc))
     target.source_edit_format = self._get_conf(config,
                                                t_node,
                                                "source-edit-format",
                                                default="")
     try:
         target.source_edit_format % {"in": "", "out": ""}
     except KeyError as exc:
         target.status = target.ST_BAD
         app_runner.handle_event(
             RoseArchValueError(target.name, "source-edit-format",
                                target.source_edit_format,
                                type(exc).__name__, exc))
     update_check_str = self._get_conf(config, t_node, "update-check")
     try:
         checksum_func = get_checksum_func(update_check_str)
     except ValueError as exc:
         raise RoseArchValueError(target.name, "update-check",
                                  update_check_str,
                                  type(exc).__name__, exc)
     source_prefix = self._get_conf(config,
                                    t_node,
                                    "source-prefix",
                                    default="")
     for source_glob in shlex.split(
             self._get_conf(config, t_node, "source", compulsory=True)):
         is_compulsory_source = is_compulsory_target
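         # A glob wrapped in parentheses marks an optional source:
         # a missing match does not mark the target as bad.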
         if source_glob.startswith("(") and source_glob.endswith(")"):
             source_glob = source_glob[1:-1]
             is_compulsory_source = False
         paths = glob(source_prefix + source_glob)
         if not paths:
             exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
                           source_prefix + source_glob)
             app_runner.handle_event(
                 ConfigValueError([t_key, "source"], source_glob, exc))
             if is_compulsory_source:
                 target.status = target.ST_BAD
             continue
         for path in paths:
             # N.B. source_prefix may not be a directory
             name = path[len(source_prefix):]
             for path_, checksum, _ in get_checksum(path, checksum_func):
                 if checksum is None:  # is directory
                     continue
                 if path_:
                     target.sources[checksum] = RoseArchSource(
                         checksum, os.path.join(name, path_),
                         os.path.join(path, path_))
                 else:  # path is a file
                     target.sources[checksum] = RoseArchSource(
                         checksum, name, path)
     if not target.sources:
         if is_compulsory_target:
             target.status = target.ST_BAD
         else:
             target.status = target.ST_NULL
     target.compress_scheme = self._get_conf(config, t_node, "compress")
     if not target.compress_scheme:
         target_base = target.name
         if "/" in target.name:
             target_base = target.name.rsplit("/", 1)[1]
         if "." in target_base:
             tail = target_base.split(".", 1)[1]
             if compress_manager.get_handler(tail):
                 target.compress_scheme = tail
     elif compress_manager.get_handler(target.compress_scheme) is None:
         app_runner.handle_event(
             ConfigValueError([t_key, "compress"], target.compress_scheme,
                              KeyError(target.compress_scheme)))
         target.status = target.ST_BAD
     rename_format = self._get_conf(config, t_node, "rename-format")
     if rename_format:
         rename_parser_str = self._get_conf(config, t_node, "rename-parser")
         if rename_parser_str:
             try:
                 rename_parser = re.compile(rename_parser_str)
             except re.error as exc:
                 raise RoseArchValueError(target.name, "rename-parser",
                                          rename_parser_str,
                                          type(exc).__name__, exc)
         else:
             rename_parser = None
         for source in target.sources.values():
             dict_ = {
                 "cycle": os.getenv("ROSE_TASK_CYCLE_TIME"),
                 "name": source.name
             }
             if rename_parser:
                 match = rename_parser.match(source.name)
                 if match:
                     dict_.update(match.groupdict())
             try:
                 source.name = rename_format % dict_
             except (KeyError, ValueError) as exc:
                 raise RoseArchValueError(target.name, "rename-format",
                                          rename_format,
                                          type(exc).__name__, exc)
     return target
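
A pattern worth noting in this example: each user-supplied %-format string (command-format, source-edit-format, rename-format) is probed once with dummy values so that an unknown placeholder fails at setup time instead of part-way through archiving. A self-contained sketch of that check, with an illustrative helper name and error message that are not part of Rose:

    def validate_format(fmt, allowed_keys):
        """Raise ValueError if fmt uses a placeholder outside allowed_keys."""
        try:
            # Substitute empty strings for every allowed key; any other
            # %(key)s placeholder raises KeyError, surfaced here as ValueError.
            fmt % {key: "" for key in allowed_keys}
        except KeyError as exc:
            raise ValueError("unknown placeholder %s in %r" % (exc, fmt)) from exc

    validate_format("tar -czf %(target)s %(sources)s", ["sources", "target"])  # OK
    # validate_format("cp %(src)s %(target)s", ["sources", "target"])
    # -> ValueError: unknown placeholder 'src' in 'cp %(src)s %(target)s'
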
Example No. 3
    def _process(self, conf_tree, nodes, loc_dao, **kwargs):
        """Helper for self.process."""
        # Ensure that everything is overwritable
        # Ensure that container directories exist
        for key, node in sorted(nodes.items()):
            try:
                name = env_var_process(key[len(self.PREFIX) :])
            except UnboundEnvironmentVariableError as exc:
                raise ConfigProcessError([key], key, exc)
            if os.path.exists(name) and kwargs.get("no_overwrite_mode"):
                raise ConfigProcessError([key], None, FileOverwriteError(name))
            self.manager.fs_util.makedirs(self.manager.fs_util.dirname(name))
        # Get the lists of sources and targets
        sources = {}
        targets = {}
        for key, node in sorted(nodes.items()):
            # N.B. no need to catch UnboundEnvironmentVariableError here
            #      because any exception would have been caught earlier.
            name = env_var_process(key[len(self.PREFIX) :])
            targets[name] = Loc(name)
            targets[name].action_key = Loc.A_INSTALL
            targets[name].mode = node.get_value(["mode"])
            if targets[name].mode and targets[name].mode not in Loc.MODES:
                raise ConfigProcessError([key, "mode"], targets[name].mode)
            target_sources = []
            for k in ["content", "source"]:  # "content" for back compat
                source_str = node.get_value([k])
                if source_str is None:
                    continue
                try:
                    source_str = env_var_process(source_str)
                except UnboundEnvironmentVariableError as exc:
                    raise ConfigProcessError([key, k], source_str, exc)
                source_names = []
                for raw_source_glob in shlex.split(source_str):
                    source_glob = raw_source_glob
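                    # Parentheses mark an optional source. Strip them for
                    # globbing; if nothing matches, the raw parenthesised
                    # form is kept so the source is flagged optional below.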
                    if raw_source_glob.startswith(
                        "("
                    ) and raw_source_glob.endswith(")"):
                        source_glob = raw_source_glob[1:-1]
                    names = glob(source_glob)
                    if names:
                        source_names += sorted(names)
                    else:
                        source_names.append(raw_source_glob)
                for raw_source_name in source_names:
                    source_name = raw_source_name
                    is_optional = raw_source_name.startswith(
                        "("
                    ) and raw_source_name.endswith(")")
                    if is_optional:
                        source_name = raw_source_name[1:-1]
                    if source_name.startswith("~"):
                        source_name = os.path.expanduser(source_name)
                    if targets[name].mode in [
                        targets[name].MODE_SYMLINK,
                        targets[name].MODE_SYMLINK_PLUS,
                    ]:
                        if targets[name].real_name:
                            # Symlink mode can only have 1 source
                            raise ConfigProcessError([key, k], source_str)
                        targets[name].real_name = source_name
                    else:
                        if source_name not in sources:
                            sources[source_name] = Loc(source_name)
                            sources[source_name].action_key = Loc.A_SOURCE
                            sources[source_name].is_optional = is_optional
                        sources[source_name].used_by_names.append(name)
                        target_sources.append(sources[source_name])
            targets[name].dep_locs = target_sources
            if not targets[name].real_name and targets[name].mode in [
                targets[name].MODE_SYMLINK,
                targets[name].MODE_SYMLINK_PLUS,
            ]:
                raise ConfigProcessError([key, "source"], None)

        # Determine the scheme of the location from configuration.
        config_schemes_str = conf_tree.node.get_value(["schemes"])
        config_schemes = []  # [(pattern, scheme), ...]
        if config_schemes_str:
            for line in config_schemes_str.splitlines():
                pattern, scheme = line.split("=", 1)
                pattern = pattern.strip()
                scheme = scheme.strip()
                config_schemes.append((pattern, scheme))

        # Where applicable, determine for each source:
        # * Its real name.
        # * The checksums of its paths.
        # * Whether it can be considered unchanged.
        for source in list(sources.values()):
            try:
                for pattern, scheme in config_schemes:
                    if fnmatch(source.name, pattern):
                        source.scheme = scheme
                        break
                self.loc_handlers_manager.parse(source, conf_tree)
            except ValueError:
                if source.is_optional:
                    sources.pop(source.name)
                    for name in source.used_by_names:
                        targets[name].dep_locs.remove(source)
                        event = SourceSkipEvent(name, source.name)
                        self.handle_event(event)
                    continue
                else:
                    raise ConfigProcessError(
                        ["file:" + source.used_by_names[0], "source"],
                        source.name,
                    )
            prev_source = loc_dao.select(source.name)
            source.is_out_of_date = (
                not prev_source
                or (not source.key and not source.paths)
                or prev_source.scheme != source.scheme
                or prev_source.loc_type != source.loc_type
                or prev_source.key != source.key
                or sorted(prev_source.paths) != sorted(source.paths)
            )

        # Inspect each target to see if it is out of date:
        # * Target does not already exist.
        # * Target exists, but does not have a database entry.
        # * Target exists, but does not match settings in database.
        # * Target exists, but a source cannot be considered unchanged.
        for target in list(targets.values()):
            if target.real_name:
                target.is_out_of_date = not os.path.islink(
                    target.name
                ) or target.real_name != os.readlink(target.name)
            elif target.mode == target.MODE_MKDIR:
                target.is_out_of_date = os.path.islink(
                    target.name
                ) or not os.path.isdir(target.name)
            else:
                if os.path.exists(target.name) and not os.path.islink(
                    target.name
                ):
                    for path, checksum, access_mode in get_checksum(
                        target.name
                    ):
                        target.add_path(path, checksum, access_mode)
                    target.paths.sort()
                prev_target = loc_dao.select(target.name)
                target.is_out_of_date = (
                    os.path.islink(target.name)
                    or not os.path.exists(target.name)
                    or prev_target is None
                    or prev_target.mode != target.mode
                    or len(prev_target.paths) != len(target.paths)
                )
                if not target.is_out_of_date:
                    prev_target.paths.sort()
                    for prev_path, path in zip(
                        prev_target.paths, target.paths
                    ):
                        if prev_path != path:
                            target.is_out_of_date = True
                            break
                # See if any sources have changed names.
                if not target.is_out_of_date:
                    conn = loc_dao.get_conn()
                    prev_dep_locs = conn.execute(
                        """
                            SELECT *
                            FROM dep_names
                            WHERE name=?
                            ORDER BY ROWID
                        """,
                        [target.name],
                    ).fetchall()
                    prev_dep_locs = [i[1] for i in prev_dep_locs]
                    prev_dep_locs = [loc_dao.select(i) for i in prev_dep_locs]
                    if [i.name for i in prev_dep_locs] != [
                        i.name for i in target.dep_locs
                    ]:
                        target.is_out_of_date = True
                # See if any sources are out of date
                if not target.is_out_of_date:
                    for dep_loc in target.dep_locs:
                        if dep_loc.is_out_of_date:
                            target.is_out_of_date = True
                            break
            if target.is_out_of_date:
                target.paths = None
                loc_dao.delete_locs.append(target)

        # Set up jobs for rebuilding all out-of-date targets.
        jobs = {}
        for name, target in sorted(targets.items()):
            if not target.is_out_of_date:
                self.handle_event(FileUnchangedEvent(target, level=Event.V))
                continue
            if target.mode in [target.MODE_SYMLINK, target.MODE_SYMLINK_PLUS]:
                if target.mode == target.MODE_SYMLINK_PLUS:
                    try:
                        os.stat(target.real_name)
                    except OSError as exc:
                        raise ConfigProcessError(
                            [self.PREFIX + target.name, "source"],
                            target.real_name,
                            exc,
                        )
                self.manager.fs_util.symlink(target.real_name, target.name)
                loc_dao.update_locs.append(target)
            elif target.mode == target.MODE_MKDIR:
                if os.path.islink(target.name):
                    self.manager.fs_util.delete(target.name)
                self.manager.fs_util.makedirs(target.name)
                loc_dao.update_locs.append(target)
                target.loc_type = target.TYPE_TREE
                target.add_path(target.BLOB, None, None)
            elif target.dep_locs:
                if os.path.islink(target.name):
                    self.manager.fs_util.delete(target.name)
                jobs[target.name] = JobProxy(target)
                for source in target.dep_locs:
                    if source.name not in jobs:
                        jobs[source.name] = JobProxy(source)
                        jobs[source.name].event_level = Event.V
                    job = jobs[source.name]
                    jobs[target.name].pending_for[source.name] = job
                p_name = target.name
                while (
                    os.path.dirname(p_name)
                    and os.path.dirname(p_name) != p_name
                ):
                    p_name = os.path.dirname(p_name)
                    if p_name in jobs:
                        jobs[target.name].pending_for[p_name] = jobs[p_name]
            else:
                self.manager.fs_util.install(target.name)
                target.loc_type = target.TYPE_BLOB
                for path, checksum, access_mode in get_checksum(target.name):
                    target.add_path(path, checksum, access_mode)
                loc_dao.update_locs.append(target)
        loc_dao.execute_queued_items()

        # If relevant, use job runner to get sources and build targets
        if jobs:
            work_dir = mkdtemp()
            try:
                nproc_keys = ["rose.config_processors.fileinstall", "nproc"]
                nproc_str = conf_tree.node.get_value(nproc_keys)
                nproc = None
                if nproc_str is not None:
                    nproc = int(nproc_str)
                job_runner = JobRunner(self, nproc)
                job_runner(JobManager(jobs), conf_tree, loc_dao, work_dir)
            except ValueError as exc:
                if exc.args and exc.args[0] in jobs:
                    job = jobs[exc.args[0]]
                    if job.context.action_key == Loc.A_SOURCE:
                        source = job.context
                        keys = [
                            self.PREFIX + source.used_by_names[0],
                            "source",
                        ]
                        raise ConfigProcessError(keys, source.name)
                raise exc
            finally:
                loc_dao.execute_queued_items()
                rmtree(work_dir)

        # Compare and report target checksums
        for target in list(targets.values()):
            if (
                not target.is_out_of_date
                or target.loc_type == target.TYPE_TREE
            ):
                continue
            keys = [self.PREFIX + target.name, "checksum"]
            checksum_expected = conf_tree.node.get_value(keys)
            if checksum_expected is None:
                continue
            checksum = target.paths[0].checksum
            if checksum_expected:
                if len(checksum_expected) != len(checksum):
                    algorithm = guess_checksum_algorithm(checksum_expected)
                    if algorithm:
                        checksum = get_checksum_func(algorithm)(target.name)
                if checksum_expected != checksum:
                    raise ConfigProcessError(
                        keys,
                        checksum_expected,
                        ChecksumError(checksum_expected, checksum),
                    )
            event = ChecksumEvent(target.name, target.paths[0].checksum)
            self.handle_event(event)
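
The schemes setting parsed in this example is a line-oriented map of glob pattern to location scheme, applied to each source name with fnmatch before the location handlers are consulted. A standalone sketch of that resolution step, using a made-up configuration value for illustration:

    from fnmatch import fnmatch

    def resolve_scheme(schemes_str, source_name):
        """Return the scheme of the first pattern matching source_name, else None."""
        for line in (schemes_str or "").splitlines():
            if "=" not in line:
                continue  # this sketch simply skips blank or malformed lines
            pattern, scheme = (part.strip() for part in line.split("=", 1))
            if fnmatch(source_name, pattern):
                return scheme
        return None

    # Hypothetical configuration value:
    schemes = "http://* = wget\nsvn://* = svn"
    print(resolve_scheme(schemes, "http://example.com/data.nc"))  # -> wget
    print(resolve_scheme(schemes, "/local/path/data.nc"))         # -> None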