Example #1
def test_get_default_modules_invalid_branch(
    mock_get_rawhide, mock_scm, mock_mmd_new, mock_uses_rawhide, mock_mkdtemp, mock_rmtree,
    uses_rawhide,
):
    """
    Test that _get_default_modules raises an exception with an invalid branch.
    """
    mock_uses_rawhide.return_value = uses_rawhide
    mock_scm.return_value.sourcedir = "/some/path"
    mock_scm.return_value.checkout_ref.side_effect = [
        UnprocessableEntity("invalid branch"),
        UnprocessableEntity("invalid branch"),
    ]
    if uses_rawhide:
        mock_get_rawhide.return_value = "f32"
    else:
        mock_get_rawhide.return_value = "something_else"

    with pytest.raises(RuntimeError, match="Failed to retrieve the default modules"):
        default_modules._get_default_modules("f32", conf.default_modules_scm_url)

    mock_mmd_new.assert_not_called()
    if uses_rawhide:
        mock_scm.return_value.checkout_ref.assert_has_calls(
            [call("f32"), call(conf.rawhide_branch)],
        )
    else:
        mock_scm.return_value.checkout_ref.assert_called_once_with("f32")
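
The mocking pattern above can be reproduced in isolation: a mock whose checkout_ref keeps raising drives the fallback path, and the test asserts both the final RuntimeError and the exact call sequence. The sketch below is self-contained and uses illustrative names (fetch_defaults is not an MBS function); it only assumes pytest and the standard library.

from unittest.mock import MagicMock, call

import pytest


class UnprocessableEntity(Exception):
    """Stand-in for the MBS exception of the same name."""


def fetch_defaults(scm, branch, fallback_branch):
    # Try the requested branch first, then the fallback; give up with RuntimeError.
    for ref in (branch, fallback_branch):
        try:
            scm.checkout_ref(ref)
            return
        except UnprocessableEntity:
            continue
    raise RuntimeError("Failed to retrieve the default modules")


def test_fetch_defaults_invalid_branch():
    scm = MagicMock()
    scm.checkout_ref.side_effect = UnprocessableEntity("invalid branch")

    with pytest.raises(RuntimeError, match="Failed to retrieve the default modules"):
        fetch_defaults(scm, "f32", "rawhide")

    # Both the requested branch and the fallback were attempted, in order.
    scm.checkout_ref.assert_has_calls([call("f32"), call("rawhide")])
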
Example #2
    def checkout(self, scmdir):
        """Checkout the module from SCM.

        :param str scmdir: The working directory
        :returns: str -- the directory that the module was checked-out into
        :raises: RuntimeError
        """
        # TODO: sanity check arguments
        if self.scheme == "git":
            if not self._cloned:
                self.clone(scmdir)

            try:
                self.checkout_ref(self.commit)
            except UnprocessableEntity as e:
                if (str(e).endswith(
                        ' did not match any file(s) known to git.\\n"')
                        or "fatal: reference is not a tree: " in str(e)):
                    raise UnprocessableEntity(
                        "checkout: The requested commit hash was not found within the repository. "
                        "Perhaps you forgot to push. The original message was: %s"
                        % str(e))
                raise

            timestamp = SCM._run(["git", "show", "-s", "--format=%ct"],
                                 chdir=self.sourcedir)[1]
            dt = datetime.datetime.utcfromtimestamp(int(timestamp))
            self.version = dt.strftime("%Y%m%d%H%M%S")
        else:
            raise RuntimeError("checkout: Unhandled SCM scheme.")
        return self.sourcedir
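
For reference, the %Y%m%d%H%M%S version string derived above can be reproduced outside MBS with plain git and the standard library; this is a hedged standalone sketch and the helper name is illustrative.

import datetime
import subprocess

def version_from_git(repo_dir):
    # Committer timestamp of HEAD, formatted the same way checkout() does.
    timestamp = subprocess.run(
        ["git", "show", "-s", "--format=%ct"],
        cwd=repo_dir, capture_output=True, text=True, check=True,
    ).stdout.strip()
    return datetime.datetime.utcfromtimestamp(int(timestamp)).strftime("%Y%m%d%H%M%S")
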
Example #3
def test_get_default_modules(
    mock_get_rawhide, mock_scm, mock_mmd_new, mock_mkdtemp, mock_rmtree, is_rawhide,
):
    """
    Test that _get_default_modules returns the default modules.
    """
    mock_scm.return_value.sourcedir = "/some/path"
    if is_rawhide:
        mock_scm.return_value.checkout_ref.side_effect = [
            UnprocessableEntity("invalid branch"),
            None,
        ]
        mock_get_rawhide.return_value = "f32"

    expected = {"nodejs": "11"}
    mock_mmd_new.return_value.get_default_streams.return_value = expected

    rv = default_modules._get_default_modules("f32", conf.default_modules_scm_url)

    assert rv == expected
    if is_rawhide:
        mock_scm.return_value.checkout_ref.assert_has_calls(
            [call("f32"), call(conf.rawhide_branch)]
        )
    else:
        mock_scm.return_value.checkout_ref.assert_called_once_with("f32")
Example #4
def load_mmd(yaml, is_file=False):
    if not yaml:
        raise UnprocessableEntity('The input modulemd was empty')

    target_mmd_version = Modulemd.ModuleStreamVersionEnum.TWO
    try:
        if is_file:
            mmd = Modulemd.ModuleStream.read_file(yaml, True)
        else:
            mmd = Modulemd.ModuleStream.read_string(to_text_type(yaml), True)
        mmd.validate()
        if mmd.get_mdversion() < target_mmd_version:
            mmd = mmd.upgrade(target_mmd_version)
        elif mmd.get_mdversion() > target_mmd_version:
            log.error("Encountered a modulemd file with the version %d", mmd.get_mdversion())
            raise UnprocessableEntity(
                "The modulemd version cannot be greater than {}".format(target_mmd_version))
    except ModuleMDError as e:
        not_found = False
        if is_file:
            error = "The modulemd {} is invalid.".format(os.path.basename(yaml))
            if os.path.exists(yaml):
                with open(yaml, "rt") as yaml_hdl:
                    log.debug("Modulemd that failed to load:\n%s", yaml_hdl.read())
            else:
                not_found = True
                error = "The modulemd file {} was not found.".format(os.path.basename(yaml))
                log.error("The modulemd file %s was not found.", yaml)
        else:
            error = "The modulemd is invalid."
            log.debug("Modulemd that failed to load:\n%s", yaml)

        if "modulemd-error-quark: " in str(e):
            error = "{} The error was '{}'.".format(
                error, str(e).split("modulemd-error-quark: ")[-1])
        elif "Unknown ModuleStream version" in str(e):
            error = (
                "{}. The modulemd version can't be greater than {}."
                .format(error, target_mmd_version)
            )
        elif not_found is False:
            error = "{} Please verify the syntax is correct.".format(error)

        log.exception(error)
        raise UnprocessableEntity(error)

    return mmd
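
A hedged usage sketch for load_mmd(); the file path below is hypothetical and libmodulemd (the gi Modulemd bindings) must be importable.

try:
    mmd = load_mmd("/path/to/nodejs.yaml", is_file=True)
    print(mmd.get_nsvc())
except UnprocessableEntity as error:
    # Invalid or missing modulemd documents surface as UnprocessableEntity.
    print("Rejected: {}".format(error))
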
Example #5
    def _get_modules(self,
                     name,
                     stream,
                     version=None,
                     context=None,
                     states=None,
                     strict=False,
                     **kwargs):
        """Query and return modules from MBS with specific info

        :param str name: module's name.
        :param str stream: module's stream.
        :kwarg str version: a string or int of the module's version. When None,
            latest version will be returned.
        :kwarg str context: module's context. Optional.
        :kwarg list states: module states to filter on. Defaults to ``ready``.
        :kwarg bool strict: Normally this function returns an empty list if no module
            can be found. If strict=True, then an UnprocessableEntity is raised.
        :return: final list of module_info which pass repoclosure
        :rtype: list[dict]
        :raises UnprocessableEntity: if no modules are found and ``strict`` is True.
        """
        query = self._query_from_nsvc(name, stream, version, context, states)
        query["page"] = 1
        query["per_page"] = 10
        query.update(kwargs)
        modules = []

        while True:
            res = requests_session.get(self.mbs_prod_url, params=query)
            if not res.ok:
                raise RuntimeError(self._generic_error %
                                   (query, res.status_code))

            data = res.json()
            modules_per_page = data["items"]
            modules += modules_per_page

            if not data["meta"]["next"]:
                break

            query["page"] += 1

        # Error handling
        if not modules:
            if strict:
                raise UnprocessableEntity("Failed to find module in MBS %r" %
                                          query)
            else:
                return modules

        if version is None and "stream_version_lte" not in kwargs:
            # Only return the latest version
            return [
                m for m in modules if m["version"] == modules[0]["version"]
            ]
        else:
            return modules
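
The paging loop above follows MBS's items/meta.next response shape. Stripped of the class context, the same pattern looks like the standalone sketch below (the URL and helper name are placeholders, not MBS code).

import requests

def fetch_all_pages(url, params):
    params = dict(params, page=1, per_page=10)
    items = []
    while True:
        response = requests.get(url, params=params)
        response.raise_for_status()
        data = response.json()
        items += data["items"]
        if not data["meta"]["next"]:
            break
        params["page"] += 1
    return items
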
Example #6
    def resolve_profiles(self, mmd, keys):
        """
        Returns a dictionary with keys set according to the `keys` parameter and values
        set to the union of all components defined in all installation profiles matching
        the key across all buildrequires. If any modules were loaded by
        load_local_builds(...), those local modules are considered when returning
        the profiles.
        :param mmd: Modulemd.ModuleStream instance representing the module
        :param keys: list of modulemd installation profiles to include in the result
        :return: a dictionary
        """
        results = {}
        for key in keys:
            results[key] = set()
        for module_name, module_info in mmd.get_xmd(
        )["mbs"]["buildrequires"].items():
            local_modules = models.ModuleBuild.local_modules(
                self.db_session, module_name, module_info["stream"])
            if local_modules:
                local_module = local_modules[0]
                log.info(
                    "Using local module {0!r} to resolve profiles.".format(
                        local_module))
                dep_mmd = local_module.mmd()
                for key in keys:
                    profile = dep_mmd.get_profile(key)
                    if profile:
                        results[key] |= set(profile.get_rpms())
                continue

            build = models.ModuleBuild.get_build_from_nsvc(
                self.db_session,
                module_name,
                module_info["stream"],
                module_info["version"],
                module_info["context"],
                state=models.BUILD_STATES["ready"],
            )
            if not build:
                raise UnprocessableEntity(
                    "The module {}:{}:{}:{} was not found".format(
                        module_name,
                        module_info["stream"],
                        module_info["version"],
                        module_info["context"],
                    ))
            dep_mmd = build.mmd()

            # Take note of what rpms are in this dep's profile
            for key in keys:
                profile = dep_mmd.get_profile(key)
                if profile:
                    results[key] |= set(profile.get_rpms())

        # Return the union of all rpms in all profiles of the given keys
        return results
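
The per-key accumulation above reduces to a small set-union helper; this standalone sketch (the name is illustrative) mirrors the loop body.

def union_profile_rpms(dep_mmds, keys):
    # For every requested profile name, union the RPMs from each dependency's profile.
    results = {key: set() for key in keys}
    for dep_mmd in dep_mmds:
        for key in keys:
            profile = dep_mmd.get_profile(key)
            if profile:
                results[key] |= set(profile.get_rpms())
    return results
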
Example #7
def _run_without_retry(cmd, chdir=None, log_stdout=False):
    proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE, cwd=chdir)
    stdout, stderr = proc.communicate()
    if log_stdout and stdout:
        log.debug(stdout)
    if stderr:
        log.warning(stderr)
    if proc.returncode != 0:
        raise UnprocessableEntity(
            "Failed on %r, retcode %r, out %r, err %r" %
            (cmd, proc.returncode, stdout, stderr))
    return proc.returncode, stdout, stderr
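
A hedged usage example of the helper above, called here as a plain function for illustration; the repository path is hypothetical.

returncode, stdout, stderr = _run_without_retry(
    ["git", "rev-parse", "HEAD"], chdir="/path/to/checkout")
print(stdout.decode("utf-8").strip())
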
Example #8
    def get_full_commit_hash(self, commit_hash=None):
        """
        Takes a shortened commit hash and returns the full hash
        :param commit_hash: a shortened commit hash. If not specified, the
        one in the URL will be used
        :return: string of the full commit hash
        """
        if commit_hash:
            commit_to_check = commit_hash
        elif self._commit:
            commit_to_check = self._commit
        else:
            try:
                # If self._commit was None, then calling `self.commit` will resolve the ref based
                # on the branch
                return self.commit
            except UnprocessableEntity:
                # If there was an exception resolving the ref based on the branch (could be the
                # default branch that doesn't exist), then there is not enough information to get
                # the commit hash
                raise RuntimeError(
                    'No commit hash was specified for "{0}"'.format(self.url))

        if self.scheme == "git":
            log.debug("Getting the full commit hash on %s from %s",
                      self.repository, commit_to_check)
            td = None
            try:
                td = tempfile.mkdtemp()
                SCM._run(["git", "clone", "-q", self.repository, td, "--bare"])
                cmd = ["git", "rev-parse", commit_to_check]
                log.debug("Running `%s` to get the full commit hash for %s",
                          " ".join(cmd), commit_to_check)
                output = SCM._run(cmd, chdir=td)[1]
            finally:
                if td and os.path.exists(td):
                    shutil.rmtree(td)

            if output:
                return str(output.decode("utf-8").strip("\n"))

            raise UnprocessableEntity(
                'The full commit hash of "{0}" for "{1}" could not be found'.
                format(commit_hash, self.repository))
        else:
            raise RuntimeError("get_full_commit_hash: Unhandled SCM scheme.")
Example #9
    def get_module(self,
                   name,
                   stream,
                   version,
                   context,
                   state=models.BUILD_STATES["ready"],
                   strict=False):
        mb = models.ModuleBuild.get_build_from_nsvc(self.db_session,
                                                    name,
                                                    stream,
                                                    version,
                                                    context,
                                                    state=state)
        if mb:
            return mb.extended_json(self.db_session)

        if strict:
            raise UnprocessableEntity(
                "Cannot find any module builds for %s:%s" % (name, stream))
Example #10
    def get_module_yaml(self):
        """
        Get full path to the module's YAML file.

        :return: path as a string
        :raises UnprocessableEntity
        """
        if not self.sourcedir:
            raise ProgrammingError("Do .checkout() first.")

        path_to_yaml = os.path.join(self.sourcedir, (self.name + ".yaml"))
        try:
            with open(path_to_yaml):
                return path_to_yaml
        except IOError:
            log.error(
                "get_module_yaml: The SCM repository doesn't contain a modulemd file. "
                "Couldn't access: %s" % path_to_yaml)
            raise UnprocessableEntity(
                "The SCM repository doesn't contain a modulemd file")
Example #11
    def get_module_modulemds(self,
                             name,
                             stream,
                             version=None,
                             context=None,
                             strict=False):
        """
        Gets the module modulemds from the resolver.
        :param name: a string of the module's name
        :param stream: a string of the module's stream
        :param version: a string or int of the module's version. When None, latest version will
            be returned.
        :param context: a string of the module's context. When None, all contexts will
            be returned.
        :kwarg strict: Normally this function returns [] if no module can be
            found.  If strict=True, then an UnprocessableEntity is raised.
        :return: List of Modulemd metadata instances matching the query
        """
        if version and context:
            mmd = self.get_module(name,
                                  stream,
                                  version,
                                  context,
                                  strict=strict)
            if mmd is None:
                return
            return [load_mmd(mmd["modulemd"])]

        if not version and not context:
            builds = models.ModuleBuild.get_last_builds_in_stream(
                self.db_session, name, stream)
        else:
            raise NotImplementedError(
                "This combination of name/stream/version/context is not implemented"
            )

        if not builds and strict:
            raise UnprocessableEntity(
                "Cannot find any module builds for %s:%s" % (name, stream))
        return [build.mmd() for build in builds]
Example #12
    def get_module_build_dependencies(self,
                                      name=None,
                                      stream=None,
                                      version=None,
                                      context=None,
                                      mmd=None,
                                      strict=False):
        """
        Returns a dictionary of koji_tag:[mmd, ...] of all the dependencies of the input module.

        Although a single Koji tag is expected to contain just a single module build,
        this is not necessarily true for offline local builds, which use the local repository
        identifier as `koji_tag`.

        :kwarg name: a string of a module's name (required if mmd is not set)
        :kwarg stream: a string of a module's stream (required if mmd is not set)
        :kwarg version: a string of a module's version (required if mmd is not set)
        :kwarg context: a string of a module's context (required if mmd is not set)
        :kwarg mmd: Modulemd.ModuleStream object. If this is set, the mmd will be used instead of
            querying the DB with the name, stream, version, and context.
        :kwarg strict: Normally this function returns None if no module can be
            found.  If strict=True, then an UnprocessableEntity is raised.
        :return: a dictionary
        """
        if mmd:
            log.debug(
                "get_module_build_dependencies(mmd={0!r} strict={1!r})".format(
                    mmd, strict))
        elif any(x is None for x in [name, stream, version, context]):
            raise RuntimeError(
                "The name, stream, version, and/or context weren't specified")
        else:
            version = str(version)
            log.debug(
                "get_module_build_dependencies({0}, strict={1!r})".format(
                    ", ".join([name, stream,
                               str(version), context]), strict))

        module_tags = {}
        if mmd:
            queried_mmd = mmd
            nsvc = ":".join([
                mmd.get_module_name(),
                mmd.get_stream_name(),
                str(mmd.get_version()),
                mmd.get_context() or models.DEFAULT_MODULE_CONTEXT,
            ])
        else:
            build = models.ModuleBuild.get_build_from_nsvc(
                self.db_session, name, stream, version, context)
            if not build:
                raise UnprocessableEntity("The module {} was not found".format(
                    ":".join([name, stream, version, context])))
            queried_mmd = build.mmd()
            nsvc = ":".join([name, stream, version, context])

        xmd_mbs = queried_mmd.get_xmd().get("mbs", {})
        if "buildrequires" not in xmd_mbs:
            raise RuntimeError(
                "The module {} did not contain its modulemd or did not have "
                "its xmd attribute filled out in MBS".format(nsvc))

        buildrequires = xmd_mbs["buildrequires"]
        for br_name, details in buildrequires.items():
            build = models.ModuleBuild.get_build_from_nsvc(
                self.db_session,
                br_name,
                details["stream"],
                details["version"],
                details["context"],
                state=models.BUILD_STATES["ready"],
            )
            if not build:
                raise RuntimeError(
                    "Buildrequired module %s %r does not exist in MBS db" %
                    (br_name, details))

            # If the buildrequire is a meta-data only module with no Koji tag set, then just
            # skip it
            if build.koji_tag is None:
                continue
            module_tags.setdefault(build.koji_tag, [])
            module_tags[build.koji_tag].append(build.mmd())

        return module_tags
Example #13
    def resolve_requires(self, requires):
        """
        Resolves a list of N:S or N:S:V:C requires to a dictionary keyed by module
        name, where each value is a dictionary with keys of ref, stream, version,
        context, and koji_tag.
        If any modules were loaded by utils.load_local_builds(...), those
        local modules are considered when resolving the requires. A RuntimeError
        is raised on DB lookup errors.
        :param requires: a list of N:S or N:S:V:C strings
        :return: a dictionary
        """
        new_requires = {}
        for nsvc in requires:
            nsvc_splitted = nsvc.split(":")
            if len(nsvc_splitted) == 2:
                module_name, module_stream = nsvc_splitted
                module_version = None
                module_context = None
            elif len(nsvc_splitted) == 4:
                module_name, module_stream, module_version, module_context = nsvc_splitted
            else:
                raise ValueError(
                    "Only N:S or N:S:V:C is accepted by resolve_requires, got %s"
                    % nsvc)

            local_modules = models.ModuleBuild.local_modules(
                self.db_session, module_name, module_stream)
            if local_modules:
                local_build = local_modules[0]
                new_requires[module_name] = {
                    "ref": None,
                    "stream": local_build.stream,
                    "version": local_build.version,
                    "context": local_build.context,
                    "koji_tag": local_build.koji_tag,
                }
                continue

            if module_version is None or module_context is None:
                build = models.ModuleBuild.get_last_build_in_stream(
                    self.db_session, module_name, module_stream)
            else:
                build = models.ModuleBuild.get_build_from_nsvc(
                    self.db_session, module_name, module_stream,
                    module_version, module_context)

            if not build:
                raise UnprocessableEntity(
                    "The module {} was not found".format(nsvc))

            for sibling_id in build.siblings(self.db_session):
                sibling_build = models.ModuleBuild.get_by_id(
                    self.db_session, sibling_id)
                if sibling_build.state not in (models.BUILD_STATES["ready"],
                                               models.BUILD_STATES["failed"]):
                    raise UnprocessableEntity(
                        'Buildrequire {}-{}-{} is in "{}" state'.format(
                            sibling_build.name, sibling_build.stream,
                            sibling_build.version,
                            models.INVERSE_BUILD_STATES[sibling_build.state]))

            commit_hash = None
            mmd = build.mmd()
            mbs_xmd = mmd.get_xmd().get("mbs", {})
            if mbs_xmd.get("commit"):
                commit_hash = mbs_xmd["commit"]
            else:
                raise RuntimeError(
                    'The module "{0}" didn\'t contain a commit hash in its xmd'
                    .format(module_name))

            if not mbs_xmd.get("mse"):
                raise RuntimeError(
                    'The module "{}" is not built using Module Stream Expansion. '
                    "Please rebuild this module first".format(nsvc))

            new_requires[module_name] = {
                "ref": commit_hash,
                "stream": module_stream,
                "version": build.version,
                "context": build.context,
                "koji_tag": build.koji_tag,
            }

        return new_requires
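
The N:S versus N:S:V:C handling at the top of resolve_requires can be isolated into a tiny parser; the sketch below is illustrative and not an MBS helper.

def parse_nsvc(spec):
    # Accept either name:stream or name:stream:version:context.
    parts = spec.split(":")
    if len(parts) == 2:
        name, stream = parts
        return name, stream, None, None
    if len(parts) == 4:
        name, stream, version, context = parts
        return name, stream, version, context
    raise ValueError(
        "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % spec)

assert parse_nsvc("platform:f32") == ("platform", "f32", None, None)
assert parse_nsvc("nodejs:11:20200101:deadbeef") == ("nodejs", "11", "20200101", "deadbeef")
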
Example #14
def import_mmd(db_session, mmd, check_buildrequires=True):
    """
    Imports the new module build defined by `mmd` into the MBS database using
    `db_session`. If it already exists, it is updated.

    The ModuleBuild.koji_tag is set according to xmd['mbs']['koji_tag'].
    The ModuleBuild.state is set to "ready".
    The ModuleBuild.rebuild_strategy is set to "all".
    The ModuleBuild.owner is set to "mbs_import".

    :param db_session: SQLAlchemy session object.
    :param mmd: module metadata being imported into database.
    :type mmd: Modulemd.ModuleStream
    :param bool check_buildrequires: When True, checks that the buildrequires defined in the MMD
        have matching records in the `mmd["xmd"]["mbs"]["buildrequires"]` and also fills in
        the `ModuleBuild.buildrequires` according to this data.
    :return: module build (ModuleBuild),
             log messages collected during import (list)
    :rtype: tuple
    """
    from module_build_service.common import models

    xmd = mmd.get_xmd()
    # Set some defaults in xmd["mbs"] if they're not provided by the user
    if "mbs" not in xmd:
        xmd["mbs"] = {"mse": True}

    if not mmd.get_context():
        mmd.set_context(models.DEFAULT_MODULE_CONTEXT)

    # NSVC is used for logging purposes later.
    nsvc = mmd.get_nsvc()
    if nsvc is None:
        msg = "Both the name and stream must be set for the modulemd being imported."
        log.error(msg)
        raise UnprocessableEntity(msg)

    name = mmd.get_module_name()
    stream = mmd.get_stream_name()
    version = str(mmd.get_version())
    context = mmd.get_context()

    xmd_mbs = xmd["mbs"]

    disttag_marking = xmd_mbs.get("disttag_marking")

    # If it is a base module, then make sure the value that will be used in the RPM disttags
    # doesn't contain a dash since a dash isn't allowed in the release field of the NVR
    if name in conf.base_module_names:
        if disttag_marking and "-" in disttag_marking:
            msg = "The disttag_marking cannot contain a dash"
            log.error(msg)
            raise UnprocessableEntity(msg)
        if not disttag_marking and "-" in stream:
            msg = "The stream cannot contain a dash unless disttag_marking is set"
            log.error(msg)
            raise UnprocessableEntity(msg)

    virtual_streams = xmd_mbs.get("virtual_streams", [])

    # Verify that the virtual streams are the correct type
    if virtual_streams and (
        not isinstance(virtual_streams, list)
        or any(not isinstance(vs, string_types) for vs in virtual_streams)
    ):
        msg = "The virtual streams must be a list of strings"
        log.error(msg)
        raise UnprocessableEntity(msg)

    if check_buildrequires:
        deps = mmd.get_dependencies()
        if len(deps) > 1:
            raise UnprocessableEntity(
                "The imported module's dependencies list should contain just one element")

        if "buildrequires" not in xmd_mbs:
            # Always set buildrequires if it is not there, because
            # get_buildrequired_base_modules requires xmd/mbs/buildrequires exists.
            xmd_mbs["buildrequires"] = {}
            mmd.set_xmd(xmd)

        if len(deps) > 0:
            brs = set(deps[0].get_buildtime_modules())
            xmd_brs = set(xmd_mbs["buildrequires"].keys())
            if brs - xmd_brs:
                raise UnprocessableEntity(
                    "The imported module buildrequires other modules, but the metadata in the "
                    'xmd["mbs"]["buildrequires"] dictionary is missing entries'
                )

    if "koji_tag" not in xmd_mbs:
        log.warning("'koji_tag' is not set in xmd['mbs'] for module {}".format(nsvc))
        log.warning("koji_tag will be set to None for imported module build.")

    # Log messages collected during import
    msgs = []

    # Get the ModuleBuild from DB.
    build = models.ModuleBuild.get_build_from_nsvc(db_session, name, stream, version, context)
    if build:
        msg = "Updating existing module build {}.".format(nsvc)
        log.info(msg)
        msgs.append(msg)
    else:
        build = models.ModuleBuild()
        db_session.add(build)

    build.name = name
    build.stream = stream
    build.version = version
    build.koji_tag = xmd_mbs.get("koji_tag")
    build.state = models.BUILD_STATES["ready"]
    build.modulemd = mmd_to_str(mmd)
    build.context = context
    build.owner = "mbs_import"
    build.rebuild_strategy = "all"
    now = datetime.utcnow()
    build.time_submitted = now
    build.time_modified = now
    build.time_completed = now
    if build.name in conf.base_module_names:
        build.stream_version = models.ModuleBuild.get_stream_version(stream)

    # Record the base modules this module buildrequires
    if check_buildrequires:
        for base_module in build.get_buildrequired_base_modules(db_session):
            if base_module not in build.buildrequires:
                build.buildrequires.append(base_module)

    build.update_virtual_streams(db_session, virtual_streams)

    db_session.commit()

    msg = "Module {} imported".format(nsvc)
    log.info(msg)
    msgs.append(msg)

    return build, msgs
Example #15
    def get_module_modulemds(
        self,
        name,
        stream,
        version=None,
        context=None,
        strict=False,
        stream_version_lte=False,
        virtual_streams=None,
        states=None,
    ):
        """
        Gets the module modulemds from the resolver.
        :param name: a string of the module's name
        :param stream: a string of the module's stream
        :param version: a string or int of the module's version. When None, latest version will
            be returned.
        :param context: a string of the module's context. When None, all contexts will
            be returned.
        :kwarg strict: Normally this function returns [] if no module can be
            found.  If strict=True, then an UnprocessableEntity is raised.
        :kwarg stream_version_lte: If True and if the `stream` can be transformed to
            "stream version", the returned list will include all the modules with stream version
            less than or equal to the stream version computed from `stream`.
        :kwarg virtual_streams: a list of the virtual streams to filter on. The filtering uses "or"
            logic. When falsy, no filtering occurs.
        :return: List of Modulemd metadata instances matching the query
        """
        yaml = None

        local_modules = models.ModuleBuild.local_modules(
            self.db_session, name, stream)
        if local_modules:
            return [m.mmd() for m in local_modules]

        extra_args = {}
        if stream_version_lte and (len(
                str(
                    models.ModuleBuild.get_stream_version(
                        stream, right_pad=False))) >= 5):
            stream_version = models.ModuleBuild.get_stream_version(stream)
            extra_args["stream_version_lte"] = stream_version

        if virtual_streams:
            extra_args["virtual_stream"] = virtual_streams

        modules = self._get_modules(name,
                                    stream,
                                    version,
                                    context,
                                    strict=strict,
                                    states=states,
                                    **extra_args)
        if not modules:
            return []

        mmds = []
        for module in modules:
            if module:
                yaml = module["modulemd"]

            if not yaml:
                if strict:
                    raise UnprocessableEntity(
                        "Failed to find modulemd entry in MBS for %r" % module)
                else:
                    return None

            mmds.append(load_mmd(yaml))
        return mmds
Example #16
def record_component_builds(mmd,
                            module,
                            initial_batch=1,
                            previous_buildorder=None,
                            main_mmd=None):
    # Imported here to allow import of utils in GenericBuilder.
    from module_build_service.builder import GenericBuilder

    # When main_mmd is set, merge the metadata from this mmd to main_mmd,
    # otherwise our current mmd is main_mmd.
    if main_mmd:
        # Check for components that are in both MMDs before merging since MBS
        # currently can't handle that situation.
        main_mmd_rpms = main_mmd.get_rpm_component_names()
        mmd_rpms = mmd.get_rpm_component_names()
        duplicate_components = [
            rpm for rpm in main_mmd_rpms if rpm in mmd_rpms
        ]
        if duplicate_components:
            error_msg = (
                'The included module "{0}" in "{1}" have the following '
                "conflicting components: {2}".format(
                    mmd.get_module_name(), main_mmd.get_module_name(),
                    ", ".join(duplicate_components)))
            raise UnprocessableEntity(error_msg)
        merge_included_mmd(main_mmd, mmd)
    else:
        main_mmd = mmd

    # If the modulemd yaml specifies components, then submit them for build
    rpm_components = [
        mmd.get_rpm_component(name) for name in mmd.get_rpm_component_names()
    ]
    module_components = [
        mmd.get_module_component(name)
        for name in mmd.get_module_component_names()
    ]
    all_components = list(rpm_components) + list(module_components)
    if not all_components:
        return

    # Get map of packages that have SRPM overrides
    srpm_overrides = get_module_srpm_overrides(module)

    rpm_weights = GenericBuilder.get_build_weights(
        [c.get_name() for c in rpm_components])
    all_components.sort(key=lambda x: x.get_buildorder())
    # We do not start with batch = 0 here, because the first batch is
    # reserved for module-build-macros. First real components must be
    # planned for batch 2 and following.
    batch = initial_batch

    for component in all_components:
        # Increment the batch number when buildorder increases.
        if previous_buildorder != component.get_buildorder():
            previous_buildorder = component.get_buildorder()
            batch += 1

        # If the component is another module, we fetch its modulemd file
        # and record its components recursively with the initial_batch
        # set to our current batch, so the components of this module
        # are built in the right global order.
        if isinstance(component, Modulemd.ComponentModule):
            full_url = component.get_repository() + "?#" + component.get_ref()
            # It is OK to whitelist all URLs here, because the validity
            # of every URL has already been checked in format_mmd(...).
            included_mmd = fetch_mmd(full_url, whitelist_url=True)[0]
            format_mmd(included_mmd, module.scmurl, module, db_session,
                       srpm_overrides)
            batch = record_component_builds(included_mmd, module, batch,
                                            previous_buildorder, main_mmd)
            continue

        package = component.get_name()
        if package in srpm_overrides:
            component_ref = None
            full_url = srpm_overrides[package]
            log.info('Building custom SRPM "{0}"'
                     " for package {1}".format(full_url, package))
        else:
            component_ref = mmd.get_xmd()["mbs"]["rpms"][package]["ref"]
            full_url = component.get_repository() + "?#" + component_ref

        # Skip the ComponentBuild if it already exists in the database. This can happen
        # in case of module build resubmission.
        existing_build = models.ComponentBuild.from_component_name(
            db_session, package, module.id)
        if existing_build:
            # Check that the existing build has the same most important attributes.
            # This should never be a problem, but it's good to be defensive here so
            # we do not mess things up during resubmission.
            if (existing_build.batch != batch
                    or existing_build.scmurl != full_url
                    or existing_build.ref != component_ref):
                raise ValidationError(
                    "Component build %s of module build %s (id: %d) already "
                    "exists in database, but its attributes are different from"
                    " resubmitted one." %
                    (component.get_name(), module.name, module.id))
            continue

        build = models.ComponentBuild(module_id=module.id,
                                      package=package,
                                      format="rpms",
                                      scmurl=full_url,
                                      batch=batch,
                                      ref=component_ref,
                                      weight=rpm_weights[package],
                                      buildonly=component.get_buildonly())
        db_session.add(build)

    return batch
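
The buildorder-to-batch bookkeeping above (batch 1 reserved for module-build-macros, the counter bumped whenever the buildorder changes) can be illustrated in isolation; assign_batches is a hypothetical helper, not MBS code.

def assign_batches(components, initial_batch=1):
    # components: list of (name, buildorder) tuples.
    batch = initial_batch
    previous_buildorder = None
    batches = {}
    for name, buildorder in sorted(components, key=lambda c: c[1]):
        if previous_buildorder != buildorder:
            previous_buildorder = buildorder
            batch += 1
        batches[name] = batch
    return batches

# Components with buildorder 0 land in batch 2; the next buildorder starts batch 3.
assert assign_batches([("acl", 0), ("attr", 0), ("bash", 10)]) == {
    "acl": 2, "attr": 2, "bash": 3}
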
Example #17
def format_mmd(mmd, scmurl, module=None, db_session=None, srpm_overrides=None):
    """
    Prepares the modulemd for the MBS. This does things such as replacing the
    branches of components with commit hashes and adding metadata in the xmd
    dictionary.
    :param mmd: the Modulemd.ModuleStream object to format
    :param scmurl: the url to the modulemd
    :param module: When specified together with `db_session`, the time_modified
        of the module is updated regularly in case this method takes a lot of time.
    :param db_session: Database session to update the `module`.
    :param dict srpm_overrides: Mapping of package names to SRPM links for all
        component packages which have custom SRPM overrides specified.
    """
    srpm_overrides = srpm_overrides or {}

    xmd = mmd.get_xmd()
    if "mbs" not in xmd:
        xmd["mbs"] = {}
    if "scmurl" not in xmd["mbs"]:
        xmd["mbs"]["scmurl"] = scmurl or ""
    if "commit" not in xmd["mbs"]:
        xmd["mbs"]["commit"] = ""

    # If the module build was submitted via a YAML file, there is no scmurl
    if scmurl:
        scm = module_build_service.common.scm.SCM(scmurl)
        # We want to make sure we have the full commit hash for consistency
        if module_build_service.common.scm.SCM.is_full_commit_hash(
                scm.scheme, scm.commit):
            full_scm_hash = scm.commit
        else:
            full_scm_hash = scm.get_full_commit_hash()

        xmd["mbs"]["commit"] = full_scm_hash

    if mmd.get_rpm_component_names() or mmd.get_module_component_names():
        if "rpms" not in xmd["mbs"]:
            xmd["mbs"]["rpms"] = {}
        # Add missing data in RPM components
        for pkgname in mmd.get_rpm_component_names():
            pkg = mmd.get_rpm_component(pkgname)
            # In case of a resubmission of an existing module which was
            # cancelled/failed during the init state, the package
            # may already have been handled by MBS, so skip it in this case.
            if pkgname in xmd["mbs"]["rpms"]:
                continue
            if pkg.get_repository() and not conf.rpms_allow_repository:
                raise Forbidden(
                    "Custom component repositories aren't allowed.  "
                    "%r bears repository %r" % (pkgname, pkg.get_repository()))
            if pkg.get_cache() and not conf.rpms_allow_cache:
                raise Forbidden("Custom component caches aren't allowed.  "
                                "%r bears cache %r" %
                                (pkgname, pkg.get_cache()))
            if pkg.get_buildafter():
                raise ValidationError(
                    'The usage of "buildafter" is not yet supported')
            if not pkg.get_repository():
                pkg.set_repository(conf.rpms_default_repository + pkgname)
            if not pkg.get_cache():
                pkg.set_cache(conf.rpms_default_cache + pkgname)
            if not pkg.get_ref():
                pkg.set_ref("master")
            if not pkg.get_arches():
                for arch in conf.arches:
                    pkg.add_restricted_arch(arch)

        # Add missing data in included modules components
        for modname in mmd.get_module_component_names():
            mod = mmd.get_module_component(modname)
            if mod.get_repository() and not conf.modules_allow_repository:
                raise Forbidden("Custom module repositories aren't allowed.  "
                                "%r bears repository %r" %
                                (modname, mod.get_repository()))
            if not mod.get_repository():
                mod.set_repository(conf.modules_default_repository + modname)
            if not mod.get_ref():
                mod.set_ref("master")

        # Check that the SCM URL is valid, replace potential branches in pkg refs
        # with the real SCM hash, and store the result in our private xmd place in the modulemd.
        pool = ThreadPool(20)
        try:
            # Filter out the packages which we have already resolved in possible
            # previous runs of this method (can be caused by module build resubmission)
            # or which have custom SRPMs and shouldn't be resolved.
            pkgs_to_resolve = []
            for name in mmd.get_rpm_component_names():
                if name not in xmd["mbs"]["rpms"]:
                    if name in srpm_overrides:
                        # If this package has a custom SRPM, store an empty
                        # ref entry so no further verification takes place.
                        xmd["mbs"]["rpms"][name] = {"ref": None}
                    else:
                        pkgs_to_resolve.append(mmd.get_rpm_component(name))

            async_result = pool.map_async(_scm_get_latest, pkgs_to_resolve)

            # For modules with a lot of components, _scm_get_latest can take a lot of time.
            # We need to bump time_modified from time to time, otherwise the poller could think
            # that the module is stuck in the "init" state and send a fake "init" message.
            while not async_result.ready():
                async_result.wait(60)
                if module and db_session:
                    module.time_modified = datetime.utcnow()
                    db_session.commit()
            pkg_dicts = async_result.get()
        finally:
            pool.close()

        err_msg = ""
        for pkg_dict in pkg_dicts:
            if pkg_dict["error"]:
                err_msg += pkg_dict["error"] + "\n"
            else:
                pkg_name = pkg_dict["pkg_name"]
                pkg_ref = pkg_dict["pkg_ref"]
                xmd["mbs"]["rpms"][pkg_name] = {"ref": pkg_ref}
        if err_msg:
            raise UnprocessableEntity(err_msg)

    # Set the modified xmd back to the modulemd
    mmd.set_xmd(xmd)
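
The keep-alive pattern around the thread pool above (map_async plus a periodic wait so time_modified can be refreshed) generalizes to the sketch below; the heartbeat callback is an assumption standing in for the database update, and the helper name is illustrative.

from multiprocessing.pool import ThreadPool

def map_with_heartbeat(func, items, heartbeat, interval=60, workers=20):
    pool = ThreadPool(workers)
    try:
        async_result = pool.map_async(func, items)
        while not async_result.ready():
            # Wake up periodically so the caller can refresh time_modified.
            async_result.wait(interval)
            heartbeat()
        return async_result.get()
    finally:
        pool.close()
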
Example #18
def get_mmds_required_by_module_recursively(db_session,
                                            mmd,
                                            default_streams=None,
                                            raise_if_stream_ambigous=False):
    """
    Returns the list of Module metadata objects of all modules required while
    building the module defined by the `mmd` module metadata. This presumes the
    module metadata streams have been expanded using the
    `expand_mse_streams(...)` method.

    This method finds out latest versions of all the build-requires of
    the `mmd` module and then also all contexts of these latest versions.

    For each build-required name:stream:version:context module, it checks
    recursively all the "requires" and finds the latest version of each
    required module and also all contexts of these latest versions.

    :param db_session: SQLAlchemy database session.
    :param mmd: Modulemd metadata of the module whose requirements are resolved.
    :param dict default_streams: Dict in {module_name: module_stream, ...} format defining
        the default stream to choose for a module in case there are multiple streams to
        choose from.
    :param bool raise_if_stream_ambigous: When True, raises a StreamAmbigous exception in case
        there are multiple streams for some dependency of the module and the module name is not
        defined in `default_streams`, so it is not clear which stream should be used.
    :rtype: list of Modulemd metadata
    :return: List of all modulemd metadata of all modules required to build
        the module `mmd`.
    """
    # We use a dict with name:stream as the key and a list of mmds as the value.
    # That way, we can ensure we won't have any duplicate mmds in the resulting
    # list, and we also don't waste resources on fetching modules we have already
    # handled from the DB.
    mmds = {}

    # Get the MMDs of all compatible base modules based on the buildrequires.
    base_module_mmds = get_base_module_mmds(db_session, mmd)
    if not base_module_mmds["ready"]:
        base_module_choices = " or ".join(conf.base_module_names)
        raise UnprocessableEntity(
            "None of the base module ({}) streams in the buildrequires section could be found"
            .format(base_module_choices))

    # Add base modules to `mmds`.
    for base_module in base_module_mmds["ready"]:
        ns = ":".join(
            [base_module.get_module_name(),
             base_module.get_stream_name()])
        mmds.setdefault(ns, [])
        mmds[ns].append(base_module)

    # The currently submitted module build must be built only against "ready" base modules,
    # but its dependencies might have been built against some old platform which is already
    # EOL ("garbage" state). In order to find such old module builds, we need to include
    # also EOL platform streams.
    all_base_module_mmds = base_module_mmds["ready"] + base_module_mmds[
        "garbage"]

    # Get all the buildrequires of the module of interest.
    for deps in mmd.get_dependencies():
        deps_dict = deps_to_dict(deps, 'buildtime')
        mmds = _get_mmds_from_requires(db_session, deps_dict, mmds, False,
                                       default_streams,
                                       raise_if_stream_ambigous,
                                       all_base_module_mmds)

    # Now get the requires of buildrequires recursively.
    for mmd_key in list(mmds.keys()):
        for mmd in mmds[mmd_key]:
            for deps in mmd.get_dependencies():
                deps_dict = deps_to_dict(deps, 'runtime')
                mmds = _get_mmds_from_requires(db_session, deps_dict, mmds,
                                               True, default_streams,
                                               raise_if_stream_ambigous,
                                               all_base_module_mmds)

    # Make single list from dict of lists.
    res = []
    for ns, mmds_list in mmds.items():
        if len(mmds_list) == 0:
            raise UnprocessableEntity("Cannot find any module builds for %s" %
                                      (ns))
        res += mmds_list
    return res