Example #1
    def _download_source_modulemd(self, mmd, output_path):
        """
        Fetches the original source modulemd file from SCM URL stored in the
        XMD section of `mmd` and stores it to filename referenced by `output_path`.

        This method does nothing if SCM URL is not set in the `mmd`.

        :param Modulemd mmd: Modulemd instance.
        :param str output_path: Full path to file into which the original modulemd
            file will be stored.
        """
        xmd = mmd.get_xmd()
        commit = xmd.get("mbs", {}).get("commit")
        scmurl = xmd.get("mbs", {}).get("scmurl")
        if not commit or not scmurl:
            log.warning("%r: xmd['mbs'] does not contain 'commit' or 'scmurl'.", self.module)
            return

        td = None
        try:
            log.info("Fetching %s (%s) to get the source modulemd.yaml", scmurl, commit)
            td = tempfile.mkdtemp()
            scm = SCM(scmurl)
            scm.commit = commit
            scm.checkout(td)
            fn = scm.get_module_yaml()
            log.info("Writing source modulemd.yaml to %r" % output_path)
            shutil.copy(fn, output_path)
        finally:
            try:
                if td is not None:
                    shutil.rmtree(td)
            except Exception as e:
                log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e)))
Example #2
    def _tag_cg_build(self):
        """
        Tags the Content Generator build to module.cg_build_koji_tag.
        """
        session = get_session(self.config)

        tag_name = self.module.cg_build_koji_tag
        if not tag_name:
            log.info(
                "%r: Not tagging Content Generator build, no cg_build_koji_tag set", self.module)
            return

        tag_names_to_try = [tag_name, self.config.koji_cg_default_build_tag]
        for tag in tag_names_to_try:
            log.info("Trying %s", tag)
            tag_info = session.getTag(tag)
            if tag_info:
                break

            log.info("%r: Tag %s not found in Koji, trying next one.", self.module, tag)

        if not tag_info:
            log.warning(
                "%r:, Not tagging Content Generator build, no available tag found, tried %r",
                self.module, tag_names_to_try,
            )
            return

        build = self._get_build()
        nvr = "%s-%s-%s" % (build["name"], build["version"], build["release"])

        log.info("Content generator build %s will be tagged as %s in Koji", nvr, tag)
        session.tagBuild(tag_info["id"], nvr)
Example #3
    def check_gating(self, build):
        """
        Query Greenwave for a gating decision.
        :param build: build object
        :type build: module_build_service.common.models.ModuleBuild
        :return: True if at least one Greenwave response contains policies_satisfied set to true
        :rtype: bool
        """
        self.error_occurred = False
        try:
            versions = self.get_product_versions()
        except GreenwaveError:
            log.warning('An error occurred while getting product versions')
            self.error_occurred = True
            return False

        for ver in versions:
            try:
                if self.query_decision(build, ver)["policies_satisfied"]:
                    # at least one positive result is enough
                    return True
            except (KeyError, GreenwaveError) as exc:
                self.error_occurred = True
                log.warning('Incorrect greenwave result "%s", ignoring',
                            str(exc))

        return False
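
For reference, a hypothetical Greenwave decision payload as consumed by the loop above; check_gating only inspects the "policies_satisfied" key and treats a missing key as an error:

decision = {
    "policies_satisfied": True,  # the only field check_gating looks at
    "summary": "All required tests passed",  # ignored by check_gating
}
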
Example #4
def find_build_tags_from_external_repos(koji_session, repo_infos):
    """Find build tags from external repos

    An external repo added to a tag could be an arbitrary external repository.
    Hence, this method makes a best-effort guess at the build tags, matching
    each external repo's URL against a regular expression.

    :param koji_session: Koji session used to verify each guessed tag exists.
    :param repo_infos: list of mappings representing external repos information.
    :type repo_infos: list[dict]
    :return: a list of tag names.
    :rtype: list[str]
    """
    re_external_repo_url = r"^{}/repos/(.+-build)/latest/\$arch/?$".format(
        conf.koji_external_repo_url_prefix.rstrip("/"))
    tag_names = []
    for info in repo_infos:
        match = re.match(re_external_repo_url, info["url"])
        if match:
            name = match.groups()[0]
            if koji_session.getTag(name) is None:
                log.warning(
                    "Ignoring the found tag %s because no tag info was found with this name.",
                    name,
                )
            else:
                tag_names.append(name)
        else:
            log.warning(
                "The build tag could not be parsed from external repo %s whose url is %s.",
                info["external_repo_name"],
                info["url"],
            )
    return tag_names
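
A runnable sketch of the URL matching performed above, assuming conf.koji_external_repo_url_prefix is "https://kojipkgs.example.com" (a hypothetical value):

import re

pattern = r"^https://kojipkgs.example.com/repos/(.+-build)/latest/\$arch/?$"
url = "https://kojipkgs.example.com/repos/module-foo-42-build/latest/$arch"
match = re.match(pattern, url)
assert match is not None
assert match.groups()[0] == "module-foo-42-build"  # the guessed build tag
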
Example #5
def _in_memory_publish(topic, msg, conf, service):
    """ Puts the message into the in memory work queue. """
    # Increment the message ID.
    global _in_memory_msg_id
    _in_memory_msg_id += 1

    # Create fake fedmsg from the message so we can reuse
    # the BaseMessage.from_fedmsg code to get the particular BaseMessage
    # class instance.
    wrapped_msg = FedmsgMessageParser(known_fedmsg_services).parse({
        "msg_id": str(_in_memory_msg_id),
        "topic": service + "." + topic,
        "msg": msg
    })

    # Put the message on the queue.
    from module_build_service.scheduler.consumer import work_queue_put

    try:
        work_queue_put(wrapped_msg)
    except ValueError as e:
        log.warning("No MBSConsumer found.  Shutting down?  %r" % e)
    except AttributeError:
        # In the event that `moksha.hub._hub` hasn't yet been initialized, we
        # need to store messages on the side until it becomes available.
        # As a last-ditch effort, try to hang initial messages in the config.
        log.warning("Hub not initialized.  Queueing on the side.")
        _initial_messages.append(wrapped_msg)
Example #6
    def compute_weights_from_build_time(cls, components, arches=None):
        """
        Computes the weights of ComponentBuilds based on average time to build
        and list of arches for which the component is going to be built.

        This method should be used as a fallback only when KojiModuleBuilder
        cannot be used, because the weight this method produces is not 100% accurate.

        :param components: List of component names to compute the weight for.
        :param arches: List of arches to build for or None. If the value is None,
            conf.arches will be used instead.
        :rtype: dict
        :return: {component_name: weight_as_float, ...}
        """
        if not arches:
            arches = conf.arches

        weights = {}

        for component in components:
            average_time_to_build = cls.get_average_build_time(component)

            # The way `weight` is computed is based on the hardcoded weight
            # values in kojid.py.
            # The weight computed here is not 100% accurate, because there are
            # multiple smaller tasks in koji like waitrepo or createrepo and we
            # cannot say if they will be executed as part of this component build.
            # The weight computed here is used only to limit the number of builds
            # and we generally do not care about waitrepo/createrepo weights in MBS.

            # 1.5 is what Koji hardcodes as a default weight for BuildArchTask.
            weight = 1.5
            if not average_time_to_build:
                weights[component] = weight
                continue

            if average_time_to_build < 0:
                log.warning(
                    "Negative average build duration for component %s: %s",
                    component,
                    str(average_time_to_build),
                )
                weights[component] = weight
                continue

            # Increase the task weight by 0.75 for every hour of build duration.
            adj = average_time_to_build / ((60 * 60) / 0.75)
            # cap the adjustment at +4.5
            weight += min(4.5, adj)

            # We build for all arches, so multiply the weight by number of arches.
            weight = weight * len(arches)

            # 1.5 here is hardcoded Koji weight of single BuildSRPMFromSCMTask
            weight += 1.5
            weights[component] = weight

        return weights
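
A worked example of the formula above, using hypothetical inputs:

# average_time_to_build = 7200 s (2 hours), arches = ["x86_64", "aarch64", "s390x"]
# adj    = 7200 / (3600 / 0.75) = 1.5   (0.75 per hour of build time)
# weight = 1.5 + min(4.5, 1.5)  = 3.0   (BuildArchTask weight plus adjustment)
# weight = 3.0 * 3              = 9.0   (multiplied by the number of arches)
# weight = 9.0 + 1.5            = 10.5  (plus one BuildSRPMFromSCMTask)
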
Example #7
def inner(*args, **kwargs):
    start = time.time()
    while True:
        try:
            return function(*args, **kwargs)
        except wait_on as e:
            log.warning("Exception %r raised from %r.  Retry in %rs" %
                        (e, function, interval))
            time.sleep(interval)
            if (time.time() - start) >= timeout:
                raise  # This re-raises the last exception.
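
inner reads like the body of a retry decorator; a minimal self-contained reconstruction, assuming the closure variables interval, timeout and wait_on come from the decorator's arguments:

import functools
import logging
import time

log = logging.getLogger(__name__)


def retry(interval=30, timeout=120, wait_on=Exception):
    """Assumed enclosing decorator providing the closure for `inner`."""
    def wrapper(function):
        @functools.wraps(function)
        def inner(*args, **kwargs):
            start = time.time()
            while True:
                try:
                    return function(*args, **kwargs)
                except wait_on as e:
                    log.warning("Exception %r raised from %r.  Retry in %rs",
                                e, function, interval)
                    time.sleep(interval)
                    if (time.time() - start) >= timeout:
                        raise  # Re-raise the last exception on timeout.
        return inner
    return wrapper


# Hypothetical usage: retry network calls on IOError for up to a minute.
@retry(interval=5, timeout=60, wait_on=IOError)
def fetch(url):
    ...
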
Example #8
def _run_without_retry(cmd, chdir=None, log_stdout=False):
    proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE, cwd=chdir)
    stdout, stderr = proc.communicate()
    if log_stdout and stdout:
        log.debug(stdout)
    if stderr:
        log.warning(stderr)
    if proc.returncode != 0:
        raise UnprocessableEntity(
            "Failed on %r, retcode %r, out %r, err %r" %
            (cmd, proc.returncode, stdout, stderr))
    return proc.returncode, stdout, stderr
Example #9
def resolve_base_module_virtual_streams(db_session, name, streams):
    """
    Resolve any base module virtual streams and return a copy of `streams` with the resolved values.

    :param str name: the module name
    :param list streams: the streams to resolve
    :return: the resolved streams
    :rtype: list
    """
    from module_build_service.resolver import GenericResolver
    resolver = GenericResolver.create(db_session, conf)

    if name not in conf.base_module_names:
        return streams

    new_streams = copy.deepcopy(streams)
    for i, stream in enumerate(streams):
        # Ignore streams that start with a minus sign, since those are handled in the
        # MSE code
        if stream.startswith("-"):
            continue

        # Check if the base module stream is available
        log.debug('Checking to see if the base module "%s:%s" is available', name, stream)
        if resolver.get_module_count(name=name, stream=stream) > 0:
            continue

        # If the base module stream is not available, check if there's a virtual stream
        log.debug(
            'Checking to see if there is a base module "%s" with the virtual stream "%s"',
            name, stream,
        )
        base_module_mmd = resolver.get_latest_with_virtual_stream(
            name=name, virtual_stream=stream
        )
        if not base_module_mmd:
            # If there isn't this base module stream or virtual stream available, skip it,
            # and let the dep solving code deal with it like it normally would
            log.warning(
                'There is no base module "%s" with stream/virtual stream "%s"',
                name, stream,
            )
            continue

        latest_stream = base_module_mmd.get_stream_name()
        log.info(
            'Replacing the buildrequire "%s:%s" with "%s:%s", since "%s" is a virtual '
            "stream",
            name, stream, name, latest_stream, stream
        )
        new_streams[i] = latest_stream

    return new_streams
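
A minimal behaviour sketch of the function above; module names, streams and the resolved stream are hypothetical:

# streams = ["el8", "-el7"]
# - "-el7" starts with a minus sign and is left for the MSE code
# - if no real "platform:el8" module exists, but a platform build carries the
#   virtual stream "el8", "el8" is replaced by that build's real stream name
# resolve_base_module_virtual_streams(db_session, "platform", ["el8", "-el7"])
#   -> ["8.1.0", "-el7"]
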
Example #10
def import_builds_from_local_dnf_repos(platform_id=None):
    """
    Imports the module builds from all available local repositories to MBS DB.

    This is used when building modules locally without any access to MBS infra.
    This method also generates and imports the base module according to /etc/os-release.

    :param str platform_id: The `name:stream` of a fake platform module to generate in this
        method. When not set, the /etc/os-release is parsed to get the PLATFORM_ID.
    """
    log.info("Loading available RPM repositories.")
    dnf_base = dnf.Base()
    dnf_base.read_all_repos()

    log.info("Importing available modules to MBS local database.")
    for repo in dnf_base.repos.values():
        try:
            repo.load()
        except Exception as e:
            log.warning(str(e))
            continue
        mmd_data = repo.get_metadata_content("modules")
        mmd_index = Modulemd.ModuleIndex.new()
        ret, _ = mmd_index.update_from_string(mmd_data, True)
        if not ret:
            log.warning("Loading the repo '%s' failed", repo.name)
            continue

        for module_name in mmd_index.get_module_names():
            for mmd in mmd_index.get_module(module_name).get_all_streams():
                xmd = mmd.get_xmd()
                xmd["mbs"] = {}
                xmd["mbs"]["koji_tag"] = "repofile://" + repo.repofile
                xmd["mbs"]["mse"] = True
                xmd["mbs"]["commit"] = "unknown"
                mmd.set_xmd(xmd)

                import_mmd(db_session, mmd, False)

    if not platform_id:
        # Parse the /etc/os-release to find out the local platform:stream.
        with open("/etc/os-release", "r") as fd:
            for line in fd.readlines():
                if not line.startswith("PLATFORM_ID"):
                    continue
                platform_id = line.split("=")[1].strip("\"' \n")
    if not platform_id:
        raise ValueError("Cannot get PLATFORM_ID from /etc/os-release.")

    # Create the fake platform:stream:1:000000 module to fulfill the
    # dependencies for local offline build and also to define the
    # srpm-buildroot and buildroot.
    import_fake_base_module("%s:1:000000" % platform_id)
Example #11
def get_module_srpm_overrides(module):
    """
    Make necessary preparations to use any provided custom SRPMs.

    :param module: ModuleBuild object representing the module being submitted.
    :type module: :class:`models.ModuleBuild`
    :return: mapping of package names to SRPM links for all packages which
             have custom SRPM overrides specified
    :rtype: dict[str, str]

    """
    overrides = {}

    if not module.srpms:
        return overrides

    try:
        # Make sure we can decode the custom SRPM list
        srpms = json.loads(module.srpms)
        assert isinstance(srpms, list)
    except Exception:
        raise ValueError("Invalid srpms list encountered: {}".format(
            module.srpms))

    for source in srpms:
        if source.startswith("cli-build/") and source.endswith(".src.rpm"):
            # This is a custom srpm that has been uploaded to koji by rpkg
            # using the package name as the basename suffixed with .src.rpm
            rpm_name = os.path.basename(source)[:-len(".src.rpm")]
        else:
            # This should be a local custom srpm path
            if not os.path.exists(source):
                raise IOError("Provided srpm is missing: {}".format(source))
            # Get package name from rpm headers
            try:
                rpm_hdr = kobo.rpmlib.get_rpm_header(source)
                rpm_name = to_text_type(
                    kobo.rpmlib.get_header_field(rpm_hdr, "name"))
            except Exception:
                raise ValueError("Provided srpm is invalid: {}".format(source))

        if rpm_name in overrides:
            log.warning(
                'Encountered duplicate custom SRPM "{0}" for package {1}'.
                format(source, rpm_name))
            continue

        log.debug('Using custom SRPM "{0}" for package {1}'.format(
            source, rpm_name))
        overrides[rpm_name] = source

    return overrides
Example #12
def detect_arch():
    """
    Helper method to detect the local host architecture. Falls back to `conf.arch_fallback`.
    """
    if conf.arch_autodetect:
        arch_detected = platform.machine()
        if arch_detected:
            return arch_detected

        log.warning(
            "Couldn't determine machine arch. Falling back to configured arch."
        )

    return conf.arch_fallback
Example #13
    def _filter_based_on_real_stream_name(self, koji_session, module_builds, stream):
        """
        Query Koji for real stream name of each module and keep only those matching `stream`.

        This needs to be done, because MBS stores the stream name in the "version" field in Koji,
        but the "version" field cannot contain "-" character. Therefore MBS replaces all "-"
        with "_". This makes it impossible to reconstruct the original stream name from the
        "version" field.

        We therefore need to ask Koji for the real original stream name here and
        filter the modules based on that real stream name.

        :param KojiSession koji_session: Koji session.
        :param list module_builds: List of builds as returned by KojiSession.listTagged method.
        :param str stream: The requested stream name.
        :return list: Filtered list of builds.
        """
        # Return early if there are no module builds.
        if not module_builds:
            return []

        # Prepare list of build ids to pass them to Koji multicall later.
        build_ids = [b["build_id"] for b in module_builds]

        # Get the Koji builds from Koji.
        koji_builds = koji_multicall_map(koji_session, koji_session.getBuild, build_ids)
        if not koji_builds:
            raise RuntimeError("Error during Koji multicall when filtering KojiResolver builds.")

        # Filter out modules with different stream in the Koji build metadata.
        ret = []
        for module_build, koji_build in zip(module_builds, koji_builds):
            koji_build_stream = koji_build.get("extra", {}).get("typeinfo", {}).get("module", {}).\
                get("stream")
            if not koji_build_stream:
                log.warning(
                    "Not filtering out Koji build with id %d - it has no \"stream\" set in its "
                    "metadata." % koji_build["build_id"])
                ret.append(module_build)
                continue

            if koji_build_stream == stream:
                ret.append(module_build)
            else:
                log.info(
                    "Filtering out Koji build %d - its stream \"%s\" does not match the requested "
                    "stream \"%s\"" % (koji_build["build_id"], stream, koji_build_stream))

        return ret
Example #14
    def test_module_build_logs(self):
        """
        Tests that ModuleBuildLogs is logging properly to build log file.
        """
        build = models.ModuleBuild.get_by_id(db_session, 2)

        # Initialize logging, get the build log path and remove it to
        # ensure we are not using some garbage from a previous failed test.
        self.build_log.start(db_session, build)
        path = self.build_log.path(db_session, build)
        assert path[len(self.base):] == "/build-2.log"
        if os.path.exists(path):
            os.unlink(path)

        # Try logging without the MBSConsumer.current_module_build_id set.
        # No log file should be created.
        log.debug("ignore this test msg")
        log.info("ignore this test msg")
        log.warning("ignore this test msg")
        log.error("ignore this test msg")
        self.build_log.stop(build)
        assert not os.path.exists(path)

        # Try logging with current_module_build_id set to 1 and then to 2.
        # Only messages with current_module_build_id set to 2 should appear in
        # the log.
        self.build_log.start(db_session, build)
        MBSConsumer.current_module_build_id = 1
        log.debug("ignore this test msg1")
        log.info("ignore this test msg1")
        log.warning("ignore this test msg1")
        log.error("ignore this test msg1")

        MBSConsumer.current_module_build_id = 2
        log.debug("ignore this test msg2")
        log.info("ignore this test msg2")
        log.warning("ignore this test msg2")
        log.error("ignore this test msg2")

        self.build_log.stop(build)
        assert os.path.exists(path)
        with open(path, "r") as f:
            data = f.read()
            # Note that DEBUG is not present unless configured server-wide.
            for level in ["INFO", "WARNING", "ERROR"]:
                assert data.find(
                    "MBS - {0} - ignore this test msg2".format(level)) != -1

        # Try to log more messages when the build_log for module 2 is stopped.
        # New messages should not appear in the log.
        MBSConsumer.current_module_build_id = 2
        log.debug("ignore this test msg3")
        log.info("ignore this test msg3")
        log.warning("ignore this test msg3")
        log.error("ignore this test msg3")
        self.build_log.stop(build)
        with open(path, "r") as f:
            data = f.read()
            assert data.find("ignore this test msg3") == -1
Example #15
    def get_compatible_base_module_modulemds(self, base_module_mmd,
                                             stream_version_lte,
                                             virtual_streams, states):
        """
        Returns the Modulemd metadata of base modules compatible with the base
        module defined by `base_module_mmd`. Compatibility is determined using
        the stream version when the stream is in "x.y.z" format, and is limited
        to a single major version of the stream version.

        If `virtual_streams` are defined, the compatibility is also extended to
        all base module streams which share the same virtual stream.

        :param base_module_mmd: Modulemd metadata defining the input base module.
        :param stream_version_lte: If True, the compatible streams are limited
             by the stream version computed from `stream`. If False, even the
             modules with higher stream version are returned.
        :param virtual_streams: List of virtual streams. If set, also modules
            with incompatible stream version are returned in case they share
            one of the virtual streams.
        :param states: List of states the returned compatible modules should
            be in.
        :return list: List of Modulemd objects.
        """
        name = base_module_mmd.get_module_name()
        stream = base_module_mmd.get_stream_name()
        builds = []
        stream_version = None
        if stream_version_lte:
            stream_in_xyz_format = len(str(
                models.ModuleBuild.get_stream_version(stream, right_pad=False))) >= 5
            if stream_in_xyz_format:
                stream_version = models.ModuleBuild.get_stream_version(stream)
            else:
                log.warning(
                    "Cannot get compatible base modules, because stream_version_lte is used, "
                    "but stream %r is not in x.y.z format." % stream)
        builds = models.ModuleBuild.get_last_builds_in_stream_version_lte(
            self.db_session, name, stream_version, virtual_streams, states)

        return [build.mmd() for build in builds]
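
A worked sketch of the x.y.z detection above, assuming the stream version scheme where each component is zero-padded to two digits (e.g. "1.2.0" => 010200):

# get_stream_version("8.1.0", right_pad=False) -> 80100 (5 digits, x.y.z format)
# get_stream_version("28", right_pad=False)    -> fewer than 5 digits, so the
#   warning above is logged and stream_version stays None
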
Example #16
def get_modulemds_from_ursine_content(tag):
    """Get all modules metadata which were added to ursine content

    Ursine content is the tag inheritance managed by Ursa-Major by adding
    specific modules' koji_tag.

    Background of module build based on ursine content:

    Each module build buildrequires a platform module, which is a pseudo-module
    used to connect to an external repository whose packages will be present
    in the buildroot. In practice, the external repo is generated from a build
    tag which could inherit from a few module koji_tags so that those modules'
    RPMs could be build dependencies for some specific packages.

    So, this function is to find out all module koji_tags from the build tag
    and return corresponding module metadata.

    :param str tag: a base module's koji_tag.
    :return: list of module metadata. An empty list is returned if no ursine
        module metadata is found.
    :rtype: list[Modulemd.Module]
    """
    resolver = GenericResolver.create(db_session, conf)

    koji_session = get_session(conf, login=False)
    repos = koji_session.getExternalRepoList(tag)
    build_tags = find_build_tags_from_external_repos(koji_session, repos)
    if not build_tags:
        log.debug("No external repo containing ursine content is found.")
        return []
    modulemds = []
    for tag in build_tags:
        koji_tags = find_module_koji_tags(koji_session, tag)
        for koji_tag in koji_tags:
            md = resolver.get_modulemd_by_koji_tag(koji_tag)
            if md:
                modulemds.append(md)
            else:
                log.warning("No module is found by koji_tag '%s'", koji_tag)
    return modulemds
Example #17
    def koji_import(self, devel=False):
        """This method imports given module into the configured koji instance as
        a content generator based build

        Raises an exception when error is encountered during import

        :param bool devel: True if the "-devel" module should be created and imported.
            The "-devel" module build contains only the RPMs which are normally filtered
            from the module build. If set to False, normal module build respecting the
            filters is created and imported.
        """
        self.devel = devel
        session = get_session(self.config)
        self._load_koji_tag(session)

        file_dir = self._prepare_file_directory()
        metadata = self._get_content_generator_metadata(file_dir)
        try:
            serverdir = self._upload_outputs(session, metadata, file_dir)
            try:
                build_info = session.CGImport(metadata, serverdir)
            except koji.GenericError as e:
                if "Build already exists" not in str(e):
                    raise
                log.warning("Failed to import content generator")
                build_info = None
            if conf.koji_cg_tag_build:
                self._tag_cg_build()
            if build_info is not None:
                log.info("Content generator import done.")
                log.debug(json.dumps(build_info, sort_keys=True, indent=4))

                # Only remove the logs if CG import was successful.  If it fails,
                # then we want to keep them around for debugging.
                log.info("Removing %r", file_dir)
                shutil.rmtree(file_dir)
        except Exception as e:
            log.exception("Content generator import failed: %s", e)
            raise e
Example #18
def get_prefixed_version(mmd):
    """
    Return the prefixed version of the module based on the buildrequired base module stream.

    :param mmd: the Modulemd.ModuleStream object to format
    :return: the prefixed version
    :rtype: int
    """
    xmd = mmd.get_xmd()
    version = mmd.get_version()

    base_module_stream = None
    for base_module in conf.base_module_names:
        try:
            base_module_stream = xmd["mbs"]["buildrequires"].get(base_module, {}).get("stream")
            if base_module_stream:
                # Break after finding the first base module that is buildrequired
                break
        except KeyError:
            log.warning("The module's mmd is missing information in the xmd section")
            return version
    else:
        log.warning(
            "This module does not buildrequire a base module ({0})".format(
                " or ".join(conf.base_module_names)
            )
        )
        return version

    # The platform version (e.g. prefix1.2.0 => 010200)
    version_prefix = models.ModuleBuild.get_stream_version(base_module_stream, right_pad=False)

    if version_prefix is None:
        log.warning(
            'The "{0}" stream "{1}" couldn\'t be used to prefix the module\'s '
            "version".format(base_module, base_module_stream)
        )
        return version

    # Strip the stream suffix because Modulemd requires version to be an integer
    new_version = int(str(int(math.floor(version_prefix))) + str(version))
    if new_version > GLib.MAXUINT64:
        log.warning(
            'The "{0}" stream "{1}" caused the module\'s version prefix to be '
            "too long".format(base_module, base_module_stream)
        )
        return version
    return new_version
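
A worked example of the version prefixing, following the "prefix1.2.0 => 010200" scheme from the comment above; all numbers are hypothetical:

# base_module_stream = "1.2.0" -> version_prefix = 10200 ("010200" as an int)
# version = 20200101
# new_version = int("10200" + "20200101") = 1020020200101
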
Example #19
def done(msg_id, module_build_id, module_build_state):
    """Called whenever a module enters the 'done' state.

    We currently don't do anything useful, so moving to ready.
    Except for scratch module builds, which remain in the done state.
    Otherwise the done -> ready state should happen when all
    dependent modules were re-built, at least that's the current plan.

    :param str msg_id: the original id of the message being handled, which is
        received from the message bus.
    :param int module_build_id: the module build id.
    :param int module_build_state: the module build state.
    """
    build = models.ModuleBuild.get_by_id(db_session, module_build_id)
    if build.state != module_build_state:
        log.warning(
            "Note that retrieved module state %r doesn't match message module state %r",
            build.state,
            module_build_state,
        )
        # This is ok.. it's a race condition we can ignore.

    # Scratch builds stay in 'done' state
    if not build.scratch:
        if greenwave is None or greenwave.check_gating(build):
            build.transition(db_session,
                             conf,
                             state=models.BUILD_STATES["ready"])
        else:
            build.state_reason = "Gating failed"
            if greenwave.error_occurred:
                build.state_reason += " (Error occured while querying Greenwave)"
            build.time_modified = datetime.utcnow()
        db_session.commit()

    build_logs.stop(build)
    GenericBuilder.clear_cache(build)
Example #20
def process_open_component_builds():
    log.warning("process_open_component_builds is not yet implemented...")
Example #21
def _apply_dep_overrides(mmd, params):
    """
    Apply the dependency override parameters (if specified) on the input modulemd.

    :param Modulemd.ModuleStream mmd: the modulemd to apply the overrides on
    :param dict params: the API parameters passed in by the user
    :raises ValidationError: if one of the overrides doesn't apply
    """
    dep_overrides = {
        "buildrequires": copy.copy(params.get("buildrequire_overrides", {})),
        "requires": copy.copy(params.get("require_overrides", {})),
    }

    # Parse the module's branch to determine if it should override the stream of the buildrequired
    # module defined in conf.br_stream_override_module
    branch_search = None
    if params.get("branch") and conf.br_stream_override_module and conf.br_stream_override_regexes:
        # Only parse the branch for a buildrequire override if the user didn't manually specify an
        # override for the module specified in conf.br_stream_override_module
        if not dep_overrides["buildrequires"].get(conf.br_stream_override_module):
            branch_search = None
            for regex in conf.br_stream_override_regexes:
                branch_search = re.search(regex, params["branch"])
                if branch_search:
                    log.debug(
                        "The stream override regex `%s` matched the branch %s",
                        regex,
                        params["branch"],
                    )
                    break
            else:
                log.debug('No stream override regexes matched the branch "%s"', params["branch"])

    # If a stream was parsed from the branch, then add it as a stream override for the module
    # specified in conf.br_stream_override_module
    if branch_search:
        # Concatenate all the groups that are not None together to get the desired stream.
        # This approach is taken in case there are sections to ignore.
        # For instance, if we need to parse `el8.0.0` from `rhel-8.0.0`.
        parsed_stream = "".join(group for group in branch_search.groups() if group)
        if parsed_stream:
            dep_overrides["buildrequires"][conf.br_stream_override_module] = [parsed_stream]
            log.info(
                'The buildrequired stream of "%s" was overridden with "%s" based on the branch "%s"',
                conf.br_stream_override_module, parsed_stream, params["branch"],
            )
        else:
            log.warning(
                'The regex `%s` only matched empty capture groups on the branch "%s". The regex is '
                " invalid and should be rewritten.",
                regex, params["branch"],
            )

    unused_dep_overrides = {
        "buildrequires": set(dep_overrides["buildrequires"].keys()),
        "requires": set(dep_overrides["requires"].keys()),
    }

    deps = mmd.get_dependencies()
    for dep in deps:
        overridden = False
        new_dep = Modulemd.Dependencies()
        for dep_type, overrides in dep_overrides.items():
            if dep_type == "buildrequires":
                mmd_dep_type = "buildtime"
            else:
                mmd_dep_type = "runtime"
            # Get the existing streams
            reqs = deps_to_dict(dep, mmd_dep_type)
            # Get the method to add a new stream for this dependency type
            # (e.g. add_buildtime_stream)
            add_func = getattr(new_dep, "add_{}_stream".format(mmd_dep_type))
            add_empty_func = getattr(
                new_dep, "set_empty_{}_dependencies_for_module".format(mmd_dep_type))
            for name, streams in reqs.items():
                if name in dep_overrides[dep_type]:
                    streams_to_add = dep_overrides[dep_type][name]
                    unused_dep_overrides[dep_type].remove(name)
                    overridden = True
                else:
                    streams_to_add = reqs[name]

                if not streams_to_add:
                    add_empty_func(name)
                else:
                    for stream in streams_to_add:
                        add_func(name, stream)
        if overridden:
            # Set the overridden streams
            mmd.remove_dependencies(dep)
            mmd.add_dependencies(new_dep)

    for dep_type in unused_dep_overrides.keys():
        # If a stream override was applied from parsing the branch and it wasn't applicable,
        # just ignore it
        if branch_search and conf.br_stream_override_module in unused_dep_overrides[dep_type]:
            unused_dep_overrides[dep_type].remove(conf.br_stream_override_module)
        if unused_dep_overrides[dep_type]:
            raise ValidationError(
                "The {} overrides for the following modules aren't applicable: {}".format(
                    dep_type[:-1], ", ".join(sorted(unused_dep_overrides[dep_type])))
            )
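
Hypothetical API parameters illustrating the override shapes consumed above, including a branch from which an el8.0.0 stream would be parsed:

params = {
    "branch": "rhel-8.0.0",
    # module name -> list of streams to buildrequire instead
    "buildrequire_overrides": {"platform": ["el8.0.0"]},
    # module name -> list of streams to require at runtime instead
    "require_overrides": {},
}
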
Example #22
def fail_lost_builds():
    # This function is supposed to handle only the part which can't be
    # updated through messaging (e.g. srpm-build failures). Please keep it
    # fit `n` slim. We do want the rest to be processed elsewhere.
    # TODO re-use

    if conf.system == "koji":
        # We don't do this on behalf of users
        koji_session = get_session(conf, login=False)
        log.info("Querying tasks for statuses:")
        res = db_session.query(models.ComponentBuild).filter_by(
            state=koji.BUILD_STATES["BUILDING"]
        ).options(lazyload("module_build")).all()

        log.info("Checking status for %s tasks", len(res))
        for component_build in res:
            log.debug(component_build.json(db_session))
            # Don't check tasks which haven't been triggered yet
            if not component_build.task_id:
                continue

            # Don't check tasks for components which have been reused,
            # they may have BUILDING state temporarily before we tag them
            # to new module tag. Checking them would be waste of resources.
            if component_build.reused_component_id:
                log.debug(
                    'Skipping check for task "%s", the component has been reused ("%s").',
                    component_build.task_id, component_build.reused_component_id
                )
                continue

            task_id = component_build.task_id

            log.info('Checking status of task_id "%s"', task_id)
            task_info = koji_session.getTaskInfo(task_id)

            state_mapping = {
                # Cancelled and failed builds should be marked as failed.
                koji.TASK_STATES["CANCELED"]: koji.BUILD_STATES["FAILED"],
                koji.TASK_STATES["FAILED"]: koji.BUILD_STATES["FAILED"],
                # Completed tasks should be marked as complete.
                koji.TASK_STATES["CLOSED"]: koji.BUILD_STATES["COMPLETE"],
            }

            # If it is a closed/completed task, then we can extract the NVR
            build_version, build_release = None, None  # defaults
            if task_info["state"] == koji.TASK_STATES["CLOSED"]:
                builds = koji_session.listBuilds(taskID=task_id)
                if not builds:
                    log.warning(
                        "Task ID %r is closed, but we found no builds in koji.", task_id)
                elif len(builds) > 1:
                    log.warning(
                        "Task ID %r is closed, but more than one build is present!", task_id)
                else:
                    build_version = builds[0]["version"]
                    build_release = builds[0]["release"]

            log.info("  task %r is in state %r", task_id, task_info["state"])
            if task_info["state"] in state_mapping:
                build_task_finalize.delay(
                    msg_id="producer::fail_lost_builds fake msg",
                    task_id=component_build.task_id,
                    build_new_state=state_mapping[task_info["state"]],
                    build_name=component_build.package,
                    build_release=build_release,
                    build_version=build_version,
                )

    elif conf.system == "mock":
        pass
Example #23
    def _get_arch_mmd_output(self, output_path, arch):
        """
        Returns the CG "output" dict for architecture specific modulemd file.

        :param str output_path: Path where the modulemd files are stored.
        :param str arch: Architecture for which to generate the "output" dict.
        :param dict rpms_dict: Dictionary with all RPMs built in this module.
            The key is NEVRA string, value is RPM dict as obtained from Koji.
            This dict is used to generate architecture specific "components"
            section in the "output" record.
        :rtype: dict
        :return: Dictionary with record in "output" list.
        """
        ret = {
            "buildroot_id": 1,
            "arch": arch,
            "type": "file",
            "extra": {"typeinfo": {"module": {}}},
            "checksum_type": "md5",
        }

        # Noarch architecture represents "generic" modulemd.txt.
        if arch == "noarch":
            mmd_filename = "modulemd.txt"
        else:
            mmd_filename = "modulemd.%s.txt" % arch

        # Read the modulemd file to get the filesize/checksum and also
        # parse it to get the Modulemd instance.
        mmd_path = os.path.join(output_path, mmd_filename)
        try:
            with open(mmd_path, "rb") as mmd_f:
                raw_data = mmd_f.read()
                data = to_text_type(raw_data)
                mmd = load_mmd(data)
                ret["filename"] = mmd_filename
                ret["filesize"] = len(raw_data)
                ret["checksum"] = hashlib.md5(raw_data).hexdigest()
        except IOError:
            if arch == "src":
                # This might happen in case the Module is submitted directly
                # using the yaml without SCM URL. This should never happen
                # when building production-ready modules using Koji, but in
                # theory it is possible.
                log.warning("No modulemd.src.txt found.")
                return
            else:
                raise

        components = []
        if arch in ["noarch", "src"]:
            # For generic noarch/src modulemd, include all the RPMs.
            for rpm in self.rpms:
                components.append(self._koji_rpm_to_component_record(rpm))
        else:
            # Check the RPM artifacts built for this architecture in modulemd file,
            # find the matching RPM in the `rpms_dict` coming from Koji and use it
            # to generate list of components for this architecture.
            # We cannot simply use the data from MMD here without `rpms_dict`, because
            # RPM sigmd5 signature is not stored in MMD.
            for rpm in mmd.get_rpm_artifacts():
                if rpm not in self.rpms_dict:
                    raise RuntimeError(
                        "RPM %s found in the final modulemd but not in Koji tag." % rpm)
                tag_rpm = self.rpms_dict[rpm]
                components.append(self._koji_rpm_to_component_record(tag_rpm))
        ret["components"] = components
        return ret
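
The shape of the returned "output" record, with hypothetical filesize, checksum and components values:

output_record = {
    "buildroot_id": 1,
    "arch": "x86_64",
    "type": "file",
    "extra": {"typeinfo": {"module": {}}},
    "checksum_type": "md5",
    "filename": "modulemd.x86_64.txt",
    "filesize": 4096,
    "checksum": "d41d8cd98f00b204e9800998ecf8427e",
    "components": [],  # one record per RPM built for this architecture
}
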
Example #24
def get_build_arches(mmd, config):
    """
    Returns the list of architectures for which the module `mmd` should be built.

    :param mmd: Module MetaData
    :param config: config (module_build_service.common.config.Config instance)
    :return: list of architectures
    """
    # Imported here to allow import of utils in GenericBuilder.
    from module_build_service.builder import GenericBuilder

    nsvc = mmd.get_nsvc()

    def _conditional_log(msg, arches, new_arches):
        # Checks if the arch list returned by _check_buildopts_arches is the same one passed to it
        # If it is, it outputs the message
        if arches is new_arches:
            log.info(msg)

    # At first, handle BASE_MODULE_ARCHES - this overrides any other option.
    # Find out the base modules in buildrequires section of XMD and
    # set the Koji tag arches according to it.
    if "mbs" in mmd.get_xmd():
        for req_name, req_data in mmd.get_xmd()["mbs"]["buildrequires"].items():
            ns = ":".join([req_name, req_data["stream"]])
            if ns in config.base_module_arches:
                arches = config.base_module_arches[ns]
                new_arches = _check_buildopts_arches(mmd, arches)
                msg = "Setting build arches of %s to %r based on the BASE_MODULE_ARCHES." % (
                    nsvc, new_arches)
                _conditional_log(msg, arches, new_arches)
                return new_arches

    # Check whether the module contains the `koji_tag_arches`. This is used only
    # by special modules defining the layered products.
    try:
        arches = mmd.get_xmd()["mbs"]["koji_tag_arches"]
        new_arches = _check_buildopts_arches(mmd, arches)
        msg = "Setting build arches of %s to %r based on the koji_tag_arches." % (
            nsvc, new_arches)
        _conditional_log(msg, arches, new_arches)
        return new_arches
    except KeyError:
        pass

    # Check the base/layered-product module this module buildrequires and try to get the
    # list of arches from there.
    try:
        buildrequires = mmd.get_xmd()["mbs"]["buildrequires"]
    except (ValueError, KeyError):
        log.warning("Module {0} does not have buildrequires in its xmd".format(
            mmd.get_nsvc()))
        buildrequires = None
    if buildrequires:
        # Looping through all the privileged modules that are allowed to set koji tag arches
        # and the base modules to see what the koji tag arches should be. Doing it this way
        # preserves the order in the configurations.
        for module in conf.allowed_privileged_module_names + conf.base_module_names:
            module_in_xmd = buildrequires.get(module)

            if not module_in_xmd:
                continue

            module_obj = models.ModuleBuild.get_build_from_nsvc(
                db_session,
                module,
                module_in_xmd["stream"],
                module_in_xmd["version"],
                module_in_xmd["context"],
            )
            if not module_obj:
                continue
            arches = GenericBuilder.get_module_build_arches(module_obj)
            if arches:
                new_arches = _check_buildopts_arches(mmd, arches)
                msg = "Setting build arches of %s to %r based on the buildrequired module %r." % (
                    nsvc, new_arches, module_obj)
                _conditional_log(msg, arches, new_arches)
                return new_arches

    # As a last resort, return just the preconfigured list of arches.
    arches = config.arches
    new_arches = _check_buildopts_arches(mmd, arches)
    msg = "Setting build arches of %s to %r based on default ARCHES." % (
        nsvc, new_arches)
    _conditional_log(msg, arches, new_arches)
    return new_arches
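
To summarize the precedence implemented above:

# 1. config.base_module_arches entry for a buildrequired base module "name:stream"
# 2. xmd["mbs"]["koji_tag_arches"] set in the module itself (layered products)
# 3. arches of a buildrequired privileged/base module build found in the DB
# 4. config.arches as the default
# Every candidate list is filtered through _check_buildopts_arches(mmd, arches).
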
Example #25
def decision_update(msg_id, decision_context, subject_identifier,
                    policies_satisfied):
    """Move module build to ready or failed according to Greenwave result

    :param str msg_id: the original id of the message being handled which is
        received from the message bus.
    :param str decision_context: the context of the Greenwave decision. Refer to
        the messaging document for detailed information.
    :param str subject_identifier: usually a build NVR. Refer to
        https://docs.pagure.org/greenwave/messaging.html for detailed information.
    :param bool policies_satisfied: whether the build satisfies Greenwave rules.
        Refer to the messaging document for detailed information.
    """
    if not conf.greenwave_decision_context:
        log.debug(
            "Skip Greenwave message %s as MBS does not have GREENWAVE_DECISION_CONTEXT "
            "configured",
            msg_id,
        )
        return

    if decision_context != conf.greenwave_decision_context:
        log.debug(
            "Skip Greenwave message %s as MBS only handles messages with the "
            'decision context "%s"',
            msg_id,
            conf.greenwave_decision_context,
        )
        return

    module_build_nvr = subject_identifier

    if not policies_satisfied:
        log.debug(
            "Skip to handle module build %s because it has not satisfied Greenwave policies.",
            module_build_nvr,
        )
        return

    build = get_corresponding_module_build(module_build_nvr)

    if build is None:
        log.debug(
            "No corresponding module build of subject_identifier %s is found.",
            module_build_nvr)
        return

    if build.state == BUILD_STATES["done"]:
        build.transition(
            db_session,
            conf,
            BUILD_STATES["ready"],
            state_reason="Module build {} has satisfied Greenwave policies.".
            format(module_build_nvr),
        )
    else:
        log.warning(
            "Module build %s is not in done state but Greenwave tells "
            "it passes tests in decision context %s",
            module_build_nvr,
            decision_context,
        )

    db_session.commit()
Example #26
def done(msg_id, tag_name):
    """Called whenever koji rebuilds a repo, any repo.

    :param str msg_id: the original id of the message being handled which is
        received from the message bus.
    :param str tag_name: the tag name from which the repo is generated.
    """

    # First, find our ModuleBuild associated with this repo, if any.
    if conf.system in ("koji", "test") and not tag_name.endswith("-build"):
        log.debug("Tag %r does not end with '-build' suffix, ignoring",
                  tag_name)
        return
    tag = tag_name[:-6] if tag_name.endswith("-build") else tag_name
    module_build = models.ModuleBuild.get_by_tag(db_session, tag_name)
    if not module_build:
        log.debug("No module build found associated with koji tag %r" % tag)
        return

    # It is possible that we have already failed.. but our repo is just being
    # routinely regenerated.  Just ignore that.  If module_build_service says the module is
    # dead, then the module is dead.
    if module_build.state == models.BUILD_STATES["failed"]:
        log.info("Ignoring repo regen for already failed %r" % module_build)
        return

    # If there are no components in this module build, then current_batch will be empty
    if module_build.component_builds:
        current_batch = module_build.current_batch()
    else:
        current_batch = []

    # Get the list of untagged components in current/previous batches which
    # have been built successfully
    if conf.system in ("koji", "test") and current_batch:
        if any(c.is_completed and not c.is_tagged
               for c in module_build.up_to_current_batch()):
            log.info(
                "Ignoring repo regen, because not all components are tagged.")
            return
        if all(c.is_waiting_for_build for c in current_batch):
            log.info(
                "Ignoring repo regen because no components have started in the batch."
            )
            return

    # If any in the current batch are still running.. just wait.
    running = [c for c in current_batch if c.is_building]
    if running:
        log.info(
            "%r has %r of %r components still building in this batch (%r total)",
            module_build, len(running), len(current_batch),
            len(module_build.component_builds))
        return

    # Assemble the list of all successful components in the batch.
    good = [c for c in current_batch if c.is_completed]

    # If *none* of the components completed for this batch, then obviously the
    # module fails.  However!  We shouldn't reach this scenario.  There is
    # logic over in the component handler which should fail the module build
    # first before we ever get here.  This is here as a race condition safety
    # valve.
    if module_build.component_builds and not good:
        state_reason = "Component(s) {} failed to build.".format(", ".join(
            c.package for c in current_batch if c.is_unsuccessful))
        module_build.transition(db_session,
                                conf,
                                models.BUILD_STATES["failed"],
                                state_reason,
                                failure_type="infra")
        db_session.commit()
        log.warning("Odd!  All components in batch failed for %r." %
                    module_build)
        return

    groups = GenericBuilder.default_buildroot_groups(db_session, module_build)
    builder = GenericBuilder.create(
        db_session,
        module_build.owner,
        module_build,
        conf.system,
        conf,
        tag_name=tag,
        components=[c.package for c in module_build.component_builds],
    )
    builder.buildroot_connect(groups)

    # If we have reached here then we know the following things:
    #
    # - All components in this batch have finished (failed or succeeded)
    # - One or more succeeded.
    # - They have been regenerated back into the buildroot.
    #
    # So now we can either start a new batch if there are still some to build
    # or, if everything is built successfully, then we can bless the module as
    # complete.
    has_unbuilt_components = any(c.is_unbuilt
                                 for c in module_build.component_builds)
    has_failed_components = any(c.is_unsuccessful
                                for c in module_build.component_builds)

    if has_unbuilt_components and not has_failed_components:
        # Ok, for the subset of builds that did complete successfully, check to
        # see if they are in the buildroot before starting new batch.
        artifacts = [component_build.nvr for component_build in good]
        if not builder.buildroot_ready(artifacts):
            log.info("Not all of %r are in the buildroot.  Waiting." %
                     artifacts)
            return

        # Try to start next batch build, because there are still unbuilt
        # components in a module.
        start_next_batch_build(conf, module_build, builder)
    else:
        if has_failed_components:
            state_reason = "Component(s) {} failed to build.".format(", ".join(
                c.package for c in module_build.component_builds
                if c.is_unsuccessful))
            module_build.transition(
                db_session,
                conf,
                state=models.BUILD_STATES["failed"],
                state_reason=state_reason,
                failure_type="user",
            )
        else:
            # Tell the external buildsystem to wrap up (CG import, createrepo, etc.)
            module_build.time_completed = datetime.utcnow()
            builder.finalize(succeeded=True)

            module_build.transition(db_session,
                                    conf,
                                    state=models.BUILD_STATES["done"])
        db_session.commit()
Example #27
from __future__ import absolute_import
import json
import ssl

from dogpile.cache import make_region
from flask import g
import requests

from module_build_service import app
from module_build_service.common import conf, log
from module_build_service.common.errors import Unauthorized, Forbidden

try:
    import ldap3
except ImportError:
    log.warning("ldap3 import not found.  ldap/krb disabled.")

client_secrets = None
region = make_region().configure("dogpile.cache.memory")


def _json_loads(content):
    if not isinstance(content, str):
        content = content.decode("utf-8")
    return json.loads(content)


def _load_secrets():
    global client_secrets
    if client_secrets:
        return
Example #28
def import_mmd(db_session, mmd, check_buildrequires=True):
    """
    Imports a new module build defined by `mmd` to the MBS database using `db_session`.
    If it already exists, it is updated.

    The ModuleBuild.koji_tag is set according to xmd['mbs']['koji_tag'].
    The ModuleBuild.state is set to "ready".
    The ModuleBuild.rebuild_strategy is set to "all".
    The ModuleBuild.owner is set to "mbs_import".

    :param db_session: SQLAlchemy session object.
    :param mmd: module metadata being imported into database.
    :type mmd: Modulemd.ModuleStream
    :param bool check_buildrequires: When True, checks that the buildrequires defined in the MMD
        have matching records in the `mmd["xmd"]["mbs"]["buildrequires"]` and also fills in
        the `ModuleBuild.buildrequires` according to this data.
    :return: module build (ModuleBuild),
             log messages collected during import (list)
    :rtype: tuple
    """
    from module_build_service.common import models

    xmd = mmd.get_xmd()
    # Set some defaults in xmd["mbs"] if they're not provided by the user
    if "mbs" not in xmd:
        xmd["mbs"] = {"mse": True}

    if not mmd.get_context():
        mmd.set_context(models.DEFAULT_MODULE_CONTEXT)

    # NSVC is used for logging purpose later.
    nsvc = mmd.get_nsvc()
    if nsvc is None:
        msg = "Both the name and stream must be set for the modulemd being imported."
        log.error(msg)
        raise UnprocessableEntity(msg)

    name = mmd.get_module_name()
    stream = mmd.get_stream_name()
    version = str(mmd.get_version())
    context = mmd.get_context()

    xmd_mbs = xmd["mbs"]

    disttag_marking = xmd_mbs.get("disttag_marking")

    # If it is a base module, then make sure the value that will be used in the RPM disttags
    # doesn't contain a dash since a dash isn't allowed in the release field of the NVR
    if name in conf.base_module_names:
        if disttag_marking and "-" in disttag_marking:
            msg = "The disttag_marking cannot contain a dash"
            log.error(msg)
            raise UnprocessableEntity(msg)
        if not disttag_marking and "-" in stream:
            msg = "The stream cannot contain a dash unless disttag_marking is set"
            log.error(msg)
            raise UnprocessableEntity(msg)

    virtual_streams = xmd_mbs.get("virtual_streams", [])

    # Verify that the virtual streams are the correct type
    if virtual_streams and (
        not isinstance(virtual_streams, list)
        or any(not isinstance(vs, string_types) for vs in virtual_streams)
    ):
        msg = "The virtual streams must be a list of strings"
        log.error(msg)
        raise UnprocessableEntity(msg)

    if check_buildrequires:
        deps = mmd.get_dependencies()
        if len(deps) > 1:
            raise UnprocessableEntity(
                "The imported module's dependencies list should contain just one element")

        if "buildrequires" not in xmd_mbs:
            # Always set buildrequires if it is not there, because
            # get_buildrequired_base_modules requires that xmd/mbs/buildrequires exist.
            xmd_mbs["buildrequires"] = {}
            mmd.set_xmd(xmd)

        if len(deps) > 0:
            brs = set(deps[0].get_buildtime_modules())
            xmd_brs = set(xmd_mbs["buildrequires"].keys())
            if brs - xmd_brs:
                raise UnprocessableEntity(
                    "The imported module buildrequires other modules, but the metadata in the "
                    'xmd["mbs"]["buildrequires"] dictionary is missing entries'
                )

    if "koji_tag" not in xmd_mbs:
        log.warning("'koji_tag' is not set in xmd['mbs'] for module {}".format(nsvc))
        log.warning("koji_tag will be set to None for imported module build.")

    # Log messages collected during import
    msgs = []

    # Get the ModuleBuild from DB.
    build = models.ModuleBuild.get_build_from_nsvc(db_session, name, stream, version, context)
    if build:
        msg = "Updating existing module build {}.".format(nsvc)
        log.info(msg)
        msgs.append(msg)
    else:
        build = models.ModuleBuild()
        db_session.add(build)

    build.name = name
    build.stream = stream
    build.version = version
    build.koji_tag = xmd_mbs.get("koji_tag")
    build.state = models.BUILD_STATES["ready"]
    build.modulemd = mmd_to_str(mmd)
    build.context = context
    build.owner = "mbs_import"
    build.rebuild_strategy = "all"
    now = datetime.utcnow()
    build.time_submitted = now
    build.time_modified = now
    build.time_completed = now
    if build.name in conf.base_module_names:
        build.stream_version = models.ModuleBuild.get_stream_version(stream)

    # Record the base modules this module buildrequires
    if check_buildrequires:
        for base_module in build.get_buildrequired_base_modules(db_session):
            if base_module not in build.buildrequires:
                build.buildrequires.append(base_module)

    build.update_virtual_streams(db_session, virtual_streams)

    db_session.commit()

    msg = "Module {} imported".format(nsvc)
    log.info(msg)
    msgs.append(msg)

    return build, msgs
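
A minimal usage sketch for import_mmd (the load_mmd helper and the db_session
import paths are assumptions based on the MBS source tree; the file name is
hypothetical):

from module_build_service.common.utils import load_mmd  # assumed path
from module_build_service.scheduler.db_session import db_session  # assumed path

with open("foo.modulemd.yaml") as f:  # hypothetical file
    mmd = load_mmd(f.read())

build, messages = import_mmd(db_session, mmd)
# build.state is now BUILD_STATES["ready"]; messages holds the collected log lines
for message in messages:
    print(message)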
Example No. 29
    # Excerpt from the Greenwave class; the matching getter for the setter below:
    @property
    def url(self):
        return self._url

    @url.setter
    def url(self, value):
        value = value.rstrip("/")
        if not value:
            raise RuntimeError("No Greenwave URL set")
        self._url = value

    @property
    def decision_context(self):
        return self._decision_context

    @property
    def subject_type(self):
        return self._subj_type

    @property
    def timeout(self):
        return self._gw_timeout

    @timeout.setter
    def timeout(self, value):
        self._gw_timeout = value


try:
    greenwave = Greenwave()
except RuntimeError:
    log.warning('Greenwave is not configured or configured improperly')
    greenwave = None
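
A short usage sketch of the accessors above (assuming MBS is configured with a
Greenwave URL so the constructor does not raise; the URL is hypothetical):

gw = Greenwave()
gw.url = "https://greenwave.example.com/api/v1.0/"
print(gw.url)    # trailing slash stripped: "https://greenwave.example.com/api/v1.0"
gw.timeout = 60  # overrides the configured query timeout
gw.url = ""      # raises RuntimeError("No Greenwave URL set")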
Example No. 30


def add_default_modules(mmd):
    """
    Add default modules as buildrequires to the input modulemd.

    The buildrequired base modules can optionally link their default modules, either by
    setting xmd.mbs.use_default_modules or by specifying a URL to a text file in
    xmd.mbs.default_modules_scm_url. Any default module that isn't in the database will
    be logged and ignored.

    :param Modulemd.ModuleStream mmd: the modulemd of the module to add the module defaults to
    :raises RuntimeError: if the buildrequired base module isn't in the database or the default
        modules list can't be downloaded
    """
    log.info("Finding the default modules to include as buildrequires")
    xmd = mmd.get_xmd()
    buildrequires = xmd["mbs"]["buildrequires"]
    defaults_added = False

    for module_name in conf.base_module_names:
        bm_info = buildrequires.get(module_name)
        if bm_info is None:
            log.debug(
                "The base module %s is not a buildrequire of the submitted module %s",
                module_name,
                mmd.get_nsvc(),
            )
            continue

        bm = models.ModuleBuild.get_build_from_nsvc(
            db_session,
            module_name,
            bm_info["stream"],
            bm_info["version"],
            bm_info["context"],
        )
        bm_nsvc = ":".join([
            module_name,
            bm_info["stream"],
            bm_info["version"],
            bm_info["context"],
        ])
        if not bm:
            raise RuntimeError(
                "Failed to retrieve the module {} from the database".format(
                    bm_nsvc))

        bm_mmd = bm.mmd()
        bm_xmd = bm_mmd.get_xmd()
        use_default_modules = bm_xmd.get("mbs", {}).get("use_default_modules", False)
        default_modules_scm_url = bm_xmd.get("mbs", {}).get("default_modules_scm_url")
        if not (use_default_modules or default_modules_scm_url):
            log.info("The base module %s has no default modules", bm_mmd.get_nsvc())
            continue

        # If the base module does not provide a default_modules_scm_url, use the default
        # that is configured
        default_modules_scm_url = default_modules_scm_url or conf.default_modules_scm_url
        default_modules = _get_default_modules(bm.stream, default_modules_scm_url)
        for name, stream in default_modules.items():
            ns = "{}:{}".format(name, stream)
            if name in buildrequires:
                conflicting_stream = buildrequires[name]["stream"]
                if stream == conflicting_stream:
                    log.info("The default module %s is already a buildrequire",
                             ns)
                    continue

                log.info(
                    "The default module %s will not be added as a buildrequire since %s:%s "
                    "is already a buildrequire",
                    ns,
                    name,
                    conflicting_stream,
                )
                continue

            # Query for the latest default module that was built against this base module
            resolver = GenericResolver.create(db_session, conf)
            base_mmds = get_compatible_base_module_mmds(resolver, bm_mmd)
            base_mmds = base_mmds["ready"] + base_mmds["garbage"]
            base_mmds.sort(
                key=lambda base: models.ModuleBuild.get_stream_version(
                    base.get_stream_name(), False),
                reverse=True,
            )
            for base_mmd in base_mmds:
                default_module_mmds = resolver.get_buildrequired_modulemds(
                    name, stream, base_mmd)
                if not default_module_mmds:
                    continue

                # We need to ensure that a module built against a compatible base module
                # stream really contains a runtime dependency on the current base module
                # stream. For example, in Fedora we can have platform:f30 and platform:f31
                # base module streams. There can be a foo:1 module built against
                # platform:f30 which can work with any platform ("requires: platform: []").
                # This module can be configured as a default module for platform:f31 and
                # we need to support this case, but at the same time we cannot simply add
                # any platform:f30-based module to platform:f31.
                module_found = False
                for default_module_mmd in default_module_mmds:
                    for deps in default_module_mmd.get_dependencies():
                        streams = deps.get_runtime_streams(module_name)
                        if streams is None:
                            continue
                        streams = expand_single_mse_streams(db_session, module_name, streams)
                        if bm_info["stream"] in streams:
                            module_found = True
                            break
                    else:
                        log.info(
                            "Not using module %s as default module, because it does not "
                            "contain runtime dependency on %s",
                            default_module_mmd.get_nsvc(), bm_nsvc)
                if module_found:
                    break
            else:
                log.warning(
                    "The default module %s from %s is not in the database and couldn't be added as "
                    "a buildrequire",
                    ns,
                    bm_nsvc,
                )
                continue
            # Use resolve_requires since it provides the exact format that is needed for
            # mbs.xmd.buildrequires
            resolved = resolver.resolve_requires(
                [default_module_mmd.get_nsvc()])

            nsvc = ":".join([
                name, stream, resolved[name]["version"],
                resolved[name]["context"]
            ])
            log.info("Adding the default module %s as a buildrequire", nsvc)
            buildrequires.update(resolved)
            defaults_added = True

    if defaults_added:
        mmd.set_xmd(xmd)
    return defaults_added
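
The nested loops above lean on Python's for/else construct twice: the else on
the inner dependency loop runs when no break occurred (the candidate module has
no matching runtime stream), and the else on the outer for base_mmd loop runs
when no compatible default module was found for any base stream. A minimal
self-contained demo of the pattern:

def find_first_even(numbers):
    for n in numbers:
        if n % 2 == 0:
            print("found", n)
            break
    else:
        # Runs only when the loop finished without hitting break.
        print("no even number found")

find_first_even([1, 3, 4])  # found 4
find_first_even([1, 3, 5])  # no even number found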