Example #1
def retrigger_new_repo_on_failure():
    """
    Retrigger failed new repo tasks for module builds in the build state.

    The newRepo task may fail for various reasons outside the scope of MBS.
    This method will detect this scenario and retrigger the newRepo task
    if needed, so the module build does not get stuck in the "build" state.
    """
    if conf.system != "koji":
        return

    koji_session = get_session(conf)
    module_builds = db_session.query(models.ModuleBuild).filter(
        models.ModuleBuild.state == models.BUILD_STATES["build"],
        models.ModuleBuild.new_repo_task_id.isnot(None),
    ).all()

    for module_build in module_builds:
        task_info = koji_session.getTaskInfo(module_build.new_repo_task_id)
        if task_info["state"] in [koji.TASK_STATES["CANCELED"], koji.TASK_STATES["FAILED"]]:
            log.info(
                "newRepo task %s for %r failed, starting another one",
                str(module_build.new_repo_task_id), module_build,
            )
            taginfo = koji_session.getTag(module_build.koji_tag + "-build")
            module_build.new_repo_task_id = koji_session.newRepo(taginfo["name"])

    db_session.commit()
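
A minimal standalone sketch of the same check-and-retrigger pattern against a plain Koji session. The hub URL, task id and tag in the commented usage are placeholders, and newRepo requires an authenticated session with permission to regenerate repos:

import koji

def retrigger_newrepo_if_failed(session, task_id, koji_tag):
    """Start a new newRepo task when the previous one was canceled or failed."""
    task_info = session.getTaskInfo(task_id)
    failed_states = (koji.TASK_STATES["CANCELED"], koji.TASK_STATES["FAILED"])
    if task_info["state"] in failed_states:
        tag_info = session.getTag(koji_tag + "-build")
        return session.newRepo(tag_info["name"])
    return task_id

# Hypothetical usage:
# session = koji.ClientSession("https://koji.example.org/kojihub")
# new_task_id = retrigger_newrepo_if_failed(session, 12345678, "module-foo-bar-20200101-abcd1234")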
Example #2
def find_module_built_rpms(modules_nsvc):
    """Find out built RPMs of given modules

    :param modules_nsvc: a list of modules' NSVC to find out built RPMs for
        each of them.
    :type modules_nsvc: list[str]
    :return: a sorted list of RPMs, each of them is represented as NEVR.
    :rtype: list[str]
    """
    import kobo.rpmlib
    resolver = GenericResolver.create(db_session, conf)

    built_rpms = []
    koji_session = get_session(conf, login=False)

    for nsvc in modules_nsvc:
        name, stream, version, context = nsvc.split(":")
        module = resolver.get_module(name,
                                     stream,
                                     version,
                                     context,
                                     strict=True)
        rpms = koji_session.listTaggedRPMS(module["koji_tag"], latest=True)[0]
        built_rpms.extend(
            kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)

    # In case there are duplicate NEVRs, ensure every NEVR is unique in the final list.
    # A sorted list of RPMs is also easier to read.
    return sorted(set(built_rpms))
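
A hedged usage sketch of the function above; the NSVC strings are placeholders and the call assumes the MBS database session and configuration it relies on are already initialized:

# Placeholder NSVC values, purely illustrative.
nevrs = find_module_built_rpms([
    "mymodule:mystream:20200101000000:abcd1234",
    "othermodule:rawhide:20200102000000:deadbeef",
])
# The result is a sorted, de-duplicated list of NEVR strings.
print("\n".join(nevrs))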
Example #3
    def _tag_cg_build(self):
        """
        Tags the Content Generator build to module.cg_build_koji_tag.
        """
        session = get_session(self.config)

        tag_name = self.module.cg_build_koji_tag
        if not tag_name:
            log.info(
                "%r: Not tagging Content Generator build, no cg_build_koji_tag set", self.module)
            return

        tag_names_to_try = [tag_name, self.config.koji_cg_default_build_tag]
        for tag in tag_names_to_try:
            log.info("Trying %s", tag)
            tag_info = session.getTag(tag)
            if tag_info:
                break

            log.info("%r: Tag %s not found in Koji, trying next one.", self.module, tag)

        if not tag_info:
            log.warning(
                "%r:, Not tagging Content Generator build, no available tag found, tried %r",
                self.module, tag_names_to_try,
            )
            return

        build = self._get_build()
        nvr = "%s-%s-%s" % (build["name"], build["version"], build["release"])

        log.info("Content generator build %s will be tagged as %s in Koji", nvr, tag)
        session.tagBuild(tag_info["id"], nvr)
Example #4
    def _get_build(self):
        ret = self.module.nvr
        if self.devel:
            ret["name"] += "-devel"
        ret[u"source"] = self.module.scmurl
        ret[u"start_time"] = calendar.timegm(self.module.time_submitted.utctimetuple())
        ret[u"end_time"] = calendar.timegm(self.module.time_completed.utctimetuple())
        ret[u"extra"] = {
            u"typeinfo": {
                u"module": {
                    u"module_build_service_id": self.module.id,
                    u"content_koji_tag": self.module.koji_tag,
                    u"modulemd_str": self._get_fixed_mmd(),
                    u"name": ret["name"],
                    u"stream": self.module.stream,
                    u"version": self.module.version,
                    u"context": self.module.context,
                }
            }
        }
        session = get_session(self.config, login=False)
        # Only add the CG build owner if the user exists in Koji
        if session.getUser(self.owner):
            ret[u"owner"] = self.owner
        return ret
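
For orientation, the metadata dict assembled by _get_build() has roughly the following shape; every value below is a placeholder, only the keys mirror the code above:

example_cg_build = {
    "name": "mymodule",                     # plus "-devel" for the devel variant
    "version": "mystream",
    "release": "20200101000000.abcd1234",
    "source": "https://src.example.org/modules/mymodule.git?#deadbeef",
    "start_time": 1577836800,               # epoch seconds from time_submitted
    "end_time": 1577840400,                 # epoch seconds from time_completed
    "extra": {
        "typeinfo": {
            "module": {
                "module_build_service_id": 1234,
                "content_koji_tag": "module-mymodule-mystream-20200101000000-abcd1234",
                "modulemd_str": "<modulemd document serialized to a string>",
                "name": "mymodule",
                "stream": "mystream",
                "version": "20200101000000",
                "context": "abcd1234",
            }
        }
    },
    "owner": "someuser",                    # only present when the user exists in Koji
}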
Example #5
    def get_final_mmds(self):
        # Returns dict of finalized mmds. Used to generate final modulemd files for scratch builds.
        session = get_session(self.config, login=False)
        self._load_koji_tag(session)

        finalmmds = {}
        for arch in self.arches:
            finalmmds[arch] = self._finalize_mmd(arch)

        return finalmmds
Example #6
def _get_rawhide_version():
    """
    Query Koji to find the rawhide version from the build target.

    :return: the rawhide version (e.g. "f32")
    :rtype: str
    """
    koji_session = get_session(conf, login=False)
    build_target = koji_session.getBuildTarget("rawhide")
    if build_target:
        return build_target["build_tag_name"].partition("-build")[0]
Example #7
def sync_koji_build_tags():
    """
    Method checking the "tagged" and "tagged_in_final" attributes of
    "complete" ComponentBuilds in the current batch of module builds
    in "building" state against the Koji.

    In case the Koji shows the build as tagged/tagged_in_final,
    fake "tagged" message is added to work queue.
    """
    if conf.system != "koji":
        return

    koji_session = get_session(conf, login=False)

    threshold = datetime.utcnow() - timedelta(minutes=10)
    module_builds = db_session.query(models.ModuleBuild).filter(
        models.ModuleBuild.time_modified < threshold,
        models.ModuleBuild.state == models.BUILD_STATES["build"]
    ).all()
    for module_build in module_builds:
        complete_components = module_build.current_batch(koji.BUILD_STATES["COMPLETE"])
        for c in complete_components:
            # In case the component is tagged in the build tag and
            # also tagged in the final tag (or it is build_time_only
            # and therefore should not be tagged in final tag), skip it.
            if c.tagged and (c.tagged_in_final or c.build_time_only):
                continue

            log.info(
                "%r: Component %r is complete, but not tagged in the "
                "final and/or build tags.",
                module_build, c,
            )

            # Check in which tags the component is tagged.
            tag_dicts = koji_session.listTags(c.nvr)
            tags = [tag_dict["name"] for tag_dict in tag_dicts]

            # If it is tagged in final tag, but MBS does not think so,
            # schedule fake message.
            if not c.tagged_in_final and module_build.koji_tag in tags:
                log.info(
                    "Apply tag %s to module build %r",
                    module_build.koji_tag, module_build)
                tagged.delay("internal:sync_koji_build_tags", module_build.koji_tag, c.nvr)

            # If it is tagged in the build tag, but MBS does not think so,
            # schedule fake message.
            build_tag = module_build.koji_tag + "-build"
            if not c.tagged and build_tag in tags:
                log.info(
                    "Apply build tag %s to module build %r",
                    build_tag, module_build)
                tagged.delay("internal:sync_koji_build_tags", build_tag, c.nvr)
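
The core of the check is asking Koji which tags a given NVR currently carries; a standalone sketch (the session and values in the commented usage are placeholders):

def koji_tags_for_build(session, nvr):
    """Return the set of tag names the given build (NVR) currently carries in Koji."""
    return {tag_dict["name"] for tag_dict in session.listTags(nvr)}

# Hypothetical usage: the dest tag is the module's koji_tag, the build tag adds "-build".
# import koji
# session = koji.ClientSession("https://koji.example.org/kojihub")
# tags = koji_tags_for_build(session, "bash-5.0.17-1.module_f32+1+abcd1234")
# tagged_in_final = module_koji_tag in tags
# tagged_in_build = module_koji_tag + "-build" in tags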
Example #8
    def get_built_rpms_in_module_build(cls, mmd):
        """
        :param Modulemd mmd: Modulemd to get the built RPMs from.
        :return: list of NVRs
        """
        build = models.ModuleBuild.get_build_from_nsvc(db_session,
                                                       mmd.get_module_name(),
                                                       mmd.get_stream_name(),
                                                       mmd.get_version(),
                                                       mmd.get_context())
        if build.koji_tag.startswith("repofile://"):
            # Modules from the local repository already have the RPMs filled in the mmd.
            return mmd.get_rpm_artifacts()
        else:
            koji_session = get_session(conf, login=False)
            rpms = koji_session.listTaggedRPMS(build.koji_tag, latest=True)[0]
            nvrs = set(
                kobo.rpmlib.make_nvr(rpm, force_epoch=True) for rpm in rpms)
            return list(nvrs)
Example #9
def delete_old_koji_targets():
    """
    Deletes targets older than `config.koji_target_delete_time` seconds
    from Koji to clean up after module builds.
    """
    if conf.system != "koji":
        return

    log.info("Looking for module builds which Koji target can be removed")

    now = datetime.utcnow()

    koji_session = get_session(conf)
    for target in koji_session.getBuildTargets():
        module = db_session.query(models.ModuleBuild).filter(
            models.ModuleBuild.koji_tag == target["dest_tag_name"],
            models.ModuleBuild.name.notin_(conf.base_module_names),
            models.ModuleBuild.state.notin_([
                models.BUILD_STATES["init"],
                models.BUILD_STATES["wait"],
                models.BUILD_STATES["build"],
            ]),
        ).options(
            load_only("time_completed"),
        ).first()

        if module is None:
            continue

        # Double-check that the target we are going to remove is prefixed
        # by our prefix, so we won't remove f26 when there is some garbage
        # in DB or Koji.
        for allowed_prefix in conf.koji_tag_prefixes:
            if target["name"].startswith(allowed_prefix + "-"):
                break
        else:
            log.error("Module %r has Koji target with not allowed prefix.", module)
            continue

        delta = now - module.time_completed
        if delta.total_seconds() > conf.koji_target_delete_time:
            log.info("Removing target of module %r", module)
            koji_session.deleteBuildTarget(target["id"])
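
The retention check boils down to a simple age comparison; a self-contained sketch where the 24-hour limit stands in for conf.koji_target_delete_time:

from datetime import datetime, timedelta

def target_is_expired(time_completed, delete_time_seconds):
    """True when the module completed long enough ago for its Koji target to be removed."""
    return (datetime.utcnow() - time_completed).total_seconds() > delete_time_seconds

# Illustrative values: completed 25 hours ago, 24-hour retention.
assert target_is_expired(datetime.utcnow() - timedelta(hours=25), 24 * 3600)
assert not target_is_expired(datetime.utcnow() - timedelta(hours=1), 24 * 3600)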
Example #10
    def get_average_build_time(self, component):
        """
        Get the average build time of the component from Koji.

        :param component: a ComponentBuild object
        :return: a float of the average build time in seconds
        """
        # We currently don't track build times in MBS directly, so we can use Koji to get
        # a decent estimate.
        try:
            if not self.koji_session:
                self.koji_session = get_session(self.config, login=False)
            # If the component has not been built before, then None is returned. Instead,
            # let's return 0.0 so the type is consistent.
            return self.koji_session.getAverageBuildDuration(
                component.package) or 0.0
        except Exception:
            # If Koji is not configured on the system, just return 0.0 for components.
            log.debug(
                "The Koji call to getAverageBuildDuration failed. Is Koji properly configured?"
            )
            return 0.0
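
A standalone sketch of the underlying Koji call and the None-to-0.0 normalization; the package name in the commented usage is a placeholder:

def average_build_seconds(session, package):
    """getAverageBuildDuration returns None for packages never built; normalize to 0.0."""
    return session.getAverageBuildDuration(package) or 0.0

# Hypothetical usage:
# import koji
# session = koji.ClientSession("https://koji.example.org/kojihub")
# print(average_build_seconds(session, "bash"))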
Example #11
def get_modulemds_from_ursine_content(tag):
    """Get all modules metadata which were added to ursine content

    Ursine content is the tag inheritance managed by Ursa-Major by adding
    specific modules' koji_tag.

    Background of module build based on ursine content:

    Each module build buildrequires a platform module, which is a presudo-module
    used to connect to an external repository whose packages will be present
    in the buildroot. In practice, the external repo is generated from a build
    tag which could inherit from a few module koji_tags so that those module's
    RPMs could be build dependencies for some specific packages.

    So, this function is to find out all module koji_tags from the build tag
    and return corresponding module metadata.

    :param str tag: a base module's koji_tag.
    :return: list of module metadata. Empty list will be returned if no ursine
        modules metadata is found.
    :rtype: list[Modulemd.Module]
    """
    resolver = GenericResolver.create(db_session, conf)

    koji_session = get_session(conf, login=False)
    repos = koji_session.getExternalRepoList(tag)
    build_tags = find_build_tags_from_external_repos(koji_session, repos)
    if not build_tags:
        log.debug("No external repo containing ursine content is found.")
        return []
    modulemds = []
    for tag in build_tags:
        koji_tags = find_module_koji_tags(koji_session, tag)
        for koji_tag in koji_tags:
            md = resolver.get_modulemd_by_koji_tag(koji_tag)
            if md:
                modulemds.append(md)
            else:
                log.warning("No module is found by koji_tag '%s'", koji_tag)
    return modulemds
Example #12
    def koji_import(self, devel=False):
        """This method imports given module into the configured koji instance as
        a content generator based build

        Raises an exception when error is encountered during import

        :param bool devel: True if the "-devel" module should be created and imported.
            The "-devel" module build contains only the RPMs which are normally filtered
            from the module build. If set to False, normal module build respecting the
            filters is created and imported.
        """
        self.devel = devel
        session = get_session(self.config)
        self._load_koji_tag(session)

        file_dir = self._prepare_file_directory()
        metadata = self._get_content_generator_metadata(file_dir)
        try:
            serverdir = self._upload_outputs(session, metadata, file_dir)
            try:
                build_info = session.CGImport(metadata, serverdir)
            except koji.GenericError as e:
                if "Build already exists" not in str(e):
                    raise
                log.warning("Failed to import content generator")
                build_info = None
            if conf.koji_cg_tag_build:
                self._tag_cg_build()
            if build_info is not None:
                log.info("Content generator import done.")
                log.debug(json.dumps(build_info, sort_keys=True, indent=4))

                # Only remove the logs if CG import was successful.  If it fails,
                # then we want to keep them around for debugging.
                log.info("Removing %r", file_dir)
                shutil.rmtree(file_dir)
        except Exception as e:
            log.exception("Content generator import failed: %s", e)
            raise e
Example #13
def get_corresponding_module_build(nvr):
    """Find corresponding module build from database and return

    :param str nvr: module build NVR. This is the subject_identifier included
        inside ``greenwave.decision.update`` message.
    :return: the corresponding module build object. For whatever the reason,
        if the original module build id cannot be found from the Koji build of
        ``nvr``, None will be returned.
    :rtype: :class:`ModuleBuild` or None
    """
    koji_session = get_session(conf, login=False)
    build_info = koji_session.getBuild(nvr)
    if build_info is None:
        return None

    try:
        module_build_id = build_info["extra"]["typeinfo"]["module"][
            "module_build_service_id"]
    except KeyError:
        # If any of the keys is not present, the NVR is not the one for
        # handling Greenwave event.
        return None

    return ModuleBuild.get_by_id(db_session, module_build_id)
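
The interesting part is digging the MBS build id out of the Koji build's extra metadata (the structure written by the content generator import, see _get_build above); a self-contained sketch with placeholder values:

def mbs_id_from_build_info(build_info):
    """Extract the MBS module build id stored in a Koji build's extra metadata, or None."""
    try:
        return build_info["extra"]["typeinfo"]["module"]["module_build_service_id"]
    except (KeyError, TypeError):
        return None

assert mbs_id_from_build_info(
    {"extra": {"typeinfo": {"module": {"module_build_service_id": 1234}}}}) == 1234
assert mbs_id_from_build_info({"extra": {}}) is None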
Example #14
def fail_lost_builds():
    # This function is supposed to handle only the part which can't be
    # updated through messaging (e.g. srpm-build failures). Please keep it
    # fit `n` slim. We do want the rest to be processed elsewhere.
    # TODO re-use

    if conf.system == "koji":
        # We don't do this on behalf of users
        koji_session = get_session(conf, login=False)
        log.info("Querying tasks for statuses:")
        res = db_session.query(models.ComponentBuild).filter_by(
            state=koji.BUILD_STATES["BUILDING"]
        ).options(lazyload("module_build")).all()

        log.info("Checking status for %s tasks", len(res))
        for component_build in res:
            log.debug(component_build.json(db_session))
            # Don't check tasks which haven't been triggered yet
            if not component_build.task_id:
                continue

            # Don't check tasks for components which have been reused,
            # they may have BUILDING state temporarily before we tag them
            # to the new module tag. Checking them would be a waste of resources.
            if component_build.reused_component_id:
                log.debug(
                    'Skipping check for task "%s", the component has been reused ("%s").',
                    component_build.task_id, component_build.reused_component_id
                )
                continue

            task_id = component_build.task_id

            log.info('Checking status of task_id "%s"', task_id)
            task_info = koji_session.getTaskInfo(task_id)

            state_mapping = {
                # Cancelled and failed builds should be marked as failed.
                koji.TASK_STATES["CANCELED"]: koji.BUILD_STATES["FAILED"],
                koji.TASK_STATES["FAILED"]: koji.BUILD_STATES["FAILED"],
                # Completed tasks should be marked as complete.
                koji.TASK_STATES["CLOSED"]: koji.BUILD_STATES["COMPLETE"],
            }

            # If it is a closed/completed task, then we can extract the NVR
            build_version, build_release = None, None  # defaults
            if task_info["state"] == koji.TASK_STATES["CLOSED"]:
                builds = koji_session.listBuilds(taskID=task_id)
                if not builds:
                    log.warning(
                        "Task ID %r is closed, but we found no builds in koji.", task_id)
                elif len(builds) > 1:
                    log.warning(
                        "Task ID %r is closed, but more than one build is present!", task_id)
                else:
                    build_version = builds[0]["version"]
                    build_release = builds[0]["release"]

            log.info("  task %r is in state %r", task_id, task_info["state"])
            if task_info["state"] in state_mapping:
                build_task_finalize.delay(
                    msg_id="producer::fail_lost_builds fake msg",
                    task_id=component_build.task_id,
                    build_new_state=state_mapping[task_info["state"]],
                    build_name=component_build.package,
                    build_release=build_release,
                    build_version=build_version,
                )

    elif conf.system == "mock":
        pass
Example #15
def build_task_finalize(msg_id,
                        task_id,
                        build_new_state,
                        build_name,
                        build_version,
                        build_release,
                        module_build_id=None,
                        state_reason=None):
    """Called when corresponding Koji build task of a component build finishes

    When a task finishes, the task could be in state COMPLETE, FAILED or CANCELED.

    :param str msg_id: the original id of the message being handled which is
        received from the message bus.
    :param int task_id: the Koji build task id.
    :param int build_new_state: the state of the build. Refer to
        ``koji.BUILD_STATES`` for details. For this handler, values could be
        the corresponding integer value of COMPLETE, FAILED or CANCELED.
    :param str build_name: the build name.
    :param str build_version: the build version.
    :param str build_release: the build release.
    :param int module_build_id: optionally set when this event handler is
        scheduled from internal rather than just handling the received message.
        When set, the value should be the id of module build having the
        component build just built by the finished task.
    :param str state_reason: optional. When a reason is set explicitly, the
        corresponding component build will have it as its ``state_reason``.
        Otherwise, a custom reason will be set for a failed build.
    """

    # First, find our ModuleBuild associated with this component, if any.
    component_build = models.ComponentBuild.from_component_event(
        db_session, task_id, module_id=module_build_id)
    nvr = "{}-{}-{}".format(build_name, build_version, build_release)

    if not component_build:
        log.debug("We have no record of %s", nvr)
        return

    log.info("Saw relevant component build of %r from %r.", nvr, msg_id)

    if not state_reason:
        if build_new_state != koji.BUILD_STATES["COMPLETE"]:
            state_reason = "Failed to build artifact {} in Koji".format(build_name)
        else:
            state_reason = ""

    # Mark the state in the db.
    component_build.state = build_new_state
    component_build.nvr = nvr
    component_build.state_reason = state_reason
    db_session.commit()

    parent = component_build.module_build

    # If the macro build failed, then the module is doomed.
    if (component_build.package == "module-build-macros"
            and build_new_state != koji.BUILD_STATES["COMPLETE"]):
        parent.transition(
            db_session,
            conf,
            state=models.BUILD_STATES["failed"],
            state_reason=state_reason,
            failure_type="user",
        )
        db_session.commit()
        return

    if (component_build.buildonly and conf.system in ["koji", "test"]
            and build_new_state == koji.BUILD_STATES["COMPLETE"]):
        koji_session = get_session(conf, login=False)
        rpms = koji_session.listBuildRPMs(component_build.nvr)
        mmd = parent.mmd()
        for artifact in rpms:
            mmd.add_rpm_filter(artifact["name"])
        parent.modulemd = mmd_to_str(mmd)
        db_session.commit()

    parent_current_batch = parent.current_batch()

    # If there are no other components still building in a batch,
    # we can tag all successfully built components in the batch.
    unbuilt_components_in_batch = [
        c for c in parent_current_batch
        if c.is_waiting_for_build or c.is_building
    ]
    if not unbuilt_components_in_batch:
        failed_components_in_batch = [
            c for c in parent_current_batch if c.is_unsuccessful
        ]
        built_components_in_batch = [
            c for c in parent_current_batch if c.is_completed
        ]

        builder = GenericBuilder.create_from_module(db_session, parent, conf)

        if failed_components_in_batch:
            log.info(
                "Batch done, but not tagging because of failed component builds. Will "
                'transition the module to "failed"')
            state_reason = "Component(s) {} failed to build.".format(", ".join(
                c.package for c in failed_components_in_batch))
            parent.transition(
                db_session,
                conf,
                state=models.BUILD_STATES["failed"],
                state_reason=state_reason,
                failure_type="user",
            )
            db_session.commit()
            return []
        elif not built_components_in_batch:
            # If there are no successfully built components in a batch, there is nothing to tag.
            # The repository won't be regenerated in this case and therefore we generate fake repo
            # change message here.
            log.info("Batch done. No component to tag")
            from module_build_service.scheduler.handlers.repos import done as repos_done_handler
            events.scheduler.add(
                repos_done_handler,
                ("fake_msg", builder.module_build_tag["name"]))
        else:
            built_component_nvrs_in_batch = [
                c.nvr for c in built_components_in_batch
            ]
            # tag && add to srpm-build group if necessary
            log.info("Batch done.  Tagging %i component(s) in the build tag." %
                     len(built_component_nvrs_in_batch))
            log.debug("%r" % built_component_nvrs_in_batch)
            # TODO: install=component_build.build_time_only works here because module-build-macros
            # is alone in its batch and the only component with build_time_only set. All other
            # batches will have install=False. If we expand to have batches with a mix of
            # build_time_only and not components, then this logic will need to change.
            builder.buildroot_add_artifacts(
                built_component_nvrs_in_batch,
                install=component_build.build_time_only)

            # Do not tag packages which only belong to the build tag to the dest tag
            component_nvrs_to_tag_in_dest = [
                c.nvr for c in built_components_in_batch
                if c.build_time_only is False
            ]
            log.info("Tagging %i component(s) in the dest tag." %
                     len(component_nvrs_to_tag_in_dest))
            if component_nvrs_to_tag_in_dest:
                builder.tag_artifacts(component_nvrs_to_tag_in_dest)

        db_session.commit()

    elif any(not c.is_building for c in unbuilt_components_in_batch):
        # We are not in the middle of the batch building and
        # we have some unbuilt components in this batch. We might hit the
        # concurrent builds threshold in previous call of continue_batch_build
        # done in repos.py:done(...), but because we have just finished one
        # build, try to call continue_batch_build again so in case we hit the
        # threshold previously, we will submit another build from this batch.
        builder = GenericBuilder.create_from_module(db_session, parent, conf)
        continue_batch_build(conf, parent, builder)
Example #16
def handle_collisions_with_base_module_rpms(mmd, arches):
    """
    Find any RPMs in the buildrequired base modules that collide with the buildrequired modules.

    If a buildrequired module contains RPMs that overlap with RPMs in the buildrequired base
    modules, then the NEVRAs of the overlapping RPMs in the base modules will be added as conflicts
    in the input modulemd.

    :param Modulemd.ModuleStream mmd: the modulemd to find the collisions
    :param list arches: the arches to limit the external repo queries to
    :raise RuntimeError: when a Koji query fails
    """
    log.info(
        "Finding any buildrequired modules that collide with the RPMs in the base modules"
    )
    bm_tags = set()
    non_bm_tags = set()
    xmd = mmd.get_xmd()
    buildrequires = xmd["mbs"]["buildrequires"]
    for name, info in buildrequires.items():
        if not info["koji_tag"]:
            continue

        if name in conf.base_module_names:
            bm_tags.add(info["koji_tag"])
        else:
            non_bm_tags.add(info["koji_tag"])

    if not (bm_tags and non_bm_tags):
        log.info(
            "Skipping the collision check since collisions are not possible with these "
            "buildrequires")
        return

    log.debug(
        "Querying Koji for the latest RPMs from the buildrequired base modules from the tags: %s",
        ", ".join(bm_tags),
    )
    koji_session = get_session(conf, login=False)
    bm_rpms = _get_rpms_from_tags(koji_session, list(bm_tags), arches)
    # The keys are base module RPM names and the values are sets of RPM NEVRAs with that name
    name_to_nevras = {}
    for bm_rpm in bm_rpms:
        rpm_name = kobo.rpmlib.parse_nvra(bm_rpm)["name"]
        name_to_nevras.setdefault(rpm_name, set())
        name_to_nevras[rpm_name].add(bm_rpm)
    # Clear this out of RAM as soon as possible since this value can be huge
    del bm_rpms

    log.debug(
        "Querying Koji for the latest RPMs from the other buildrequired modules from the tags: %s",
        ", ".join(non_bm_tags),
    )
    # This will contain any NEVRAs of RPMs in the base module tag with the same name as those in the
    # buildrequired modules
    conflicts = set()
    non_bm_rpms = _get_rpms_from_tags(koji_session, list(non_bm_tags), arches)
    for rpm in non_bm_rpms:
        rpm_name = kobo.rpmlib.parse_nvra(rpm)["name"]
        if rpm_name in name_to_nevras:
            conflicts = conflicts | name_to_nevras[rpm_name]

    # Add the conflicting NEVRAs to `ursine_rpms` so the Conflicts are later generated for them
    # in the KojiModuleBuilder.
    xmd["mbs"]["ursine_rpms"] = list(conflicts)
    mmd.set_xmd(xmd)
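
The collision detection is a name-based set intersection; a self-contained sketch using kobo.rpmlib.parse_nvra with placeholder NEVRAs:

import kobo.rpmlib

def find_colliding_nevras(base_module_nevras, other_module_nevras):
    """Return base-module NEVRAs whose RPM name also appears among the other modules' RPMs."""
    name_to_nevras = {}
    for nevra in base_module_nevras:
        name = kobo.rpmlib.parse_nvra(nevra)["name"]
        name_to_nevras.setdefault(name, set()).add(nevra)

    conflicts = set()
    for nevra in other_module_nevras:
        name = kobo.rpmlib.parse_nvra(nevra)["name"]
        conflicts |= name_to_nevras.get(name, set())
    return conflicts

# Placeholder NEVRAs, only to show the name-based matching.
base = {"python3-3.8.2-1.fc32.x86_64", "bash-5.0.17-1.fc32.x86_64"}
other = {"python3-3.9.0-1.module_f32.x86_64"}
assert find_colliding_nevras(base, other) == {"python3-3.8.2-1.fc32.x86_64"}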
Example #17
    def get_buildrequired_koji_builds(self, name, stream, base_module_mmd):
        """
        Returns list of Koji build dicts of all module builds with `name` and `stream` which are
        tagged in the Koji tag defined in `base_module_mmd`.

        :param str name: Name of module to return.
        :param str stream: Stream of module to return.
        :param Modulemd base_module_mmd: Base module metadata.
        :return list: List of Koji build dicts.
        """
        # Get the `koji_tag_with_modules`. If it is not configured for the
        # base module, fall back to the DBResolver.
        tag = base_module_mmd.get_xmd().get("mbs", {}).get("koji_tag_with_modules")
        if not tag:
            return []

        koji_session = get_session(conf, login=False)
        event = koji_session.getLastEvent()

        # List all the modular builds in the modular Koji tag.
        # We cannot use latest=True here, because we need to get all the
        # available streams of all modules. The stream is represented as
        # "version" in Koji build and with latest=True, Koji would return
        # only builds with the highest version.
        # We also cannot ask for particular `stream`, because Koji does not support that.
        module_builds = koji_session.listTagged(
            tag, inherit=True, type="module", package=name, event=event["id"])

        # Filter out different streams. Note that the stream name in the b["version"] is
        # normalized. This makes it impossible to find out its original value. We therefore
        # filter out only completely different stream names here to reduce the `module_builds`
        # dramatically, but the resulting `module_builds` list might still contain unwanted
        # streams. We will get rid of them using the `_filter_based_on_real_stream_name` method
        # later.
        # Example of such streams: "fedora-30" and "fedora_30". They will both be normalized to
        # "fedora_30".
        normalized_stream = stream.replace("-", "_")
        module_builds = [b for b in module_builds if b["version"] == normalized_stream]

        # Filter out builds inherited from non-top tag
        module_builds = self._filter_inherited(koji_session, module_builds, tag, event)

        # Filter out modules based on the real stream name.
        module_builds = self._filter_based_on_real_stream_name(koji_session, module_builds, stream)

        # Find the latest builds of all modules. This does the following:
        # - Sorts the module_builds descending by Koji NVR (which maps to NSV
        #   for modules). Split release into modular version and context, and
        #   treat version as numeric.
        # - Groups the sorted module_builds by NV (NS in modular world).
        #   In each resulting `ns_group`, the first item is actually build
        #   with the latest version (because the list is still sorted by NVR).
        # - Groups the `ns_group` again by "release" ("version" in modular
        #   world) to just get all the "contexts" of the given NSV. This is
        #   stored in `nsv_builds`.
        # - The `nsv_builds` contains the builds representing all the contexts
        #   of the latest version for the given name-stream, so add them to
        #   `latest_builds`.
        def _key(build):
            ver, ctx = build["release"].split(".", 1)
            return build["name"], build["version"], int(ver), ctx

        latest_builds = []
        module_builds = sorted(module_builds, key=_key, reverse=True)
        for _, ns_builds in groupby(
                module_builds, key=lambda x: ":".join([x["name"], x["version"]])):
            for _, nsv_builds in groupby(
                    ns_builds, key=lambda x: x["release"].split(".")[0]):
                latest_builds += list(nsv_builds)
                break
        return latest_builds
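
The sort-and-groupby selection at the end is easier to follow on toy data; a self-contained sketch with placeholder builds whose keys mirror the Koji build dicts used above:

from itertools import groupby

# Placeholder module builds, reduced to the relevant keys:
# "version" holds the (normalized) stream, "release" holds "<module version>.<context>".
builds = [
    {"name": "foo", "version": "rawhide", "release": "20200102.c1"},
    {"name": "foo", "version": "rawhide", "release": "20200102.c2"},
    {"name": "foo", "version": "rawhide", "release": "20200101.c1"},
]

def _key(build):
    ver, ctx = build["release"].split(".", 1)
    return build["name"], build["version"], int(ver), ctx

latest_builds = []
for _, ns_builds in groupby(
        sorted(builds, key=_key, reverse=True),
        key=lambda x: ":".join([x["name"], x["version"]])):
    for _, nsv_builds in groupby(ns_builds, key=lambda x: x["release"].split(".")[0]):
        # The first group holds the highest module version; keep all of its contexts.
        latest_builds += list(nsv_builds)
        break

# Both contexts of version 20200102 are kept, the older 20200101 build is dropped.
assert [b["release"] for b in latest_builds] == ["20200102.c2", "20200102.c1"]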
Example #18
def test_get_anonymous_session(mock_session):
    mbs_config = mock.Mock(koji_profile="koji", koji_config="conf/koji.conf")
    session = get_session(mbs_config, login=False)
    assert mock_session.return_value == session
    mock_session.return_value.krb_login.assert_not_called()
Example #19
    def _koji_rpms_in_tag(self, tag):
        """ Return the list of koji rpms in a tag. """
        log.debug("Listing rpms in koji tag %s", tag)
        session = get_session(self.config, login=False)

        try:
            rpms, builds = session.listTaggedRPMS(tag, latest=True)
        except koji.GenericError:
            log.exception("Failed to list rpms in tag %r", tag)
            # If the tag doesn't exist, then there are no rpms in that tag.
            return []

        # Module does not contain any RPM, so return an empty list.
        if not rpms:
            return []

        # Get the exclusivearch, excludearch and license data for each RPM.
        # The exclusivearch and excludearch lists are set in the source RPM from which
        # the RPM was built.
        # Create temporary dicts that map rpm_id to the RPM data, for source and binary RPMs.
        src_rpms = {}
        binary_rpms = {}
        for rpm in rpms:
            if rpm["arch"] == "src":
                src_rpms[rpm["id"]] = rpm
            else:
                binary_rpms[rpm["id"]] = rpm
        # Prepare the arguments for Koji multicall.
        # We will call session.getRPMHeaders(...) for each SRC RPM to get exclusivearch,
        # excludearch and license headers.
        multicall_kwargs = [
            {"rpmID": rpm_id, "headers": ["exclusivearch", "excludearch", "license"]}
            for rpm_id in src_rpms.keys()
        ]
        # For each binary RPM, we only care about the "license" header.
        multicall_kwargs += [
            {"rpmID": rpm_id, "headers": ["license"]} for rpm_id in binary_rpms.keys()
        ]
        rpms_headers = koji_retrying_multicall_map(
            session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs
        )

        # Temporary dict with build_id as a key to find builds easily.
        builds = {build["build_id"]: build for build in builds}

        # Create a mapping of build IDs to SRPM NEVRAs so that the for loop below can directly
        # access these values when adding the `srpm_nevra` key to the returned RPMs
        build_id_to_srpm_nevra = {
            srpm["build_id"]: kobo.rpmlib.make_nvra(srpm, force_epoch=True)
            for srpm in src_rpms.values()
        }
        # Handle the multicall result. For each build associated with the source RPM,
        # store the exclusivearch and excludearch lists. For each RPM, store the 'license' and
        # also other useful data from the Build associated with the RPM.
        for rpm, headers in zip(chain(src_rpms.values(), binary_rpms.values()), rpms_headers):
            if not headers:
                raise RuntimeError("No RPM headers received from Koji for RPM %s" % rpm["name"])
            if "license" not in headers:
                raise RuntimeError(
                    "No RPM 'license' header received from Koji for RPM %s" % rpm["name"])
            build = builds[rpm["build_id"]]
            if "exclusivearch" in headers and "excludearch" in headers:
                build["exclusivearch"] = headers["exclusivearch"]
                build["excludearch"] = headers["excludearch"]

            rpm["license"] = headers["license"]
            rpm["srpm_name"] = build["name"]
            rpm["srpm_nevra"] = build_id_to_srpm_nevra[rpm["build_id"]]
            rpm["exclusivearch"] = build["exclusivearch"]
            rpm["excludearch"] = build["excludearch"]

        return rpms
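
koji_retrying_multicall_map is an MBS helper; the same batching can be sketched with the plain ClientSession multicall interface (the hub URL and RPM ids in the commented usage are placeholders):

def fetch_rpm_headers(session, rpm_ids, headers):
    """Fetch the selected headers for many RPMs in a single Koji round trip."""
    session.multicall = True
    for rpm_id in rpm_ids:
        session.getRPMHeaders(rpmID=rpm_id, headers=headers)
    results = session.multiCall()
    # Each entry is a one-element list with the result on success, or a fault dict on error.
    return [r[0] if isinstance(r, list) else None for r in results]

# Hypothetical usage:
# import koji
# session = koji.ClientSession("https://koji.example.org/kojihub")
# fetch_rpm_headers(session, [123, 456], ["license", "exclusivearch", "excludearch"])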