Example #1
    def _prepare_file_directory(self):
        """ Creates a temporary directory that will contain all the files
        mentioned in the outputs section

        Returns the path to the temporary directory
        """
        prepdir = tempfile.mkdtemp(prefix="koji-cg-import")
        mmd_path = os.path.join(prepdir, "modulemd.txt")
        log.info("Writing generic modulemd.yaml to %r" % mmd_path)
        with open(mmd_path, "w", encoding="utf-8") as mmd_f:
            mmd_f.write(self._get_fixed_mmd())

        mmd_path = os.path.join(prepdir, "modulemd.src.txt")
        self._download_source_modulemd(self.module.mmd(), mmd_path)

        for arch in self.arches:
            mmd_path = os.path.join(prepdir, "modulemd.%s.txt" % arch)
            log.info("Writing %s modulemd.yaml to %r" % (arch, mmd_path))
            mmd = self._finalize_mmd(arch)
            with open(mmd_path, "w", encoding="utf-8") as mmd_f:
                mmd_f.write(mmd)

        log_path = os.path.join(prepdir, "build.log")
        try:
            source = build_logs.path(db_session, self.module)
            log.info("Moving logs from %r to %r" % (source, log_path))
            shutil.copy(source, log_path)
        except IOError as e:
            log.exception(e)
        return prepdir
Example #2
    def query_policies(self, return_all=False):
        """
        Query policies to greenwave
        :param return_all: Return all policies, if False select by subject_type and decision_context
        :type return_all: bool
        :return: response
        :rtype: dict
        """
        response = self._greenwave_query('policies')

        if return_all:
            return response

        try:
            selective_resp = {
                "policies": [
                    pol for pol in response["policies"]
                    if pol["decision_context"] == self.decision_context
                    and pol["subject_type"] == self.subject_type
                ]
            }
        except KeyError:
            log.exception(
                "Incorrect greenwave response (Mandatory key is missing)")
            raise GreenwaveError(
                "Incorrect greenwave response (Mandatory key is missing)")
        return selective_resp
Example #3
    def is_released_as_per_schedule(pp_release):
        """
        Check if the specified scheduled task date has been reached. Returns True if it has.
        """
        if not conf.product_pages_schedule_task_name:
            log.debug(config_msg, "product_pages_schedule_task_name")
            return False

        schedule_url = "{}/api/v7/releases/{}/schedule-tasks/?fields=name,date_finish".format(
            conf.product_pages_url.rstrip("/"), pp_release)

        try:
            pp_rv = requests.get(schedule_url, timeout=15)
            pp_json = pp_rv.json()
            # Catch requests failures and JSON parsing errors
        except (requests.exceptions.RequestException, ValueError):
            log.exception(
                "The query to the Product Pages at %s failed. Assuming it is not available.",
                schedule_url,
            )
            return False

        name = conf.product_pages_schedule_task_name.lower().strip()
        for task in pp_json:
            if task['name'].lower().strip() == name:
                task_date = task['date_finish']
                if datetime.strptime(task_date, "%Y-%m-%d").date() >= datetime.utcnow().date():
                    log.debug(
                        "The task date %s hasn't been reached yet. Not adding a stream suffix.",
                        task_date
                    )
                    return False
                return True
        # Schedule task not available; rely on GA date
        return False
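A standalone sketch of the parsed-date comparison used above (and again in is_released, Example #5); the date value is made up:

from datetime import datetime

task_date = "2024-06-01"  # hypothetical date_finish value from Product Pages
# The scheduled date counts as reached only once it is strictly in the past:
reached = datetime.strptime(task_date, "%Y-%m-%d").date() < datetime.utcnow().date()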
Example #4
def start_build_component(db_session, builder, c):
    """
    Submits single component build to builder. Called in thread
    by QueueBasedThreadPool in continue_batch_build.

    This function runs inside separate threads that share one SQLAlchemy
    session object to update a module build state once there is something wrong
    when one of its components is submitted to Koji to build.
    """
    import koji

    try:
        c.task_id, c.state, c.state_reason, c.nvr = builder.build(
            artifact_name=c.package, source=c.scmurl)
    except Exception as e:
        c.state = koji.BUILD_STATES["FAILED"]
        c.state_reason = "Failed to build artifact %s: %s" % (c.package, str(e))
        log.exception(e)
        with BUILD_COMPONENT_DB_SESSION_LOCK:
            c.module_build.transition(conf, models.BUILD_STATES["failed"], failure_type="infra")
            db_session.commit()
        return

    if not c.task_id and c.is_building:
        c.state = koji.BUILD_STATES["FAILED"]
        c.state_reason = "Failed to build artifact %s: Builder did not return task ID" % (c.package)
        with BUILD_COMPONENT_DB_SESSION_LOCK:
            c.module_build.transition(conf, models.BUILD_STATES["failed"], failure_type="infra")
            db_session.commit()
        return
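The dispatcher named in the docstring is MBS's own QueueBasedThreadPool; as a rough stand-in only, the same fan-out can be sketched with the standard library (unbuilt_components is a hypothetical iterable of component rows):

from concurrent.futures import ThreadPoolExecutor

with ThreadPoolExecutor(max_workers=10) as pool:
    for c in unbuilt_components:
        pool.submit(start_build_component, db_session, builder, c)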
Example #5
    def is_released(pp_release, url):
        """
        Check if the stream has been released. Return True if it has.
        """
        try:
            pp_rv = requests.get(url, timeout=15)
            pp_json = pp_rv.json()
        # Catch requests failures and JSON parsing errors
        except (requests.exceptions.RequestException, ValueError):
            log.exception(
                "The query to the Product Pages at %s failed. Assuming it is not yet released.",
                url,
            )
            return False

        ga_date = pp_json.get("ga_date")
        if not ga_date:
            log.debug("A release date for the release %s could not be determined", pp_release)
            return False

        if datetime.strptime(ga_date, "%Y-%m-%d").date() >= datetime.utcnow().date():
            log.debug(
                "The release %s hasn't been released yet. Not adding a stream suffix.",
                ga_date
            )
            return False
        return True
Example #6
def _get_default_modules(stream, default_modules_scm_url):
    """
    Get the base module's default modules.

    :param str stream: the stream of the base module
    :param str default_modules_scm_url: the SCM URL to the default modules
    :return: a dictionary where the keys are default module names and the values are default module
        streams
    :rtype: dict
    :raise RuntimeError: if no default modules can be retrieved for that stream
    """
    scm_obj = scm.SCM(default_modules_scm_url)
    temp_dir = tempfile.mkdtemp()
    try:
        log.debug("Cloning the default modules repo at %s",
                  default_modules_scm_url)
        scm_obj.clone(temp_dir)
        log.debug("Checking out the branch %s", stream)
        try:
            scm_obj.checkout_ref(stream)
        except UnprocessableEntity:
            # If the checkout fails, try seeing if this is a rawhide build. In this case, the branch
            # should actually be conf.rawhide_branch. The check to see if this is a rawhide build
            # is done after the first checkout failure for performance reasons, since it avoids an
            # unnecessary connection and query to Koji.
            if conf.uses_rawhide:
                log.debug(
                    "Checking out the branch %s from the default modules repo failed. Trying to "
                    "determine if this stream represents rawhide.",
                    stream,
                )
                if _get_rawhide_version() == stream:
                    log.debug(
                        "The stream represents rawhide, will try checking out %s",
                        conf.rawhide_branch,
                    )
                    # There's no try/except here because we want the outer except block to
                    # catch this in the event the rawhide branch doesn't exist
                    scm_obj.checkout_ref(conf.rawhide_branch)
                else:
                    # If it's not a rawhide build, then the branch should have existed
                    raise
            else:
                # If it's not a rawhide build, then the branch should have existed
                raise

        idx = Modulemd.ModuleIndex.new()
        idx.update_from_defaults_directory(
            path=scm_obj.sourcedir,
            overrides_path=os.path.join(scm_obj.sourcedir, "overrides"),
            strict=True,
        )
        return idx.get_default_streams()
    except:  # noqa: E722
        msg = "Failed to retrieve the default modules"
        log.exception(msg)
        raise RuntimeError(msg)
    finally:
        shutil.rmtree(temp_dir)
Example #7
    def patch(self, api_version, id):
        username, groups = module_build_service.web.auth.get_user(request)

        try:
            r = json.loads(request.get_data().decode("utf-8"))
        except Exception:
            log.exception("Invalid JSON submitted")
            raise ValidationError("Invalid JSON submitted")

        if "owner" in r:
            if conf.no_auth is not True:
                raise ValidationError(
                    "The request contains 'owner' parameter, however NO_AUTH is not allowed"
                )
            elif username == "anonymous":
                username = r["owner"]

        self.check_groups(username, groups)

        module = models.ModuleBuild.query.filter_by(id=id).first()
        if not module:
            raise NotFound("No such module found.")

        if module.owner != username and not (conf.admin_groups & groups):
            raise Forbidden(
                "You are not owner of this build and therefore cannot modify it."
            )

        if not r.get("state"):
            log.error("Invalid request: no 'state' provided")
            raise ValidationError("The 'state' parameter is required")

        state = r["state"]
        valid_input_states = ("failed", str(models.BUILD_STATES["failed"]))
        if state not in valid_input_states:
            raise ValidationError(
                "An invalid state was submitted. Valid states values are: {}".
                format(", ".join(valid_input_states)))

        valid_states_to_cancel = ("build", "init", "wait")
        module_state_name = models.INVERSE_BUILD_STATES[module.state]
        if module_state_name not in valid_states_to_cancel:
            log.error(
                "The user %s attempted to cancel a build in the %s state",
                username,
                module_state_name,
            )
            raise ValidationError(
                "To cancel a module build, it must be in one of the following states: {}"
                .format(", ".join(valid_states_to_cancel)))

        module.transition(db.session, conf, models.BUILD_STATES["failed"],
                          "Canceled by %s." % username)
        db.session.add(module)
        db.session.commit()

        return jsonify(module.extended_json(db.session, True,
                                            api_version)), 200
Example #8
def _dict_from_request(request):
    if "multipart/form-data" in request.headers.get("Content-Type", ""):
        data = request.form.to_dict()
    else:
        try:
            data = json.loads(request.get_data().decode("utf-8"))
        except Exception:
            log.exception("Invalid JSON submitted")
            raise ValidationError("Invalid JSON submitted")
    return data
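A minimal sketch of this helper in a Flask view; the app and route are assumptions for illustration:

from flask import Flask, jsonify, request

app = Flask(__name__)

@app.route("/module-builds/", methods=["POST"])  # hypothetical route
def submit_build():
    # Accepts either a JSON body or a multipart/form-data submission.
    data = _dict_from_request(request)
    return jsonify(data), 201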
Example #9
    def process_message(self, event_info):
        # Choose a handler for this message
        handler, build = self._map_message(db_session, event_info)

        if handler is None:
            log.debug("No event handler associated with msg %s",
                      event_info["msg_id"])
            return

        idx = "%s: %s, %s" % (handler.__name__, event_info["event"],
                              event_info["msg_id"])

        if handler is no_op_handler:
            log.debug("Handler is NO_OP: %s", idx)
            return

        if not build:
            log.debug("No module associated with msg %s", event_info["msg_id"])
            return

        MBSConsumer.current_module_build_id = build.id

        log.info("Calling %s", idx)

        kwargs = event_info.copy()
        kwargs.pop("event")

        try:
            if conf.celery_broker_url:
                # handlers are also Celery tasks, when celery_broker_url is configured,
                # call "delay" method to run the handlers as Celery async tasks
                func = getattr(handler, "delay")
                func(**kwargs)
            else:
                handler(**kwargs)
        except Exception as e:
            log.exception("Could not process message handler.")
            db_session.rollback()
            db_session.refresh(build)
            build.transition(
                db_session,
                conf,
                state=models.BUILD_STATES["failed"],
                state_reason=str(e),
                failure_type="infra",
            )
            db_session.commit()

            # Re-raise so the caller can react when an error occurs.
            raise
        finally:
            MBSConsumer.current_module_build_id = None
            log.debug("Done with %s", idx)
Example #10
    def _greenwave_query(self, query_type, payload=None):
        """
        Make a query to greenwave
        :param query_type: will be part of url
        :type query_type: str
        :param payload: request payload used in 'decision' query
        :type payload: str
        :return: response
        :rtype: dict
        """
        query_func = requests.post if payload else requests.get
        kwargs = {
            "url": "{0}/{1}".format(self.url, query_type),
            "timeout": self.timeout
        }

        if payload:
            kwargs["headers"] = {"Content-Type": "application/json"}
            kwargs["data"] = payload

        try:
            response = query_func(**kwargs)
        except requests.exceptions.Timeout:
            raise GreenwaveError("Greenwave request timed out")
        except Exception as exc:
            error_message = "Unspecified greenwave request error " \
                            '(original exception was: "{0}")'.format(str(exc))
            log.exception(error_message)
            raise GreenwaveError(error_message)

        try:
            resp_json = response.json()
        except ValueError:
            log.debug("Greenwave response content (status {0}): {1}".format(
                response.status_code, response.text))
            raise GreenwaveError("Greenwave returned invalid JSON.")

        log.debug('Query to Greenwave (%s) result: status=%d, content="%s"',
                  kwargs["url"], response.status_code, resp_json)

        if response.status_code == 200:
            return resp_json

        try:
            err_msg = resp_json["message"]
        except KeyError:
            err_msg = response.text
        raise GreenwaveError(
            "Greenwave returned {0} status code. Message: {1}".format(
                response.status_code, err_msg))
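For the 'decision' branch mentioned in the docstring, a hedged sketch of a call; gw stands for an instance of this class, and the field values are illustrative (the field names follow Greenwave's decision API):

import json

payload = json.dumps({
    "decision_context": "osci_compose_gate_modules",  # illustrative value
    "product_version": "rhel-8",                      # illustrative value
    "subject_type": "redhat-module",
    "subject_identifier": "mymodule-master-20200101000000.abcd1234",
})
decision = gw._greenwave_query("decision", payload)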
Example #11
def load_mmd(yaml, is_file=False):
    if not yaml:
        raise UnprocessableEntity('The input modulemd was empty')

    target_mmd_version = Modulemd.ModuleStreamVersionEnum.TWO
    try:
        if is_file:
            mmd = Modulemd.ModuleStream.read_file(yaml, True)
        else:
            mmd = Modulemd.ModuleStream.read_string(to_text_type(yaml), True)
        mmd.validate()
        if mmd.get_mdversion() < target_mmd_version:
            mmd = mmd.upgrade(target_mmd_version)
        elif mmd.get_mdversion() > target_mmd_version:
            log.error("Encountered a modulemd file with the version %d", mmd.get_mdversion())
            raise UnprocessableEntity(
                "The modulemd version cannot be greater than {}".format(target_mmd_version))
    except ModuleMDError as e:
        not_found = False
        if is_file:
            error = "The modulemd {} is invalid.".format(os.path.basename(yaml))
            if os.path.exists(yaml):
                with open(yaml, "rt") as yaml_hdl:
                    log.debug("Modulemd that failed to load:\n%s", yaml_hdl.read())
            else:
                not_found = True
                error = "The modulemd file {} was not found.".format(os.path.basename(yaml))
                log.error("The modulemd file %s was not found.", yaml)
        else:
            error = "The modulemd is invalid."
            log.debug("Modulemd that failed to load:\n%s", yaml)

        if "modulemd-error-quark: " in str(e):
            error = "{} The error was '{}'.".format(
                error, str(e).split("modulemd-error-quark: ")[-1])
        elif "Unknown ModuleStream version" in str(e):
            error = (
                "{}. The modulemd version can't be greater than {}."
                .format(error, target_mmd_version)
            )
        elif not_found is False:
            error = "{} Please verify the syntax is correct.".format(error)

        log.exception(error)
        raise UnprocessableEntity(error)

    return mmd
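Two minimal invocations, assuming yaml_string holds a modulemd v2 document; the file path is hypothetical:

mmd = load_mmd(yaml_string)                         # parse from a string
mmd = load_mmd("/tmp/modulemd.yaml", is_file=True)  # parse from a file on disk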
Example #12
    def get_user_membership(self, uid):
        """ Gets the group membership of a user
        :param uid: a string of the uid of the user
        :return: a list of common names of the posixGroups the user is a member of
        """
        ldap_filter = "(memberUid={0})".format(uid)
        # Only get the groups in the base container/OU
        self.connection.search(self.base_dn,
                               ldap_filter,
                               search_scope=ldap3.LEVEL,
                               attributes=["cn"])
        groups = self.connection.response
        try:
            return [group["attributes"]["cn"][0] for group in groups]
        except KeyError:
            log.exception(
                "The LDAP groups could not be determined based on the search results "
                'of "{0}"'.format(str(groups)))
            return []
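This method assumes an established ldap3 connection and base DN; a minimal sketch of the setup it expects (server URI, base DN, and uid are assumptions):

import ldap3

server = ldap3.Server("ldaps://ldap.example.com")      # hypothetical server
connection = ldap3.Connection(server, auto_bind=True)
connection.search("ou=groups,dc=example,dc=com",       # hypothetical base DN
                  "(memberUid=jdoe)",                  # same filter shape as above
                  search_scope=ldap3.LEVEL,            # only the base container/OU
                  attributes=["cn"])
print([group["attributes"]["cn"][0] for group in connection.response])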
Example #13
def _scm_get_latest(pkg):
    try:
        # If the modulemd specifies that the 'f25' branch is what
        # we want to pull from, we need to resolve that f25 branch
        # to the specific commit available at the time of
        # submission (now).
        repo = pkg.get_repository()
        ref = pkg.get_ref()
        log.debug("Getting the commit hash for the ref %s on the repo %s", ref,
                  repo)
        pkgref = module_build_service.common.scm.SCM(repo).get_latest(ref)
    except Exception as e:
        log.exception(e)
        return {
            "error": "Failed to get the latest commit for %s#%s"
                     % (pkg.get_repository(), pkg.get_ref())
        }

    return {"pkg_name": pkg.get_name(), "pkg_ref": pkgref, "error": None}
Example #14
    def consume(self, message):
        monitor.messaging_rx_counter.inc()

        # Sometimes, the messages put into our queue are artificially put there
        # by other parts of our own codebase.  If they are already abstracted
        # messages, then just use them as-is.  If they are not already
        # instances of our message abstraction base class, then first transform
        # them before proceeding.
        if "event" in message:
            event_info = message
        else:
            try:
                event_info = self.get_abstracted_event_info(message)
                self.validate_event(event_info)
            except IgnoreMessage as e:
                log.debug(str(e))
                return

        if event_info is None:
            return

        # Primary work is done here.
        try:
            self.process_message(event_info)
            monitor.messaging_rx_processed_ok_counter.inc()
        except sqlalchemy.exc.OperationalError as error:
            monitor.messaging_rx_failed_counter.inc()
            if "could not translate host name" in str(error):
                log.exception(
                    "SQLAlchemy can't resolve DNS records. Scheduling fedmsg-hub to shutdown."
                )
                self.shutdown()
            else:
                raise
        except Exception:
            monitor.messaging_rx_failed_counter.inc()
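            # No logging needed here: process_message() (Example #9) already
            # logs the failure and marks the build as failed before re-raising.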
        finally:
            db_session.remove()

        if self.stop_condition and self.stop_condition(message):
            self.shutdown()
Example #15
    def koji_import(self, devel=False):
        """This method imports given module into the configured koji instance as
        a content generator based build

        Raises an exception when error is encountered during import

        :param bool devel: True if the "-devel" module should be created and imported.
            The "-devel" module build contains only the RPMs which are normally filtered
            from the module build. If set to False, normal module build respecting the
            filters is created and imported.
        """
        self.devel = devel
        session = get_session(self.config)
        self._load_koji_tag(session)

        file_dir = self._prepare_file_directory()
        metadata = self._get_content_generator_metadata(file_dir)
        try:
            serverdir = self._upload_outputs(session, metadata, file_dir)
            try:
                build_info = session.CGImport(metadata, serverdir)
            except koji.GenericError as e:
                if "Build already exists" not in str(e):
                    raise
                log.warning("Failed to import content generator")
                build_info = None
            if conf.koji_cg_tag_build:
                self._tag_cg_build()
            if build_info is not None:
                log.info("Content generator import done.")
                log.debug(json.dumps(build_info, sort_keys=True, indent=4))

                # Only remove the logs if CG import was successful.  If it fails,
                # then we want to keep them around for debugging.
                log.info("Removing %r", file_dir)
                shutil.rmtree(file_dir)
        except Exception as e:
            log.exception("Content generator import failed: %s", e)
            raise
Example #16
def get_user_oidc(request):
    """
    Returns the client's username and groups based on the OIDC token provided.
    """
    _load_secrets()

    if "authorization" not in request.headers:
        raise Unauthorized("No 'authorization' header found.")

    header = request.headers["authorization"].strip()
    prefix = "Bearer "
    if not header.startswith(prefix):
        raise Unauthorized("Authorization headers must start with %r" % prefix)

    token = header[len(prefix):].strip()
    try:
        data = _get_token_info(token)
    except Exception as e:
        error = "Cannot verify OIDC token: %s" % str(e)
        log.exception(error)
        raise Unauthorized(error)

    if not data or "active" not in data or not data["active"]:
        raise Unauthorized("OIDC token invalid or expired.")

    if "OIDC_REQUIRED_SCOPE" not in app.config:
        raise Forbidden("OIDC_REQUIRED_SCOPE must be set in server config.")

    presented_scopes = data["scope"].split(" ")
    required_scopes = [
        "openid",
        "https://id.fedoraproject.org/scope/groups",
        app.config["OIDC_REQUIRED_SCOPE"],
    ]
    for scope in required_scopes:
        if scope not in presented_scopes:
            raise Unauthorized("Required OIDC scope %r not present: %r" %
                               (scope, presented_scopes))

    try:
        extended_data = _get_user_info(token)
    except Exception:
        error = "OpenIDC auth error: Cannot determine the user's groups"
        log.exception(error)
        raise Unauthorized(error)

    username = data["username"]
    # If the user is part of the whitelist, then the group membership check is skipped
    if username in conf.allowed_users:
        groups = set()
    else:
        try:
            groups = set(extended_data["groups"])
        except Exception:
            error = "Could not find groups in UserInfo from OIDC"
            log.exception("%s (extended_data: %s)", error, extended_data)
            raise Unauthorized(error)

    return username, groups
Example #17
    def default_buildroot_groups(cls, db_session, module):
        try:
            mmd = module.mmd()
            resolver = GenericResolver.create(db_session, conf)

            # Resolve default buildroot groups using the MBS, but only for
            # non-local modules.
            groups = resolver.resolve_profiles(mmd,
                                               ("buildroot", "srpm-buildroot"))
            groups = {
                "build": groups["buildroot"],
                "srpm-build": groups["srpm-buildroot"]
            }
        except ValueError:
            reason = "Failed to gather buildroot groups from SCM."
            log.exception(reason)
            module.transition(db_session,
                              conf,
                              state=BUILD_STATES["failed"],
                              state_reason=reason,
                              failure_type="user")
            db_session.commit()
            raise
        return groups
Example #18
def init(msg_id, module_build_id, module_build_state):
    """Called whenever a module enters the 'init' state.

    :param str msg_id: the original id of the message being handled, which is
        received from message bus.
    :param int module_build_id: the module build id.
    :param int module_build_state: the module build state.
    """
    build = models.ModuleBuild.get_by_id(db_session, module_build_id)

    state_init = models.BUILD_STATES["init"]
    if module_build_state == state_init and build.state != state_init:
        log.warning("Module build %r has moved to %s state already.", build,
                    models.INVERSE_BUILD_STATES[build.state])
        log.warning(
            "Ignore this message %s. Is there something wrong with the frontend"
            " that sends duplicate messages?", msg_id)
        return

    # for MockModuleBuilder, set build logs dir to mock results dir
    # before build_logs start
    if conf.system == "mock":
        build_tag_name = generate_module_build_koji_tag(build)
        mock_resultsdir = os.path.join(conf.mock_resultsdir, build_tag_name)
        if not os.path.exists(mock_resultsdir):
            os.makedirs(mock_resultsdir)
        build_logs.build_logs_dir = mock_resultsdir

    build_logs.start(db_session, build)
    log.info("Start to handle %s which is in init state.",
             build.mmd().get_nsvc())

    error_msg = ""
    failure_reason = "unspec"
    try:
        mmd = build.mmd()
        record_module_build_arches(mmd, build)
        arches = [arch.name for arch in build.arches]
        defaults_added = add_default_modules(mmd)

        # Get map of packages that have SRPM overrides
        srpm_overrides = get_module_srpm_overrides(build)
        # Format the modulemd by putting in defaults and replacing streams that
        # are branches with commit hashes
        format_mmd(mmd, build.scmurl, build, db_session, srpm_overrides)
        record_component_builds(mmd, build)

        # The ursine.handle_stream_collision_modules is Koji specific.
        # It is also run only when Ursa Prime is not enabled for the base
        # module (`if not defaults_added`).
        if conf.system in ["koji", "test"] and not defaults_added:
            handle_stream_collision_modules(mmd)

        # Sets xmd["mbs"]["ursine_rpms"] with RPMs from the buildrequired base modules which
        # conflict with the RPMs from other buildrequired modules. This is done to prefer modular
        # RPMs over base module RPMs even if their NVR is lower.
        if conf.system in ("koji", "test"):
            handle_collisions_with_base_module_rpms(mmd, arches)
        else:
            log.warning(
                "The necessary conflicts could not be generated due to RHBZ#1693683. "
                "Some RPMs from the base modules (%s) may end up being used over modular RPMs. "
                "This may result in different behavior than a production build.",
                ", ".join(conf.base_module_names))

        mmd = record_filtered_rpms(mmd)
        build.modulemd = mmd_to_str(mmd)
        build.transition(db_session, conf, models.BUILD_STATES["wait"])
    # Catch custom exceptions that we can expose to the user
    except (UnprocessableEntity, Forbidden, ValidationError,
            RuntimeError) as e:
        log.exception(str(e))
        error_msg = str(e)
        failure_reason = "user"
    except (xmlrpclib.ProtocolError, koji.GenericError) as e:
        log.exception(str(e))
        error_msg = 'Koji communication error: "{0}"'.format(str(e))
        failure_reason = "infra"
    except Exception as e:
        log.exception(str(e))
        error_msg = "An unknown error occurred while validating the modulemd"
        failure_reason = "user"
    else:
        db_session.add(build)
        db_session.commit()
    finally:
        if error_msg:
            # Rollback changes underway
            db_session.rollback()
            build.transition(
                db_session,
                conf,
                models.BUILD_STATES["failed"],
                state_reason=error_msg,
                failure_type=failure_reason,
            )
            db_session.commit()
Example #19
def _get_rpms_in_external_repo(repo_url, arches, cache_dir_name):
    """
    Get the available RPMs in the external repo for the provided arches.

    :param str repo_url: the URL of the external repo with the "$arch" variable included
    :param list arches: the list of arches to query the external repo for
    :param str cache_dir_name: the cache directory name under f"{conf.cache_dir}/dnf"
    :return: a set of the RPM NEVRAs
    :rtype: set
    :raises RuntimeError: if the cache is not writable or the external repo couldn't be loaded
    :raises ValueError: if there is no "$arch" variable in the repo URL
    """
    if "$arch" not in repo_url:
        raise ValueError(
            "The external repo {} does not contain the $arch variable".format(
                repo_url))

    base = dnf.Base()
    try:
        dnf_conf = base.conf
        # Expire the metadata right away so that when a repo is loaded, it will always check to
        # see if the external repo has been updated
        dnf_conf.metadata_expire = 0

        cache_location = os.path.join(conf.cache_dir, "dnf", cache_dir_name)
        try:
            # exist_ok=True can't be used in Python 2
            os.makedirs(cache_location, mode=0o0770)
        except OSError as e:
            # Don't fail if the directories already exist
            if e.errno != errno.EEXIST:
                log.exception("Failed to create the cache directory %s",
                              cache_location)
                raise RuntimeError("The MBS cache is not writeable.")

        # Tell DNF to use the cache directory
        dnf_conf.cachedir = cache_location
        # Don't skip repos that can't be synchronized
        dnf_conf.skip_if_unavailable = False
        dnf_conf.timeout = conf.dnf_timeout
        # Get rid of everything to be sure it's a blank slate. This doesn't delete the cached repo
        # data.
        base.reset(repos=True, goal=True, sack=True)

        # Add a separate repo for each architecture
        for arch in arches:
            # Convert arch to canon_arch. This handles cases where Koji "i686" arch is mapped to
            # "i386" when generating RPM repository.
            canon_arch = koji.canonArch(arch)
            repo_name = "repo_{}".format(canon_arch)
            repo_arch_url = repo_url.replace("$arch", canon_arch)
            base.repos.add_new_repo(
                repo_name,
                dnf_conf,
                baseurl=[repo_arch_url],
                minrate=conf.dnf_minrate,
            )

        try:
            # Load the repos in parallel
            base.update_cache()
        except dnf.exceptions.RepoError:
            msg = "Failed to load the external repos"
            log.exception(msg)
            raise RuntimeError(msg)

        # dnf will not always raise an error on repo failures, so we check explicitly
        for repo_name in base.repos:
            if not base.repos[repo_name].metadata:
                msg = "Failed to load metadata for repo %s" % repo_name
                log.error(msg)
                raise RuntimeError(msg)

        base.fill_sack(load_system_repo=False)

        # Return all the available RPMs
        nevras = set()
        for rpm in base.sack.query().available():
            rpm_dict = {
                "arch": rpm.arch,
                "epoch": rpm.epoch,
                "name": rpm.name,
                "release": rpm.release,
                "version": rpm.version,
            }
            nevra = kobo.rpmlib.make_nvra(rpm_dict, force_epoch=True)
            nevras.add(nevra)
    finally:
        base.close()

    return nevras
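A hedged invocation sketch; the repo URL and cache name are made up, and the literal "$arch" placeholder in the URL is required:

nevras = _get_rpms_in_external_repo(
    "https://repos.example.com/myrepo/$arch/os/",  # hypothetical URL
    ["x86_64", "aarch64"],
    "myrepo-cache",                                # hypothetical cache dir name
)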
Example #20
def runtimeerror_error(e):
    """Flask error handler for RuntimeError exceptions"""
    log.exception("RuntimeError exception raised")
    return json_error(500, "Internal Server Error", str(e))
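The handler still has to be attached to an application; a minimal hedged sketch of one way to register it, assuming a Flask app object:

app.register_error_handler(RuntimeError, runtimeerror_error)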
Example #21
def wait(msg_id, module_build_id, module_build_state):
    """ Called whenever a module enters the 'wait' state.

    We transition to this state shortly after a modulebuild is first requested.

    All we do here is request preparation of the buildroot.
    The kicking off of individual component builds is handled elsewhere,
    in module_build_service.schedulers.handlers.repos.

    :param str msg_id: the original id of the message being handled which is
        received from the message bus.
    :param int module_build_id: the module build id.
    :param int module_build_state: the module build state.
    """
    build = models.ModuleBuild.get_by_id(db_session, module_build_id)

    log.info("Found build=%r from message" % build)
    log.debug("%r", build.modulemd)

    if build.state != module_build_state:
        log.warning(
            "Note that retrieved module state %r doesn't match message module state %r",
            build.state,
            module_build_state,
        )
        # This is ok.. it's a race condition we can ignore.
        pass

    try:
        build_deps = get_module_build_dependencies(build)
    except ValueError:
        reason = "Failed to get module info from MBS. Max retries reached."
        log.exception(reason)
        build.transition(db_session,
                         conf,
                         state=models.BUILD_STATES["failed"],
                         state_reason=reason,
                         failure_type="infra")
        db_session.commit()
        raise

    tag = generate_module_build_koji_tag(build)
    log.debug("Found tag=%s for module %r" % (tag, build))
    # Hang on to this information for later.  We need to know which build is
    # associated with which koji tag, so that when their repos are regenerated
    # in koji we can figure out for which module build that event is
    # relevant.
    log.debug("Assigning koji tag=%s to module build" % tag)
    build.koji_tag = tag

    if build.scratch:
        log.debug(
            "Assigning a Content Generator build koji tag is skipped for scratch module builds."
        )
    elif conf.koji_cg_tag_build:
        cg_build_koji_tag = get_content_generator_build_koji_tag(build_deps)
        log.debug(
            "Assigning Content Generator build koji tag=%s to module build",
            cg_build_koji_tag)
        build.cg_build_koji_tag = cg_build_koji_tag
    else:
        log.debug(
            "Tagging module builds during Content Generator import into Koji is disabled."
        )
        log.debug(
            "Skipping assignment of a Content Generator build koji tag to this module build.")

    builder = GenericBuilder.create_from_module(db_session, build, conf)

    log.debug(
        "Adding dependencies %s into buildroot for module %s:%s:%s",
        build_deps.keys(),
        build.name,
        build.stream,
        build.version,
    )
    builder.buildroot_add_repos(build_deps)

    if not build.component_builds:
        log.info("There are no components in module %r, skipping build" %
                 build)
        build.transition(db_session, conf, state=models.BUILD_STATES["build"])
        db_session.add(build)
        db_session.commit()
        # Return a KojiRepoChange message so that the build can be transitioned to done
        # in the repos handler
        from module_build_service.scheduler.handlers.repos import done as repos_done_handler
        events.scheduler.add(repos_done_handler,
                             ("fake_msg", builder.module_build_tag["name"]))
        return

    # If all components in module build will be reused, we don't have to build
    # module-build-macros, because there won't be any build done.
    if attempt_to_reuse_all_components(builder, build):
        log.info(
            "All components have been reused for module %r, skipping build" %
            build)
        build.transition(db_session, conf, state=models.BUILD_STATES["build"])
        db_session.add(build)
        db_session.commit()
        return []

    log.debug("Starting build batch 1")
    build.batch = 1
    db_session.commit()

    artifact_name = "module-build-macros"

    component_build = models.ComponentBuild.from_component_name(
        db_session, artifact_name, build.id)
    srpm = builder.get_disttag_srpm(disttag=".%s" %
                                    get_rpm_release(db_session, build),
                                    module_build=build)
    if not component_build:
        component_build = models.ComponentBuild(
            module_id=build.id,
            package=artifact_name,
            format="rpms",
            scmurl=srpm,
            batch=1,
            build_time_only=True,
        )
        db_session.add(component_build)
        # Commit and refresh so that the SQLAlchemy relationships are available
        db_session.commit()
        db_session.refresh(component_build)
        recovered = builder.recover_orphaned_artifact(component_build)
        if recovered:
            log.info("Found an existing module-build-macros build")
        # There was no existing artifact found, so lets submit the build instead
        else:
            task_id, state, reason, nvr = builder.build(
                artifact_name=artifact_name, source=srpm)
            component_build.task_id = task_id
            component_build.state = state
            component_build.reason = reason
            component_build.nvr = nvr
    elif not component_build.is_completed:
        # It's possible that the build succeeded in the builder but some other step failed which
        # caused module-build-macros to be marked as failed in MBS, so check to see if it exists
        # first
        recovered = builder.recover_orphaned_artifact(component_build)
        if recovered:
            log.info("Found an existing module-build-macros build")
        else:
            task_id, state, reason, nvr = builder.build(
                artifact_name=artifact_name, source=srpm)
            component_build.task_id = task_id
            component_build.state = state
            component_build.reason = reason
            component_build.nvr = nvr

    db_session.add(component_build)
    build.transition(db_session, conf, state=models.BUILD_STATES["build"])
    db_session.add(build)
    db_session.commit()

    # We always have to regenerate the repository.
    if conf.system == "koji":
        log.info("Regenerating the repository")
        task_id = builder.koji_session.newRepo(
            builder.module_build_tag["name"])
        build.new_repo_task_id = task_id
        db_session.commit()
    else:
        from module_build_service.scheduler.handlers.repos import done as repos_done_handler
        events.scheduler.add(repos_done_handler,
                             ("fake_msg", builder.module_build_tag["name"]))
Example #22
    def _koji_rpms_in_tag(self, tag):
        """ Return the list of koji rpms in a tag. """
        log.debug("Listing rpms in koji tag %s", tag)
        session = get_session(self.config, login=False)

        try:
            rpms, builds = session.listTaggedRPMS(tag, latest=True)
        except koji.GenericError:
            log.exception("Failed to list rpms in tag %r", tag)
            # If the tag doesn't exist.. then there are no rpms in that tag.
            return []

        # Module does not contain any RPM, so return an empty list.
        if not rpms:
            return []

        # Get the exclusivearch, excludearch and license data for each RPM.
        # The exclusivearch and excludearch lists are set in source RPM from which the RPM
        # was built.
        # Create temporary dict with source RPMs in rpm_id:rpms_list_index format.
        src_rpms = {}
        binary_rpms = {}
        for rpm in rpms:
            if rpm["arch"] == "src":
                src_rpms[rpm["id"]] = rpm
            else:
                binary_rpms[rpm["id"]] = rpm
        # Prepare the arguments for Koji multicall.
        # We will call session.getRPMHeaders(...) for each SRC RPM to get exclusivearch,
        # excludearch and license headers.
        multicall_kwargs = [
            {"rpmID": rpm_id, "headers": ["exclusivearch", "excludearch", "license"]}
            for rpm_id in src_rpms.keys()
        ]
        # For each binary RPM, we only care about the "license" header.
        multicall_kwargs += [
            {"rpmID": rpm_id, "headers": ["license"]} for rpm_id in binary_rpms.keys()
        ]
        rpms_headers = koji_retrying_multicall_map(
            session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs
        )

        # Temporary dict with build_id as a key to find builds easily.
        builds = {build["build_id"]: build for build in builds}

        # Create a mapping of build IDs to SRPM NEVRAs so that the for loop below can directly
        # access these values when adding the `srpm_nevra` key to the returned RPMs
        build_id_to_srpm_nevra = {
            srpm["build_id"]: kobo.rpmlib.make_nvra(srpm, force_epoch=True)
            for srpm in src_rpms.values()
        }
        # Handle the multicall result. For each build associated with the source RPM,
        # store the exclusivearch and excludearch lists. For each RPM, store the 'license' and
        # also other useful data from the Build associated with the RPM.
        for rpm, headers in zip(chain(src_rpms.values(), binary_rpms.values()), rpms_headers):
            if not headers:
                raise RuntimeError("No RPM headers received from Koji for RPM %s" % rpm["name"])
            if "license" not in headers:
                raise RuntimeError(
                    "No RPM 'license' header received from Koji for RPM %s" % rpm["name"])
            build = builds[rpm["build_id"]]
            if "exclusivearch" in headers and "excludearch" in headers:
                build["exclusivearch"] = headers["exclusivearch"]
                build["excludearch"] = headers["excludearch"]

            rpm["license"] = headers["license"]
            rpm["srpm_name"] = build["name"]
            rpm["srpm_nevra"] = build_id_to_srpm_nevra[rpm["build_id"]]
            rpm["exclusivearch"] = build["exclusivearch"]
            rpm["excludearch"] = build["excludearch"]

        return rpms
Example #23
def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_of_kwargs=None):
    """
    Calls the `koji_session_fnc` using Koji multicall feature N times based on the list of
    arguments passed in `list_of_args` and `list_of_kwargs`.
    Returns a list of responses ordered the same way as the input args/kwargs. In case of
    an error, the error message is logged and None is returned.

    For example to get the package ids of "httpd" and "apr" packages:
        ids = koji_multicall_map(session, session.getPackageID, ["httpd", "apr"])
        # ids is now [280, 632]

    :param KojiSession koji_session: KojiSession to use for multicall.
    :param object koji_session_fnc: Python object representing the KojiSession method to call.
    :param list list_of_args: List of args which are passed to each call of koji_session_fnc.
    :param list list_of_kwargs: List of kwargs which are passed to each call of koji_session_fnc.
    """
    if list_of_args is None and list_of_kwargs is None:
        raise ProgrammingError("One of list_of_args or list_of_kwargs must be set.")

    if (
        type(list_of_args) not in [type(None), list]
        or type(list_of_kwargs) not in [type(None), list]
    ):
        raise ProgrammingError("list_of_args and list_of_kwargs must be list or None.")

    if list_of_kwargs is None:
        list_of_kwargs = [{}] * len(list_of_args)
    if list_of_args is None:
        list_of_args = [[]] * len(list_of_kwargs)

    if len(list_of_args) != len(list_of_kwargs):
        raise ProgrammingError("Length of list_of_args and list_of_kwargs must be the same.")

    koji_session.multicall = True
    for args, kwargs in zip(list_of_args, list_of_kwargs):
        if type(args) != list:
            args = [args]
        if type(kwargs) != dict:
            raise ProgrammingError("Every item in list_of_kwargs must be a dict")
        koji_session_fnc(*args, **kwargs)

    try:
        responses = koji_session.multiCall(strict=True)
    except Exception:
        log.exception(
            "Exception raised for multicall of method %r with args %r, %r:",
            koji_session_fnc, args, kwargs,
        )
        return None

    if not responses:
        log.error("Koji did not return response for multicall of %r", koji_session_fnc)
        return None
    if type(responses) != list:
        log.error(
            "Fault element was returned for multicall of method %r: %r", koji_session_fnc, responses
        )
        return None

    results = []

    # For the response specification, see
    # https://web.archive.org/web/20060624230303/http://www.xmlrpc.com/discuss/msgReader$1208?mode=topic
    # Relevant part of this:
    # Multicall returns an array of responses. There will be one response for each call in
    # the original array. The result will either be a one-item array containing the result value,
    # or a struct of the form found inside the standard <fault> element.
    for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
        if type(response) == list:
            if not response:
                log.error(
                    "Empty list returned for multicall of method %r with args %r, %r",
                    koji_session_fnc, args, kwargs
                )
                return None
            results.append(response[0])
        else:
            log.error(
                "Unexpected data returned for multicall of method %r with args %r, %r: %r",
                koji_session_fnc, args, kwargs, response
            )
            return None

    return results
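A usage sketch mirroring how Example #22 drives this helper with keyword arguments; the session and RPM IDs are assumptions:

multicall_kwargs = [
    {"rpmID": 123, "headers": ["license"]},  # illustrative RPM IDs
    {"rpmID": 456, "headers": ["license"]},
]
headers_list = koji_multicall_map(
    session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs)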