Example #1
    def _upload_outputs(self, session, metadata, file_dir):
        """
        Uploads output files to Koji hub.
        """
        to_upload = []
        for info in metadata["output"]:
            if info.get("metadata_only", False):
                continue
            localpath = os.path.join(file_dir, info["filename"])
            if not os.path.exists(localpath):
                err = "Cannot upload %s to Koji. No such file." % localpath
                log.error(err)
                raise RuntimeError(err)

            to_upload.append([localpath, info])

        # Create unique server directory.
        serverdir = "mbs/%r.%d" % (time.time(), self.module.id)

        for localpath, info in to_upload:
            log.info("Uploading %s to Koji" % localpath)
            session.uploadWrapper(localpath, serverdir, callback=None)
            log.info("Upload of %s to Koji done" % localpath)

        return serverdir
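For orientation, here is a minimal sketch of the metadata shape this method consumes. Only the keys actually read above (filename, metadata_only) matter; the remaining fields mirror the output entries built in Example #3 and are illustrative assumptions, not a definitive schema.

# Hypothetical Koji content-generator style metadata; only "filename" and
# "metadata_only" are read by _upload_outputs itself.
metadata = {
    "output": [
        {"filename": "modulemd.txt", "type": "file", "metadata_only": False},
        {"filename": "build.log", "type": "log", "metadata_only": False},
    ]
}
# serverdir = self._upload_outputs(session, metadata, "/path/to/file_dir")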
Example #2
    def post(self, api_version):
        # disable this API endpoint if no groups are defined
        if not conf.allowed_groups_to_import_module:
            log.error(
                "Import module API is disabled. Set 'ALLOWED_GROUPS_TO_IMPORT_MODULE'"
                " configuration value first.")
            raise Forbidden("Import module API is disabled.")

        # auth checks
        username, groups = module_build_service.web.auth.get_user(request)
        ModuleBuildAPI.check_groups(
            username,
            groups,
            allowed_groups=conf.allowed_groups_to_import_module)

        # process request using SCM handler
        handler = SCMHandler(request)
        handler.validate(skip_branch=True, skip_optional_params=True)

        mmd, _ = fetch_mmd(handler.data["scmurl"], mandatory_checks=False)
        build, messages = import_mmd(db.session, mmd)
        json_data = {
            "module": build.json(db.session, show_tasks=False),
            "messages": messages
        }

        # return 201 Created if we reach this point
        return jsonify(json_data), 201
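A hedged client-side sketch of calling this endpoint with the requests library; the host and API path are assumptions, but the scmurl field and the 201 response match the handler above.

import requests

# Hypothetical MBS host and endpoint path; adjust to your deployment.
resp = requests.post(
    "https://mbs.example.com/module-build-service/1/import-module/",
    json={"scmurl": "https://src.example.com/modules/testmodule.git?#0123abcd"},
)
assert resp.status_code == 201  # "201 Created", as returned above
print(resp.json()["messages"])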
Example #3
    def _get_output(self, output_path):
        ret = []
        for arch in self.arches + ["noarch", "src"]:
            mmd_dict = self._get_arch_mmd_output(output_path, arch)
            if mmd_dict:
                ret.append(mmd_dict)

        try:
            log_path = os.path.join(output_path, "build.log")
            with open(log_path, "rb") as build_log:
                checksum = hashlib.md5(build_log.read()).hexdigest()
            stat = os.stat(log_path)
            ret.append(
                {
                    u"buildroot_id": 1,
                    u"arch": u"noarch",
                    u"type": u"log",
                    u"filename": u"build.log",
                    u"filesize": stat.st_size,
                    u"checksum_type": u"md5",
                    u"checksum": checksum,
                }
            )
        except IOError:
            # no log file?
            log.error("No module build log file found. Excluding from import")

        return ret
Example #4
    def patch(self, api_version, id):
        username, groups = module_build_service.web.auth.get_user(request)

        try:
            r = json.loads(request.get_data().decode("utf-8"))
        except Exception:
            log.exception("Invalid JSON submitted")
            raise ValidationError("Invalid JSON submitted")

        if "owner" in r:
            if conf.no_auth is not True:
                raise ValidationError(
                    "The request contains 'owner' parameter, however NO_AUTH is not allowed"
                )
            elif username == "anonymous":
                username = r["owner"]

        self.check_groups(username, groups)

        module = models.ModuleBuild.query.filter_by(id=id).first()
        if not module:
            raise NotFound("No such module found.")

        if module.owner != username and not (conf.admin_groups & groups):
            raise Forbidden(
                "You are not owner of this build and therefore cannot modify it."
            )

        if not r.get("state"):
            log.error("Invalid JSON submitted")
            raise ValidationError("Invalid JSON submitted")

        state = r["state"]
        valid_input_states = ("failed", str(models.BUILD_STATES["failed"]))
        if state not in valid_input_states:
            raise ValidationError(
                "An invalid state was submitted. Valid states values are: {}".
                format(", ".join(valid_input_states)))

        valid_states_to_cancel = ("build", "init", "wait")
        module_state_name = models.INVERSE_BUILD_STATES[module.state]
        if module_state_name not in valid_states_to_cancel:
            log.error(
                "The user %s attempted to cancel a build in the %s state",
                username,
                module_state_name,
            )
            raise ValidationError(
                "To cancel a module build, it must be in one of the following states: {}"
                .format(", ".join(valid_states_to_cancel)))

        module.transition(db.session, conf, models.BUILD_STATES["failed"],
                          "Canceled by %s." % username)
        db.session.add(module)
        db.session.commit()

        return jsonify(module.extended_json(db.session, True,
                                            api_version)), 200
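Since the only accepted transition is a cancellation, the request body reduces to {"state": "failed"}. A minimal sketch of a client call (the host, path and build id are placeholders):

import requests

# Hypothetical URL; the handler accepts only "failed" or its numeric value.
resp = requests.patch(
    "https://mbs.example.com/module-build-service/1/module-builds/1234",
    json={"state": "failed"},
)
print(resp.status_code)  # 200 with the extended JSON of the canceled build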
Example #5
    def test_module_build_logs(self):
        """
        Tests that ModuleBuildLogs is logging properly to the build log file.
        """
        build = models.ModuleBuild.get_by_id(db_session, 2)

        # Initialize logging, get the build log path and remove it to
        # ensure we are not using some garbage from previous failed test.
        self.build_log.start(db_session, build)
        path = self.build_log.path(db_session, build)
        assert path[len(self.base):] == "/build-2.log"
        if os.path.exists(path):
            os.unlink(path)

        # Try logging without the MBSConsumer.current_module_build_id set.
        # No log file should be created.
        log.debug("ignore this test msg")
        log.info("ignore this test msg")
        log.warning("ignore this test msg")
        log.error("ignore this test msg")
        self.build_log.stop(build)
        assert not os.path.exists(path)

        # Try logging with current_module_build_id set to 1 and then to 2.
        # Only messages with current_module_build_id set to 2 should appear in
        # the log.
        self.build_log.start(db_session, build)
        MBSConsumer.current_module_build_id = 1
        log.debug("ignore this test msg1")
        log.info("ignore this test msg1")
        log.warning("ignore this test msg1")
        log.error("ignore this test msg1")

        MBSConsumer.current_module_build_id = 2
        log.debug("ignore this test msg2")
        log.info("ignore this test msg2")
        log.warning("ignore this test msg2")
        log.error("ignore this test msg2")

        self.build_log.stop(build)
        assert os.path.exists(path)
        with open(path, "r") as f:
            data = f.read()
            # Note that DEBUG is not present unless configured server-wide.
            for level in ["INFO", "WARNING", "ERROR"]:
                assert data.find(
                    "MBS - {0} - ignore this test msg2".format(level)) != -1

        # Try to log more messages when build_log for module 2 is stopped.
        # New messages should not appear in a log.
        MBSConsumer.current_module_build_id = 2
        log.debug("ignore this test msg3")
        log.info("ignore this test msg3")
        log.warning("ignore this test msg3")
        log.error("ignore this test msg3")
        self.build_log.stop(build)
        with open(path, "r") as f:
            data = f.read()
            assert data.find("ignore this test msg3") == -1
Example #6
def load_mmd(yaml, is_file=False):
    if not yaml:
        raise UnprocessableEntity('The input modulemd was empty')

    target_mmd_version = Modulemd.ModuleStreamVersionEnum.TWO
    try:
        if is_file:
            mmd = Modulemd.ModuleStream.read_file(yaml, True)
        else:
            mmd = Modulemd.ModuleStream.read_string(to_text_type(yaml), True)
        mmd.validate()
        if mmd.get_mdversion() < target_mmd_version:
            mmd = mmd.upgrade(target_mmd_version)
        elif mmd.get_mdversion() > target_mmd_version:
            log.error("Encountered a modulemd file with the version %d", mmd.get_mdversion())
            raise UnprocessableEntity(
                "The modulemd version cannot be greater than {}".format(target_mmd_version))
    except ModuleMDError as e:
        not_found = False
        if is_file:
            error = "The modulemd {} is invalid.".format(os.path.basename(yaml))
            if os.path.exists(yaml):
                with open(yaml, "rt") as yaml_hdl:
                    log.debug("Modulemd that failed to load:\n%s", yaml_hdl.read())
            else:
                not_found = True
                error = "The modulemd file {} was not found.".format(os.path.basename(yaml))
                log.error("The modulemd file %s was not found.", yaml)
        else:
            error = "The modulemd is invalid."
            log.debug("Modulemd that failed to load:\n%s", yaml)

        if "modulemd-error-quark: " in str(e):
            error = "{} The error was '{}'.".format(
                error, str(e).split("modulemd-error-quark: ")[-1])
        elif "Unknown ModuleStream version" in str(e):
            error = (
                "{}. The modulemd version can't be greater than {}."
                .format(error, target_mmd_version)
            )
        elif not_found is False:
            error = "{} Please verify the syntax is correct.".format(error)

        log.exception(error)
        raise UnprocessableEntity(error)

    return mmd
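A usage sketch, assuming the modulemd v2 document is available either as a string or as a file on disk:

# From a YAML string (older mdversions are upgraded to v2 automatically)...
mmd = load_mmd(yaml_string)
# ...or straight from a file.
mmd = load_mmd("/tmp/testmodule.yaml", is_file=True)
print(mmd.get_nsvc())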
Example #7
def delete_old_koji_targets():
    """
    Deletes targets older than `config.koji_target_delete_time` seconds
    from Koji to clean up after module builds.
    """
    if conf.system != "koji":
        return

    log.info("Looking for module builds which Koji target can be removed")

    now = datetime.utcnow()

    koji_session = get_session(conf)
    for target in koji_session.getBuildTargets():
        module = db_session.query(models.ModuleBuild).filter(
            models.ModuleBuild.koji_tag == target["dest_tag_name"],
            models.ModuleBuild.name.notin_(conf.base_module_names),
            models.ModuleBuild.state.notin_([
                models.BUILD_STATES["init"],
                models.BUILD_STATES["wait"],
                models.BUILD_STATES["build"],
            ]),
        ).options(
            load_only("time_completed"),
        ).first()

        if module is None:
            continue

        # Double-check that the target we are going to remove is prefixed
        # by our prefix, so we won't remove f26 when there is some garbage
        # in DB or Koji.
        for allowed_prefix in conf.koji_tag_prefixes:
            if target["name"].startswith(allowed_prefix + "-"):
                break
        else:
            log.error("Module %r has Koji target with not allowed prefix.", module)
            continue

        delta = now - module.time_completed
        if delta.total_seconds() > conf.koji_target_delete_time:
            log.info("Removing target of module %r", module)
            koji_session.deleteBuildTarget(target["id"])
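The prefix guard above relies on Python's for/else: the else branch runs only when the loop finished without hitting break. A standalone sketch of the same idiom, with placeholder prefixes standing in for conf.koji_tag_prefixes:

allowed_prefixes = ["module", "scrmod"]  # stand-ins for conf.koji_tag_prefixes

def has_allowed_prefix(target_name):
    for prefix in allowed_prefixes:
        if target_name.startswith(prefix + "-"):
            break
    else:
        # Reached only when no prefix matched, i.e. the loop was never broken.
        return False
    return True

assert has_allowed_prefix("module-testmodule-master-20190101")
assert not has_allowed_prefix("f26-build")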
Example #8
    def get_module_yaml(self):
        """
        Get full path to the module's YAML file.

        :return: path as a string
        :raises UnprocessableEntity: if the repository does not contain the modulemd file
        """
        if not self.sourcedir:
            raise ProgrammingError("Do .checkout() first.")

        path_to_yaml = os.path.join(self.sourcedir, (self.name + ".yaml"))
        try:
            with open(path_to_yaml):
                return path_to_yaml
        except IOError:
            log.error(
                "get_module_yaml: The SCM repository doesn't contain a modulemd file. "
                "Couldn't access: %s" % path_to_yaml)
            raise UnprocessableEntity(
                "The SCM repository doesn't contain a modulemd file")
Example #9
    def __init__(self):
        if not conf.ldap_uri:
            raise Forbidden("LDAP_URI must be set in server config.")
        if conf.ldap_groups_dn:
            self.base_dn = conf.ldap_groups_dn
        else:
            raise Forbidden("LDAP_GROUPS_DN must be set in server config.")

        if conf.ldap_uri.startswith("ldaps://"):
            tls = ldap3.Tls(ca_certs_file="/etc/pki/tls/certs/ca-bundle.crt",
                            validate=ssl.CERT_REQUIRED)
            server = ldap3.Server(conf.ldap_uri, use_ssl=True, tls=tls)
        else:
            server = ldap3.Server(conf.ldap_uri)
        self.connection = ldap3.Connection(server)
        try:
            self.connection.open()
        except ldap3.core.exceptions.LDAPSocketOpenError as error:
            log.error(
                'The connection to "{0}" failed. The following error was raised: {1}'
                .format(conf.ldap_uri, str(error)))
            raise Forbidden(
                "The connection to the LDAP server failed. Group membership couldn't be obtained."
            )
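A hedged sketch of how the connection opened here might be used to look up a user's groups with ldap3; this helper is not part of the original class, and the posixGroup/memberUid schema is an assumption.

    def get_user_groups(self, username):
        """Hypothetical helper: return the names of groups the user belongs to."""
        # memberUid is a common posixGroup attribute; adjust to your directory schema.
        self.connection.search(
            self.base_dn,
            "(memberUid={0})".format(username),
            attributes=["cn"],
        )
        return {entry.cn.value for entry in self.connection.entries}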
Example #10
def import_mmd(db_session, mmd, check_buildrequires=True):
    """
    Imports a new module build defined by `mmd` into the MBS database using `db_session`.
    If it already exists, it is updated.

    The ModuleBuild.koji_tag is set according to xmd['mbs']['koji_tag'].
    The ModuleBuild.state is set to "ready".
    The ModuleBuild.rebuild_strategy is set to "all".
    The ModuleBuild.owner is set to "mbs_import".

    :param db_session: SQLAlchemy session object.
    :param mmd: module metadata being imported into database.
    :type mmd: Modulemd.ModuleStream
    :param bool check_buildrequires: When True, checks that the buildrequires defined in the MMD
        have matching records in the `mmd["xmd"]["mbs"]["buildrequires"]` and also fills in
        the `ModuleBuild.buildrequires` according to this data.
    :return: module build (ModuleBuild),
             log messages collected during import (list)
    :rtype: tuple
    """
    from module_build_service.common import models

    xmd = mmd.get_xmd()
    # Set some defaults in xmd["mbs"] if they're not provided by the user
    if "mbs" not in xmd:
        xmd["mbs"] = {"mse": True}

    if not mmd.get_context():
        mmd.set_context(models.DEFAULT_MODULE_CONTEXT)

    # NSVC is used for logging purpose later.
    nsvc = mmd.get_nsvc()
    if nsvc is None:
        msg = "Both the name and stream must be set for the modulemd being imported."
        log.error(msg)
        raise UnprocessableEntity(msg)

    name = mmd.get_module_name()
    stream = mmd.get_stream_name()
    version = str(mmd.get_version())
    context = mmd.get_context()

    xmd_mbs = xmd["mbs"]

    disttag_marking = xmd_mbs.get("disttag_marking")

    # If it is a base module, then make sure the value that will be used in the RPM disttags
    # doesn't contain a dash since a dash isn't allowed in the release field of the NVR
    if name in conf.base_module_names:
        if disttag_marking and "-" in disttag_marking:
            msg = "The disttag_marking cannot contain a dash"
            log.error(msg)
            raise UnprocessableEntity(msg)
        if not disttag_marking and "-" in stream:
            msg = "The stream cannot contain a dash unless disttag_marking is set"
            log.error(msg)
            raise UnprocessableEntity(msg)

    virtual_streams = xmd_mbs.get("virtual_streams", [])

    # Verify that the virtual streams are the correct type
    if virtual_streams and (
        not isinstance(virtual_streams, list)
        or any(not isinstance(vs, string_types) for vs in virtual_streams)
    ):
        msg = "The virtual streams must be a list of strings"
        log.error(msg)
        raise UnprocessableEntity(msg)

    if check_buildrequires:
        deps = mmd.get_dependencies()
        if len(deps) > 1:
            raise UnprocessableEntity(
                "The imported module's dependencies list should contain just one element")

        if "buildrequires" not in xmd_mbs:
            # Always set buildrequires if it is not there, because
            # get_buildrequired_base_modules requires that xmd/mbs/buildrequires exist.
            xmd_mbs["buildrequires"] = {}
            mmd.set_xmd(xmd)

        if len(deps) > 0:
            brs = set(deps[0].get_buildtime_modules())
            xmd_brs = set(xmd_mbs["buildrequires"].keys())
            if brs - xmd_brs:
                raise UnprocessableEntity(
                    "The imported module buildrequires other modules, but the metadata in the "
                    'xmd["mbs"]["buildrequires"] dictionary is missing entries'
                )

    if "koji_tag" not in xmd_mbs:
        log.warning("'koji_tag' is not set in xmd['mbs'] for module {}".format(nsvc))
        log.warning("koji_tag will be set to None for imported module build.")

    # Log messages collected during import
    msgs = []

    # Get the ModuleBuild from DB.
    build = models.ModuleBuild.get_build_from_nsvc(db_session, name, stream, version, context)
    if build:
        msg = "Updating existing module build {}.".format(nsvc)
        log.info(msg)
        msgs.append(msg)
    else:
        build = models.ModuleBuild()
        db_session.add(build)

    build.name = name
    build.stream = stream
    build.version = version
    build.koji_tag = xmd_mbs.get("koji_tag")
    build.state = models.BUILD_STATES["ready"]
    build.modulemd = mmd_to_str(mmd)
    build.context = context
    build.owner = "mbs_import"
    build.rebuild_strategy = "all"
    now = datetime.utcnow()
    build.time_submitted = now
    build.time_modified = now
    build.time_completed = now
    if build.name in conf.base_module_names:
        build.stream_version = models.ModuleBuild.get_stream_version(stream)

    # Record the base modules this module buildrequires
    if check_buildrequires:
        for base_module in build.get_buildrequired_base_modules(db_session):
            if base_module not in build.buildrequires:
                build.buildrequires.append(base_module)

    build.update_virtual_streams(db_session, virtual_streams)

    db_session.commit()

    msg = "Module {} imported".format(nsvc)
    log.info(msg)
    msgs.append(msg)

    return build, msgs
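Usage mirrors the import endpoint in Example #2: parse the modulemd first, then import it inside an open database session. A short sketch (load_mmd is shown in Example #6):

mmd = load_mmd(yaml_string)
build, messages = import_mmd(db_session, mmd)
for msg in messages:
    log.info(msg)
print(build.koji_tag, build.state)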
Example #11
def koji_multicall_map(koji_session, koji_session_fnc, list_of_args=None, list_of_kwargs=None):
    """
    Calls the `koji_session_fnc` using Koji multicall feature N times based on the list of
    arguments passed in `list_of_args` and `list_of_kwargs`.
    Returns a list of responses in the same order as the input args/kwargs. In case of error,
    the error message is logged and None is returned.

    For example to get the package ids of "httpd" and "apr" packages:
        ids = koji_multicall_map(session, session.getPackageID, ["httpd", "apr"])
        # ids is now [280, 632]

    :param KojiSession koji_session: KojiSession to use for multicall.
    :param object koji_session_fnc: Python object representing the KojiSession method to call.
    :param list list_of_args: List of args which are passed to each call of koji_session_fnc.
    :param list list_of_kwargs: List of kwargs which are passed to each call of koji_session_fnc.
    """
    if list_of_args is None and list_of_kwargs is None:
        raise ProgrammingError("One of list_of_args or list_of_kwargs must be set.")

    if (
        type(list_of_args) not in [type(None), list]
        or type(list_of_kwargs) not in [type(None), list]
    ):
        raise ProgrammingError("list_of_args and list_of_kwargs must be list or None.")

    if list_of_kwargs is None:
        list_of_kwargs = [{}] * len(list_of_args)
    if list_of_args is None:
        list_of_args = [[]] * len(list_of_kwargs)

    if len(list_of_args) != len(list_of_kwargs):
        raise ProgrammingError("Length of list_of_args and list_of_kwargs must be the same.")

    koji_session.multicall = True
    for args, kwargs in zip(list_of_args, list_of_kwargs):
        if type(args) != list:
            args = [args]
        if type(kwargs) != dict:
            raise ProgrammingError("Every item in list_of_kwargs must be a dict")
        koji_session_fnc(*args, **kwargs)

    try:
        responses = koji_session.multiCall(strict=True)
    except Exception:
        log.exception(
            "Exception raised for multicall of method %r with args %r, %r:",
            koji_session_fnc, args, kwargs,
        )
        return None

    if not responses:
        log.error("Koji did not return response for multicall of %r", koji_session_fnc)
        return None
    if type(responses) != list:
        log.error(
            "Fault element was returned for multicall of method %r: %r", koji_session_fnc, responses
        )
        return None

    results = []

    # For the response specification, see
    # https://web.archive.org/web/20060624230303/http://www.xmlrpc.com/discuss/msgReader$1208?mode=topic
    # Relevant part of this:
    # Multicall returns an array of responses. There will be one response for each call in
    # the original array. The result will either be a one-item array containing the result value,
    # or a struct of the form found inside the standard <fault> element.
    for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
        if type(response) == list:
            if not response:
                log.error(
                    "Empty list returned for multicall of method %r with args %r, %r",
                    koji_session_fnc, args, kwargs
                )
                return None
            results.append(response[0])
        else:
            log.error(
                "Unexpected data returned for multicall of method %r with args %r, %r: %r",
                koji_session_fnc, args, kwargs, response
            )
            return None

    return results
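Besides the positional form shown in the docstring, per-call kwargs can be passed instead; a hedged sketch (the tag names are placeholders, and listTagged is a standard Koji hub call):

# One multicall that lists the builds tagged into two different tags.
tagged = koji_multicall_map(
    koji_session,
    koji_session.listTagged,
    list_of_kwargs=[
        {"tag": "module-foo-master-20200101-c1"},
        {"tag": "module-bar-master-20200101-c1"},
    ],
)
if tagged is None:
    log.error("The Koji multicall failed")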
Example #12
def submit_module_build(db_session, username, mmd, params):
    """
    Submits new module build.

    :param db_session: SQLAlchemy session object.
    :param str username: Username of the build's owner.
    :param Modulemd.ModuleStream mmd: Modulemd defining the build.
    :param dict params: the API parameters passed in by the user
    :rtype: list with ModuleBuild
    :return: List with submitted module builds.
    """
    log.debug(
        "Submitted %s module build for %s:%s:%s",
        ("scratch" if params.get("scratch", False) else "normal"),
        mmd.get_module_name(),
        mmd.get_stream_name(),
        mmd.get_version(),
    )

    raise_if_stream_ambigous = False
    default_streams = {}
    # For local builds, we want the user to choose the exact stream via default_streams
    # when there are multiple streams to choose from, and to raise an exception otherwise.
    if "local_build" in params:
        raise_if_stream_ambigous = True
    # Get the default_streams if set.
    if "default_streams" in params:
        default_streams = params["default_streams"]

    xmd = mmd.get_xmd()
    # We check if static contexts are enabled via the `contexts` property defined by the user
    # as a build option.
    static_context = "mbs_options" in xmd and "contexts" in xmd["mbs_options"]
    input_mmds = generate_mmds_from_static_contexts(mmd) if static_context else [mmd]

    mmds = []
    for mmd in input_mmds:
        validate_mmd(mmd)
        _apply_dep_overrides(mmd, params)
        _modify_buildtime_streams(db_session, mmd, resolve_base_module_virtual_streams)
        _process_support_streams(db_session, mmd, params)
        mmds += generate_expanded_mmds(db_session, mmd, raise_if_stream_ambigous,
                                       default_streams, static_context=static_context)

    if not mmds:
        raise ValidationError(
            "No dependency combination was satisfied. Please verify the "
            "buildrequires in your modulemd have previously been built."
        )
    modules = []

    # True if all module builds are skipped, meaning MBS will not actually rebuild
    # anything. To keep backward compatibility, we need to raise an exception
    # at the end of this method.
    all_modules_skipped = True

    for mmd in mmds:
        # Prefix the version of the modulemd based on the base module it buildrequires
        version = get_prefixed_version(mmd)
        mmd.set_version(version)
        nsvc = mmd.get_nsvc()

        log.debug("Checking whether module build already exists: %s.", nsvc)
        module = models.ModuleBuild.get_build_from_nsvc(db_session, *nsvc.split(":"))
        if module and not params.get("scratch", False):
            if module.state != models.BUILD_STATES["failed"]:
                log.info(
                    "Skipping rebuild of %s, only rebuild of modules in failed state is allowed.",
                    nsvc,
                )
                modules.append(module)
                continue

            rebuild_strategy = params.get("rebuild_strategy")
            if rebuild_strategy and module.rebuild_strategy != rebuild_strategy:
                raise ValidationError(
                    'You cannot change the module\'s "rebuild_strategy" when '
                    "resuming a module build"
                )

            log.debug("Resuming existing module build %r" % module)
            # Reset all component builds that didn't complete
            for component in module.component_builds:
                if not component.is_waiting_for_build and not component.is_completed:
                    component.state = None
                    component.state_reason = None
                    db_session.add(component)
            module.username = username
            prev_state = module.previous_non_failed_state
            if prev_state == models.BUILD_STATES["init"]:
                transition_to = models.BUILD_STATES["init"]
            else:
                transition_to = models.BUILD_STATES["wait"]
                module.batch = 0
            module.transition(db_session, conf, transition_to, "Resubmitted by %s" % username)
            db_session.commit()
            log.info("Resumed existing module build in previous state %s" % module.state)
        else:
            # make NSVC unique for every scratch build
            context_suffix = ""
            if params.get("scratch", False):
                log.debug("Checking for existing scratch module builds by NSVC")
                scrmods = models.ModuleBuild.get_scratch_builds_from_nsvc(
                    db_session, *nsvc.split(":"))
                scrmod_contexts = [scrmod.context for scrmod in scrmods]
                log.debug(
                    "Found %d previous scratch module build context(s): %s",
                    len(scrmods), ",".join(scrmod_contexts),
                )
                # append incrementing counter to context
                context_suffix = "_" + str(len(scrmods) + 1)
                mmd.set_context(mmd.get_context() + context_suffix)
            else:
                # In case the branch is defined, check whether user is allowed to submit
                # non-scratch build from this branch. Note that the branch is always defined
                # for official builds from SCM, because it is requested in views.py.
                branch = params.get("branch")
                if branch:
                    for regex in conf.scratch_build_only_branches:
                        branch_search = re.search(regex, branch)
                        if branch_search:
                            raise ValidationError(
                                "Only scratch module builds can be built from this branch."
                            )

            log.debug("Creating new module build")
            module = models.ModuleBuild.create(
                db_session,
                conf,
                name=mmd.get_module_name(),
                stream=mmd.get_stream_name(),
                version=str(mmd.get_version()),
                modulemd=mmd_to_str(mmd),
                scmurl=params.get("scmurl"),
                username=username,
                rebuild_strategy=params.get("rebuild_strategy"),
                reused_module_id=params.get("reuse_components_from"),
                scratch=params.get("scratch"),
                srpms=params.get("srpms"),
            )
            module.build_context, module.runtime_context, module.context, \
                module.build_context_no_bms = module.contexts_from_mmd(module.modulemd)

            xmd = mmd.get_xmd()
            if xmd["mbs"].get("static_context"):
                module.context = mmd.get_context()

            module.context += context_suffix
            db_session.commit()

            notify_on_module_state_change(
                # Note the state is "init" here...
                module.json(db_session, show_tasks=False)
            )

        all_modules_skipped = False
        modules.append(module)
        log.info('The user "%s" submitted the build "%s"', username, nsvc)

    if all_modules_skipped:
        err_msg = (
            "Module (state=%s) already exists. Only a new build, resubmission of "
            "a failed build or build against new buildrequirements is "
            "allowed." % module.state
        )
        log.error(err_msg)
        raise Conflict(err_msg)

    return modules
Example #13
    def validate(self):
        if ("modulemd" not in self.data
                and (not hasattr(request, "files") or "yaml" not in request.files)):
            log.error("Invalid file submitted")
            raise ValidationError("Invalid file submitted")
        self.validate_optional_params()
Example #14
    def validate(self, skip_branch=False, skip_optional_params=False):
        if "scmurl" not in self.data:
            log.error("Missing scmurl")
            raise ValidationError("Missing scmurl")

        url = self.data["scmurl"]
        allowed_prefix = any(url.startswith(prefix) for prefix in conf.scmurls)
        if not conf.allow_custom_scmurls and not allowed_prefix:
            log.error("The submitted scmurl %r is not allowed" % url)
            raise Forbidden("The submitted scmurl %s is not allowed" % url)

        if not get_scm_url_re().match(url):
            log.error("The submitted scmurl %r is not valid" % url)
            raise ValidationError("The submitted scmurl %s is not valid" % url)

        if not skip_branch and "branch" not in self.data:
            log.error("Missing branch")
            raise ValidationError("Missing branch")

        if "module_name" in self.data:
            log.error(
                "Module name override is only allowed when a YAML file is submitted"
            )
            raise ValidationError(
                "Module name override is only allowed when a YAML file is submitted"
            )
        if "module_stream" in self.data:
            log.error(
                "Stream name override is only allowed when a YAML file is submitted"
            )
            raise ValidationError(
                "Stream name override is only allowed when a YAML file is submitted"
            )

        if not skip_optional_params:
            self.validate_optional_params()
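For reference, a hedged sketch of the minimal request data that passes this validation; the URL and branch are placeholders, and the scmurl prefix must be whitelisted in conf.scmurls unless allow_custom_scmurls is enabled.

# Hypothetical payload as SCMHandler would expose it in self.data.
handler.data = {
    "scmurl": "https://src.example.com/modules/testmodule.git?#0123abcd",
    "branch": "master",
}
handler.validate()  # raises Forbidden/ValidationError on a bad scmurl or missing branch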
Example #15
def tagged(msg_id, tag_name, build_nvr):
    """Called whenever koji tags a build to tag.

    :param str msg_id: the original id of the message being handled which is
        received from the message bus.
    :param str tag_name: the tag name applied.
    :param str build_nvr: nvr of the tagged build.
    """
    if conf.system not in ("koji", "test"):
        return []

    # Find our ModuleBuild associated with this tagged artifact.
    module_build = models.ModuleBuild.get_by_tag(db_session, tag_name)
    if not module_build:
        log.debug("No module build found associated with koji tag %r",
                  tag_name)
        return

    # Find tagged component.
    component = models.ComponentBuild.from_component_nvr(
        db_session, build_nvr, module_build.id)
    if not component:
        log.error("No component %s in module %r", build_nvr, module_build)
        return

    log.info("Saw relevant component tag of %r from %r.", component.nvr,
             msg_id)

    # Mark the component as tagged
    if tag_name.endswith("-build"):
        component.tagged = True
    else:
        component.tagged_in_final = True
    db_session.commit()

    if any(c.is_unbuilt for c in module_build.current_batch()):
        log.info(
            "Not regenerating repo for tag %s, there are still building components in a batch",
            tag_name,
        )
        return []

    # If all components are tagged, start newRepo task.
    if not any(c.is_completed and not c.is_tagged
               for c in module_build.up_to_current_batch()):
        builder = GenericBuilder.create_from_module(db_session, module_build,
                                                    conf)

        if any(c.is_unbuilt for c in module_build.component_builds):
            if not _is_new_repo_generating(module_build, builder.koji_session):
                repo_tag = builder.module_build_tag["name"]
                log.info(
                    "All components in batch tagged, regenerating repo for tag %s",
                    repo_tag)
                task_id = builder.koji_session.newRepo(repo_tag)
                module_build.new_repo_task_id = task_id
            else:
                log.info(
                    "newRepo task %s for %r already in progress, not starting another one",
                    str(module_build.new_repo_task_id),
                    module_build,
                )
        else:
            # In case this is the last batch, we do not need to regenerate the
            # buildroot, because we will not build anything else in it. It
            # would be useless to wait for a repository we will not use anyway.
            log.info(
                "All components in module tagged and built, skipping the last repo regeneration"
            )
            from module_build_service.scheduler.handlers.repos import done as repos_done_handler
            events.scheduler.add(
                repos_done_handler,
                ("fake_msg", builder.module_build_tag["name"]))
        db_session.commit()
Example #16
    def build_srpm(self, artifact_name, source, build_id, builder):
        """
        Builds the artifact from the SRPM.
        """
        state = koji.BUILD_STATES["BUILDING"]

        # Use the mock config associated with this thread.
        mock_config = os.path.join(
            self.configdir,
            "mock-%s.cfg" % str(threading.current_thread().name))

        # Open the logs to which we will forward mock stdout/stderr.
        mock_stdout_log = open(
            os.path.join(self.resultsdir, artifact_name + "-mock-stdout.log"),
            "w")
        mock_stderr_log = open(
            os.path.join(self.resultsdir, artifact_name + "-mock-stderr.log"),
            "w")

        srpm = artifact_name
        resultsdir = builder.resultsdir
        try:
            # Initialize mock.
            execute_cmd(
                ["mock", "-v", "-r", mock_config, "--init"],
                stdout=mock_stdout_log,
                stderr=mock_stderr_log,
            )

            # Start the build and store results to resultsdir
            builder.build(mock_stdout_log, mock_stderr_log)
            srpm = find_srpm(resultsdir)

            # Emit messages simulating a completed build. These messages
            # are put in the scheduler's work queue and are handled
            # by MBS after the build_srpm() method returns and control gets
            # back to the scheduler.main.main() method.
            state = koji.BUILD_STATES["COMPLETE"]
            self._send_build_change(state, srpm, build_id)

            with open(os.path.join(resultsdir, "status.log"), "w") as f:
                f.write("complete\n")
        except Exception as e:
            log.error("Error while building artifact %s: %s" %
                      (artifact_name, str(e)))

            # Emit messages simulating a failed build. These messages
            # are put in the scheduler's work queue and are handled
            # by MBS after the build_srpm() method returns and control gets
            # back to the scheduler.main.main() method.
            state = koji.BUILD_STATES["FAILED"]
            self._send_build_change(state, srpm, build_id)
            with open(os.path.join(resultsdir, "status.log"), "w") as f:
                f.write("failed\n")

        mock_stdout_log.close()
        mock_stderr_log.close()

        self._save_log(resultsdir, "state.log", artifact_name)
        self._save_log(resultsdir, "root.log", artifact_name)
        self._save_log(resultsdir, "build.log", artifact_name)
        self._save_log(resultsdir, "status.log", artifact_name)

        # Move files from the thread-specific resultsdir to the main resultsdir.
        for name in os.listdir(resultsdir):
            os.rename(os.path.join(resultsdir, name),
                      os.path.join(self.resultsdir, name))

        # Depending on the configuration settings, remove/keep useless log files
        if conf.mock_purge_useless_logs:
            self._purge_useless_logs()

        # We return the BUILDING state here even when we know the build has
        # already completed or failed, because otherwise utils.start_build_batch
        # would think this component is already built and also tagged, while we
        # have only just built it - tagging will happen as a result of the build
        # change message we are sending above using _send_build_change.
        # This keeps the backend compatible with other backends, which return
        # COMPLETE here only when the resulting build is already in a repository
        # ready to be used. That is not the case for the Mock backend at the
        # time we return here.
        reason = "Building %s in Mock" % (artifact_name)
        return build_id, koji.BUILD_STATES["BUILDING"], reason, None
Example #17
def failed(msg_id, module_build_id, module_build_state):
    """Called whenever a module enters the 'failed' state.

    We cancel all the remaining component builds of a module
    and stop the build.

    :param str msg_id: the original id of the message being handled, which is
        received from the message bus.
    :param int module_build_id: the module build id.
    :param int module_build_state: the module build state.
    """
    build = models.ModuleBuild.get_by_id(db_session, module_build_id)

    if build.state != module_build_state:
        log.warning(
            "Note that retrieved module state %r doesn't match message module state %r",
            build.state,
            module_build_state,
        )
        # This is OK... it's a race condition we can ignore.

    if build.koji_tag:
        builder = GenericBuilder.create_from_module(db_session, build, conf)

        if build.new_repo_task_id:
            builder.cancel_build(build.new_repo_task_id)

        for component in (c for c in build.component_builds if c.is_unbuilt):
            if component.task_id:
                builder.cancel_build(component.task_id)
            component.state = koji.BUILD_STATES["FAILED"]
            component.state_reason = build.state_reason
            db_session.add(component)

        # Tell the external buildsystem to wrap up
        builder.finalize(succeeded=False)
    else:
        # Do not overwrite state_reason set by Frontend if any.
        if not build.state_reason:
            reason = "Missing koji tag. Assuming previously failed module lookup."
            log.error(reason)
            build.transition(db_session,
                             conf,
                             state=models.BUILD_STATES["failed"],
                             state_reason=reason,
                             failure_type="infra")
            db_session.commit()
            return

    # Don't transition it again if it's already been transitioned
    if build.state != models.BUILD_STATES["failed"]:
        build.transition(db_session,
                         conf,
                         state=models.BUILD_STATES["failed"],
                         failure_type="user")

    db_session.commit()

    build_logs.stop(build)
    GenericBuilder.clear_cache(build)