def rebuild_if_not_exists(self, event, errata_id):
        """
        Initiates a rebuild of artifacts based on the Errata advisory with
        the given `errata_id`.

        :rtype: List of ErrataAdvisoryRPMsSignedEvent instances.
        :return: List of extra events generated to initiate the rebuild.
        """

        db_event = db.session.query(Event).filter_by(
            event_type_id=EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent],
            search_key=str(errata_id)).first()
        if (db_event and db_event.state != EventState.FAILED.value and
                not event.manual):
            log.debug("Ignoring Errata advisory %d - it already exists in "
                      "Freshmaker db.", errata_id)
            return []

        # Get additional info from Errata to fill in the needed data.
        errata = Errata()
        advisories = errata.advisories_from_event(event)
        if not advisories:
            log.error("Unknown Errata advisory %d" % errata_id)
            return []

        log.info("Generating ErrataAdvisoryRPMsSignedEvent for Errata "
                 "advisory %d, because its state changed to %s.", errata_id,
                 event.advisory.state)
        advisory = advisories[0]
        new_event = ErrataAdvisoryRPMsSignedEvent(
            event.msg_id + "." + str(advisory.name), advisory)
        new_event.dry_run = event.dry_run
        new_event.manual = event.manual
        return [new_event]
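
A minimal usage sketch, assuming this method lives on an event handler whose
handle() may return extra events that the scheduler re-queues (see
process_event in Example 8); the attributes accessed on `event` here are
assumptions, not taken from the snippet above:

    def handle(self, event):
        # Hypothetical handler body: forward the generated
        # ErrataAdvisoryRPMsSignedEvent instances so they are put back on
        # the main work queue.
        errata_id = event.advisory.errata_id
        return self.rebuild_if_not_exists(event, errata_id)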
Example 2
    def register_parsers(self):
        parser_classes = load_classes(conf.parsers)
        for parser_class in parser_classes:
            events.BaseEvent.register_parser(parser_class)
        log.debug("Parser classes: %r", events.BaseEvent._parsers)

        self.topic = events.BaseEvent.get_parsed_topics()
        log.debug('Setting topics: {}'.format(', '.join(self.topic)))
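
For illustration, a hypothetical `conf.parsers` value could list importable
parser classes as "module:ClassName" strings; the exact format expected by
load_classes and the module paths below are assumptions:

    # Illustrative configuration value, not an actual default.
    PARSERS = [
        "freshmaker.parsers.koji:KojiTaskStateChangeParser",
        "freshmaker.parsers.errata:ErrataAdvisoryStateChangedParser",
    ]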
Example 3
    def parse(self, topic, msg):
        msg_id = msg.get('msg_id')
        msg_inner_msg = msg.get('msg')

        # If there isn't a msg dict in msg then this message can be skipped
        if not msg_inner_msg:
            log.debug(('Skipping message without any content with the '
                       'topic "{0}"').format(topic))
            return None

        return KojiTaskStateChangeEvent(msg_id, msg_inner_msg.get('id'),
                                        msg_inner_msg.get('new'))
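
For reference, a message accepted by parse() above might look like the
following sketch (all field values are illustrative):

    # Hypothetical message payload handled by parse().
    msg = {
        "msg_id": "2017-abc123",
        "msg": {
            "id": 123456,   # Koji task ID
            "new": 1,       # new task state
        },
    }
    # parse(topic, msg) would return
    # KojiTaskStateChangeEvent("2017-abc123", 123456, 1).
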
    def mark_as_released(self, errata_id):
        """
        Marks the Errata advisory with `errata_id` ID as "released", so it
        is not included in further container image rebuilds.
        """
        # check db to see whether this advisory exists in db
        db_event = db.session.query(Event).filter_by(
            event_type_id=EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent],
            search_key=str(errata_id)).first()
        if not db_event:
            log.debug("Ignoring Errata advisory %d - it does not exist in "
                      "Freshmaker db.", errata_id)
            return []

        self.set_context(db_event)

        db_event.released = True
        db.session.commit()
        log.info("Errata advisory %d is now marked as released", errata_id)
Example 5
    def query_sfm2(self, cve):
        """
        Queries SFM2 to find out infos about the cve, specifically
        the CVE impact and list of affected packages
        It queries api/public/flaws?id=$cve&include_fields=affects,impact endpoint.

        :param str cve: CVE, for example "CVE-2017-10268".
        :rtype: dict
        :return: dict with two keys, "impact", and "affects". The first references
        the impact of the CVE, and the second is a list of dicts representing packages
        affected by the CVE.
        """
        log.debug("Querying SFM2 for %s", cve)
        r = requests.get(
            "%s/api/public/flaws" % self.server_url,
            params={"include_fields": "affects,impact", "id": cve})
        r.raise_for_status()

        return r.json()[0]
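
A short usage sketch based on the docstring above; the keys inside each
"affects" entry are assumptions:

    flaw = self.query_sfm2("CVE-2017-10268")
    # flaw might look like:
    # {"impact": "important",
    #  "affects": [{"ps_component": "mysql", "affectedness": "affected"}, ...]}
    impact = flaw["impact"]
    affected_packages = flaw["affects"]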
Example 6
    def get_operator_indices(self):
        """ Get all index images for organization(s)(configurable) from Pyxis """
        request_params = {}
        organizations = conf.pyxis_index_image_organizations
        if organizations:
            rsql = " or ".join([
                f"organization=={organization}"
                for organization in organizations
            ])
            request_params["filter"] = rsql
        indices = self._pagination("operators/indices", request_params)
        log.debug("Found the following index images: %s",
                  ", ".join(i["path"] for i in indices))

        # Operator indices can be available in Pyxis before the corresponding
        # OpenShift version is released, so we need to filter out such indices.
        indices = list(
            filter(lambda x: self.ocp_is_released(x["ocp_version"]), indices))
        log.info("Using the following GA index images: %s",
                 ", ".join(i["path"] for i in indices))
        return indices
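
For example, assuming conf.pyxis_index_image_organizations were set to
["redhat-operators", "certified-operators"], the RSQL filter built by the
loop above and sent to Pyxis would be:

    # request_params passed to self._pagination():
    {"filter": "organization==redhat-operators or organization==certified-operators"}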
Example 7
def koji_service(profile=None, logger=None, login=True, dry_run=False):
    """A Koji service context manager to be used with the ``with`` statement.

    Example::

        with koji_service() as service:
            ...

        # if you want it to log something
        with koji_service(logger=logger) as service:
            ...

        # if you want it to use an alternative Koji profile rather than
        # the default "koji" one
        with koji_service(profile='stg', logger=logger) as service:
            ...
    """
    service = KojiService(profile=profile, dry_run=dry_run)

    if login:
        if not conf.krb_auth_principal:
            log.error("Cannot login to Koji, krb_auth_principal not set")
        else:
            log.debug('Logging into %s with Kerberos authentication.',
                      service.server)

            service.krb_login()

            # In dry run mode we never really log in, so only check the
            # login state for real runs.
            if not dry_run and not service.logged_in:
                log.error('Could not login to server %s', service.server)
                # Stop here so the generator does not yield a second time below.
                yield None
                return

    try:
        yield service
    finally:
        if service.logged_in:
            if logger:
                logger.debug('Logout Koji session')
            service.logout()
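
A minimal usage sketch for the context manager above; note that it can yield
None when the Kerberos login fails, so callers should check for that. The
profile name is illustrative:

    with koji_service(profile='brew', logger=log, dry_run=False) as service:
        if service is None:
            raise RuntimeError("Could not log in to Koji")
        # Use the authenticated service here, e.g. service.build_container(...)
        # from Example 10, assuming that method belongs to the same service class.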
Example 8
    def process_event(self, msg):
        log.debug(
            'Received a message with an ID of "{0}" and of type "{1}"'.format(
                getattr(msg, 'msg_id', None),
                type(msg).__name__))

        handlers = load_classes(conf.handlers)
        handlers = sorted(handlers,
                          key=lambda handler: getattr(handler, "order", 50))
        for handler_class in handlers:
            handler = handler_class()

            if not handler.can_handle(msg):
                continue

            idx = "%s: %s, %s" % (type(handler).__name__, type(msg).__name__,
                                  msg.msg_id)
            log.debug("Calling %s" % idx)
            try:
                further_work = handler.handle(msg) or []
            except Exception:
                err = 'Could not process message handler. See the traceback.'
                log.exception(err)
            else:
                # Handlers can *optionally* return a list of fake messages that
                # should be re-inserted back into the main work queue. We can
                # use this (for instance) when we submit a new component build
                # but (for some reason) it has already been built, then it can
                # fake its own completion back to the scheduler so that work
                # resumes as if it was submitted for real and koji announced
                # its completion.
                for event in further_work:
                    log.info("  Scheduling faked event %r" % event)
                    self.incoming.put(event)

            log.debug("Done with %s" % idx)
Example 9
    def can_parse(self, topic, msg):
        log.debug(topic)
        if not any([topic.endswith(s) for s in self.topic_suffixes]):
            return False
        return True
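
A minimal sketch of a parser relying on this check; the class name, base
class, and topic suffix below are assumptions for a Koji task state change
parser:

    class KojiTaskStateChangeParser(BaseParser):
        """Parses Koji task state change messages."""
        name = "KojiTaskStateChangeParser"
        topic_suffixes = ["buildsys.task.state.change"]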
Example 10
    def build_container(self,
                        source_url,
                        branch,
                        target,
                        scratch=None,
                        repo_urls=None,
                        isolated=False,
                        release=None,
                        koji_parent_build=None,
                        arch_override=None,
                        compose_ids=None,
                        operator_csv_modifications_url=None):
        """Build container by buildContainer

        :param str source_url: the container repository URL.
        :param str branch: a build option passed to ``buildContainer``.
        :param str target: specify a specific build target.
        :param bool scratch: a build option passed to ``buildContainer``.
        :param list[str] repo_urls: a build option passed to ``buildContainer``.
        :param bool isolated: a build option passed to ``buildContainer``.
        :param str release: a build option passed to ``buildContainer``.
        :param str koji_parent_build: a build option passed to ``buildContainer``.
        :param str arch_override: a build option passed to ``buildContainer``.
        :param list[str] compose_ids: a build option passed to ``buildContainer``.
            For details of these build options, please refer to
            ``PARAMS_SCHEMA`` defined inside ``BuildContainerTask`` in the
            koji-containerbuild plugin.
        :param str operator_csv_modifications_url: a build option passed to ``buildContainer``.
            This is necessary for bundle image rebuilds.
        :return: the container build task ID returned from ``buildContainer``.
        :rtype: int
        """

        build_target = target
        build_opts = {
            'scratch': False if scratch is None else scratch,
            'git_branch': branch,
        }

        if repo_urls:
            build_opts['yum_repourls'] = repo_urls
        if compose_ids:
            build_opts['compose_ids'] = compose_ids
        if isolated:
            build_opts['isolated'] = True
        if koji_parent_build:
            build_opts['koji_parent_build'] = koji_parent_build
        if arch_override:
            build_opts['arch_override'] = arch_override
        if release:
            build_opts['release'] = release
        if operator_csv_modifications_url:
            build_opts[
                'operator_csv_modifications_url'] = operator_csv_modifications_url

        log.debug('Build from target: %s', build_target)
        log.debug('Build options: %s', build_opts)

        if not self.dry_run:
            task_id = self.session.buildContainer(source_url, build_target,
                                                  build_opts)
        else:
            task_id = self._fake_build_container(source_url, build_target,
                                                 build_opts)

        log.info('Task %s is created to build docker image for %s', task_id,
                 source_url)
        log.info('Task info: %s/taskinfo?taskID=%s', self.weburl, task_id)

        return task_id
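
A hedged usage sketch for build_container; the repository URL, branch,
target, and repo URL values are illustrative:

    task_id = self.build_container(
        "git://pkgs.example.com/containers/foo#abc123",
        branch="foo-1.0-rhel-8",
        target="foo-1.0-rhel-8-containers-candidate",
        scratch=True,
        repo_urls=["http://example.com/compose/latest/repo"])
    log.info("Submitted container build task %s", task_id)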