Example #1
    def _make_request(self, entity, params):
        """
        Send a request to Pyxis

        :param str entity: entity part to construct a full URL for request.
        :param dict params: Pyxis query parameters.
        :return: JSON response from Pyxis
        :rtype: dict
        :raises PyxisRequestError: If Pyxis returns an error response
        """
        entity_url = urllib.parse.urljoin(self._api_root, entity)

        auth_method = HTTPKerberosAuth(mutual_authentication=OPTIONAL)
        response = requests.get(entity_url,
                                params=params,
                                auth=auth_method,
                                timeout=conf.net_timeout)

        if response.ok:
            return response.json()

        # Warn early, in case there is an error in the error handling code below
        log.warning("Request to %s gave %r", response.request.url, response)

        try:
            response_text = response.json()
        except ValueError:
            response_text = response.text

        raise PyxisRequestError(response.status_code, response_text)
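
A hedged usage sketch: `PyxisClient` and its `api_root` constructor argument are hypothetical names standing in for the class that defines `_make_request`, which is not shown in the snippet above.

# Hypothetical client and API root; only _make_request's behavior is from the snippet.
pyxis = PyxisClient(api_root="https://pyxis.example.com/v1/")
try:
    data = pyxis._make_request("repositories", params={"page_size": 100})
except PyxisRequestError as e:
    log.error("Pyxis returned an error: %r", e)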
Example #2
def _in_memory_publish(topic, msg):
    """ Puts the message into the in memory work queue. """
    # Increment the message ID.
    global _in_memory_msg_id
    _in_memory_msg_id += 1

    config = conf.messaging_backends['in_memory']

    # Create fake fedmsg from the message so we can reuse
    # the BaseEvent.from_fedmsg code to get the particular BaseEvent
    # class instance.
    wrapped_msg = BaseEvent.from_fedmsg(
        config['SERVICE'] + "." + topic,
        {"msg_id": str(_in_memory_msg_id), "msg": msg},
    )

    # Put the message into the queue.
    from freshmaker.consumer import work_queue_put
    try:
        work_queue_put(wrapped_msg)
    except ValueError as e:
        log.warning("No FreshmakerConsumer found.  Shutting down?  %r", e)
    except AttributeError:
        # In the event that `moksha.hub._hub` hasn't yet been initialized, we
        # need to store messages on the side until it becomes available.
        # As a last-ditch effort, try to hang initial messages in the config.
        log.warning("Hub not initialized.  Queueing on the side.")
        _initial_messages.append(wrapped_msg)
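
A minimal sketch of how this helper might be invoked, assuming conf.messaging_backends['in_memory']['SERVICE'] is set to 'freshmaker' (an assumption), so the fake fedmsg topic becomes 'freshmaker.<topic>'.

# Illustrative call; the topic and payload are made up.
_in_memory_publish("event.state.changed", {"event_id": 42, "state": "COMPLETE"})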
Example #3
def get_ocp_release_date(ocp_version):
    """ Get the OpenShift version release date via the Product Pages API

    :param str ocp_version: the OpenShift version
    :return: None, or the GA date in "%Y-%m-%d" format, e.g. 2021-02-23.
    :rtype: str or None
    """
    if not conf.product_pages_api_url:
        raise RuntimeError("Product Pages API url is not set in config")

    ocp_release = f"openshift-{ocp_version}"

    url = f"{conf.product_pages_api_url.rstrip('/')}/releases/{ocp_release}/schedule-tasks"
    resp = requests.get(
        url,
        params={"name": "GA", "fields": "name,date_finish"},
        timeout=conf.net_timeout,
    )

    if resp.status_code == 404:
        log.warning(f"GA date of {ocp_release} is not found via {resp.url}: {resp.reason}")
        return None
    if not resp.ok:
        resp.raise_for_status()
    return resp.json()[0]['date_finish']
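
A usage sketch; the version string is illustrative and the return value follows the "%Y-%m-%d" format from the docstring.

ga_date = get_ocp_release_date("4.7")
if ga_date:
    print(f"OpenShift 4.7 went GA on {ga_date}")  # e.g. "2021-02-23"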
Example #4
def init_auth(login_manager, backend):
    """Initialize authentication backend

    Enable and initialize authentication backend to work with frontend
    authentication module running in Apache.
    """
    if backend == 'noauth':
        # Do not enable any authentication backend working with frontend
        # authentication module in Apache.
        log.warning("Authorization is disabled in Freshmaker configuration.")
        return
    if backend == 'kerberos':
        _validate_kerberos_config()
        global load_krb_user_from_request
        load_krb_user_from_request = login_manager.request_loader(
            load_krb_user_from_request)
    elif backend == 'openidc':
        global load_openidc_user
        load_openidc_user = login_manager.request_loader(load_openidc_user)
    elif backend == 'kerberos_or_ssl':
        _validate_kerberos_config()
        global load_krb_or_ssl_user_from_request
        load_krb_or_ssl_user_from_request = login_manager.request_loader(
            load_krb_or_ssl_user_from_request)
    elif backend == 'ssl':
        global load_ssl_user_from_request
        load_ssl_user_from_request = login_manager.request_loader(
            load_ssl_user_from_request)
    else:
        raise ValueError('Unknown backend name {0}.'.format(backend))
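
A minimal wiring sketch, assuming a Flask-Login LoginManager; reading the backend name from conf.auth_backend is an assumption about the configuration layout.

from flask_login import LoginManager

login_manager = LoginManager()
# One of: 'kerberos', 'openidc', 'kerberos_or_ssl', 'ssl', 'noauth'
init_auth(login_manager, conf.auth_backend)  # conf.auth_backend is an assumption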
Example #5
    def _get_pullspecs_mapping(self):
        """
        Get map of all replaced pullspecs from 'bundle_images' provided in an event.

        :rtype: dict
        :return: map of all '_old' pullspecs that were replaced by 'new'
            pullspecs in previous Freshmaker rebuilds
        """
        old_to_new_pullspec_map = dict()
        for bundle_nvr in self.event.bundle_images:
            artifact_build = db.session.query(ArtifactBuild).filter(
                ArtifactBuild.rebuilt_nvr == bundle_nvr,
                ArtifactBuild.type == ArtifactType.IMAGE.value,
            ).one_or_none()
            if artifact_build is None:
                log.warning(
                    f'Can\'t find build for a bundle image "{bundle_nvr}"')
                continue
            pullspec_overrides = artifact_build.bundle_pullspec_overrides
            for pullspec in pullspec_overrides['pullspec_replacements']:
                old_pullspec = pullspec.get('_old', None)
                if old_pullspec is None:
                    continue
                old_to_new_pullspec_map[old_pullspec] = pullspec['new']

        return old_to_new_pullspec_map
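
The shape of the returned mapping, with made-up pullspecs:

# {
#     'registry.example.io/repo/operator@sha256:<old-digest>':
#         'registry.example.io/repo/operator@sha256:<new-digest>',
# }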
Example #6
    def get_or_create_from_event(cls, session, event, released=True):
        # We must extract all needed arguments explicitly, because the event
        # might not have some of them, in which case we fall back to defaults.
        requester = getattr(event, "requester", None)
        requested_rebuilds_list = getattr(event, "container_images", None)
        requested_rebuilds = None
        # make sure the 'container_images' field is a list and convert it to str
        if isinstance(requested_rebuilds_list, list):
            requested_rebuilds = " ".join(requested_rebuilds_list)
        requester_metadata = getattr(event, "requester_metadata_json", None)
        if requester_metadata is not None:
            # try to convert JSON into str, if it's invalid use None
            try:
                requester_metadata = json.dumps(requester_metadata)
            except TypeError:
                log.warning(
                    "requester_metadata_json field is ill-formatted: %s",
                    requester_metadata)
                requester_metadata = None

        return cls.get_or_create(session,
                                 event.msg_id,
                                 event.search_key,
                                 event.__class__,
                                 released=released,
                                 manual=event.manual,
                                 dry_run=event.dry_run,
                                 requester=requester,
                                 requested_rebuilds=requested_rebuilds,
                                 requester_metadata=requester_metadata)
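
A hedged usage sketch: persist (or fetch) the DB record for an incoming BaseEvent instance and commit the session.

db_event = Event.get_or_create_from_event(db.session, event, released=False)
db.session.commit()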
Example #7
    def _add_repositories_info(self, reg_repo_info):
        """
        For every (registry, repository) pair, add information about its
        auto_rebuild tags. To decrease the number of queries to Pyxis, only
        one query is performed, with the filter set to match the proper
        registry-repository pairs.

        A list of tags for each repository will be added to the
        'auto_rebuild_tags' key in the input info about the repository.

        Entries about repos without 'auto_rebuild_tags' will be deleted from
        the mapping.

        :param dict reg_repo_info: map of (registry, repository) pairs to a
            dict containing NVRs of bundle images from that repo and the
            auto_rebuild tags of that repo
        """
        if not reg_repo_info:
            return None
        fltr = ""
        # Construct filter for future request to Pyxis with registry-repository pairs
        for reg, repo in reg_repo_info.keys():
            if fltr:
                fltr += ','
            fltr += f'(registry=={reg};repository=={repo})'
        params = {
            'include':
            ','.join(
                ['data.auto_rebuild_tags', 'data.registry',
                 'data.repository']),
            'filter':
            fltr
        }
        repos = self._pagination('repositories', params)

        # For every repo, add its auto_rebuild_tags info
        for repo in repos:
            reg_repo_pair = (repo['registry'], repo['repository'])
            # one of the repos isn't in the previously constructed map,
            # so there is a naming inconsistency
            if reg_repo_pair not in reg_repo_info:
                log.warning('There is inconsistency in naming for: %s/%s',
                            reg_repo_pair[0], reg_repo_pair[1])
                continue
            tags = repo.get('auto_rebuild_tags')
            # If the repository doesn't have 'auto_rebuild_tags', don't proceed with it
            if tags:
                reg_repo_info[reg_repo_pair]['auto_rebuild_tags'] = set(tags)
            else:
                del reg_repo_info[reg_repo_pair]
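
An illustration of the Pyxis filter this method builds, for a made-up input mapping:

# reg_repo_info = {('registry.example.io', 'repo/operator'): {'nvrs': {'op-bundle-1.0-1'}}}
# produces:
# fltr == '(registry==registry.example.io;repository==repo/operator)'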
Example #8
def get_url_for(*args, **kwargs):
    """
    flask.url_for wrapper which creates the app_context on-the-fly.
    """
    if has_app_context():
        return url_for(*args, **kwargs)

    # Localhost is the right URL only when the scheduler runs on the same
    # system as the web views.
    app.config['SERVER_NAME'] = 'localhost'
    with app.app_context():
        log.warning("get_url_for() has been called without the Flask "
                    "app_context. That can lead to SQLAlchemy errors caused by "
                    "multiple session being used in the same time.")
        return url_for(*args, **kwargs)
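
A usage sketch; the endpoint name and arguments are illustrative.

# Safe to call from the backend scheduler, outside any Flask request context.
url = get_url_for('event', id=123)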
Example #9
    def ocp_is_released(self, ocp_version):
        """ Check if ocp_version is released by comparing the GA date with current date

        :param str ocp_version: the OpenShift Version
        :return: True if GA date in Product Pages is in the past, otherwise False
        :rtype: bool
        """
        ga_date_str = get_ocp_release_date(ocp_version)
        # None is returned if GA date is not found
        if not ga_date_str:
            log.warning(
                f"GA date of OpenShift {ocp_version} is not found in Product Pages, ignoring it"
            )
            return False

        return datetime.now() > datetime.strptime(ga_date_str, "%Y-%m-%d")
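
A usage sketch; `handler` is a hypothetical name for the instance that defines this method, and the version is illustrative.

if handler.ocp_is_released("4.7"):
    log.info("OpenShift 4.7 is GA, rebuilds may proceed")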
Example #10
    def fetch_cve_metadata(self, cve_list):
        """
        Fetches metadata about each CVE in `cve_list` and returns a tuple with
        the name of highest severity rate and the affected packages (a dictionary
        with product and pkg_name).
        See `SFM2API.THREAT_SEVERITIES` for list of possible severity rates.

        :param list cve_list: List of strings with CVE names.
        :rtype: str
        :return: Tuple, the first element is the name of highest severity rate occuring
        in CVEs from `cve_list`. The second element is a list of dicts, with "product"
        and "pkg_name" of the affected packages.
        """
        max_rating = -1
        affected_pkgs = []
        for cve in cve_list:
            # Reset per CVE so a failed impact lookup doesn't reuse the
            # previous CVE's severity rating
            severity = None
            try:
                elements = self.query_sfm2(cve)
            except requests.exceptions.HTTPError as e:
                if e.response.status_code == 400:
                    log.warning(
                        "The request for the CVE %s to the SFM2 API seems wrong, "
                        "impact and affected packages unknown. %s", cve, e.response.request.url)
                    continue
                if e.response.status_code == 500:
                    log.warning(
                        "Some error occurred looking for CVE %s with the SFM2 API, "
                        "impact and affected packages unknown. %s", cve, e.response.request.url)
                    continue
                raise

            try:
                severity = elements['impact']
            except (IndexError, KeyError):
                log.warning("Some error occured looking for impact for CVE %s using SFM2 API", cve)

            try:
                affected_pkgs.extend([
                    {'product': item['ps_module'], 'pkg_name': item['ps_component']}
                    for item in elements['affects'] if (
                        item['affected'] != "notaffected" and
                        item['resolution'] not in ["wontfix", "ooss"])])
            except (KeyError, IndexError):
                log.exception("Some error occured looking for affected packages for CVE %s using SFM2 API", cve)

            try:
                rating = SFM2API.THREAT_SEVERITIES.index(severity)
            except ValueError:
                log.error("Unknown threat_severity '%s' for CVE %s",
                          severity, cve)
                continue

            max_rating = max(max_rating, rating)

        if max_rating == -1:
            return (None, affected_pkgs)
        return (SFM2API.THREAT_SEVERITIES[max_rating], affected_pkgs)
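
A usage sketch; `sfm2` is an SFM2API instance and the CVE id and return values are made up.

severity, affected_pkgs = sfm2.fetch_cve_metadata(["CVE-2021-0000"])
# severity: e.g. 'important', or None if no rating was found
# affected_pkgs: e.g. [{'product': 'rhel-8', 'pkg_name': 'openssl'}]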
Example #11
    def get_docker_repo_tags(self, errata_id):
        """
        Get ET repo/tag configuration using XML-RPC call
        get_advisory_cdn_docker_file_list
        :param int errata_id: Errata advisory ID.
        :rtype: dict
        :return: Dict of advisory builds with repo and tag config:
            {
                'build_NVR': {
                    'cdn_repo1': [
                        'tag1',
                        'tag2'
                    ],
                    ...
                },
                ...
            }
        """
        try:
            response = self.xmlrpc.get_advisory_cdn_docker_file_list(
                errata_id)
        except Exception:
            log.exception("Canot call XMLRPC get_advisory_cdn_docker_file_list call.")
            return None
        if response is None:
            log.warning("The get_advisory_cdn_docker_file_list XMLRPC call "
                        "returned None.")
            return None

        repo_tags = dict()
        for build_nvr in response:
            if build_nvr not in repo_tags:
                repo_tags[build_nvr] = dict()
            repos = response[build_nvr]['docker']['target']['repos']
            for repo in repos:
                tags = repos[repo]['tags']
                repo_tags[build_nvr][repo] = tags
        return repo_tags
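
A usage sketch; `errata` stands in for the instance defining this method and the advisory ID is illustrative.

repo_tags = errata.get_docker_repo_tags(12345)
# e.g. {'foo-container-1.0-1': {'cdn_repo1': ['tag1', 'tag2']}}, or None on failure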
Example #12
    def get_latest_bundles(self, index_images):
        """
        Get latest bundle images per channel per index image

        :param list index_images: list of index images
        :return: latest bundle images per channel per index image
        :rtype: list
        """
        bundles_per_index_image = \
            self._get_bundles_per_index_image(index_images)

        ret_bundles = []
        for index_image, bundles in bundles_per_index_image.items():
            bundle_per_channel = {}
            # get latest versions of bundle images per channel
            for bundle in bundles:
                channel = bundle['channel_name']
                try:
                    # Always ensure the new version is a valid semantic version
                    new_ver = version.Version(bundle['version'])
                    if channel in bundle_per_channel:
                        old_ver = version.Version(
                            bundle_per_channel[channel]['version'])
                        if new_ver > old_ver:
                            bundle_per_channel[channel] = bundle
                    else:
                        bundle_per_channel[channel] = bundle
                # The bundle version does not follow semantic versioning
                except version.InvalidVersion as e:
                    path = bundle.get('bundle_path', 'Unknown bundle path')
                    log.warning(
                        "A version format other than SemVer is used in "
                        "bundle: %s", path)
                    log.warning(repr(e))
            ret_bundles.extend(bundle_per_channel.values())

        return ret_bundles
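
A usage sketch chaining this with get_operator_indices, both assumed to live on the same Pyxis client instance:

index_images = pyxis.get_operator_indices()
latest_bundles = pyxis.get_latest_bundles(index_images)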
Example #13
    def get_auto_rebuild_tagged_images(self, bundle_images):
        """
        Determine which bundle images are published to a container repository
        and were at least once tagged with an auto-rebuild tag.

        :param list bundle_images: Images of operator bundles that should
            be filtered
        :return: Image NVRs that were published to repositories with an auto_rebuild tag
        :rtype: set(str)
        """
        reg_repo_info = {}
        for bundle_image in bundle_images:
            bundle_nvr = bundle_image.get('brew', {}).get('build')
            if not bundle_nvr:
                log.warning('One of the bundle images doesn\'t have brew.build set')
                continue
            if not bundle_image.get('repositories'):
                log.warning('Bundle image %s doesn\'t have repositories set',
                            bundle_nvr)
                continue
            # construct mapping of (registry, repository) -> {'nvrs': {bundles_nvrs}}
            for repo in bundle_image.get('repositories'):
                if not (repo.get('registry') and repo.get('repository')):
                    log.warning('"registry" or "repository" isn\'t set in %s',
                                bundle_nvr)
                    continue
                reg_repo = (repo['registry'], repo['repository'])
                reg_repo_info.setdefault(reg_repo, {})\
                    .setdefault('nvrs', set()).add(bundle_nvr)

        # Add auto_rebuild_tags to info structures for every repo
        self._add_repositories_info(reg_repo_info)
        # Get tag history for every repo and get nvrs tagged with auto_rebuild tag
        nvrs = self._filter_auto_rebuild_nvrs(reg_repo_info)
        if not nvrs:
            log.warning('Can\'t find any NVR tagged with an auto-rebuild tag')
        return nvrs
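
A usage sketch; `bundle_images` is the list of Pyxis image records to filter, and the returned NVR is made up.

auto_rebuild_nvrs = pyxis.get_auto_rebuild_tagged_images(bundle_images)
# e.g. {'example-operator-bundle-1.0-2', ...}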
Example #14
    def handle(self, event):
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Get builds NVRs from the advisory attached to the message/event and
        # then get original NVR for every build

        # Mapping of original build nvrs to rebuilt nvrs in advisory
        nvrs_mapping = {}
        for product_info in event.advisory.builds.values():
            for build in product_info['builds']:
                # Search for the first build that triggered the chain of rebuilds
                # for every shipped NVR to get original NVR from it
                original_nvr = self.get_published_original_nvr(build['nvr'])
                if original_nvr is None:
                    continue
                nvrs_mapping[original_nvr] = build['nvr']

        original_nvrs = nvrs_mapping.keys()
        self.log_info(
            "Original NVRs of builds in the advisory #{0} are: {1}".format(
                event.advisory.errata_id, " ".join(original_nvrs)))

        # Get image manifest_list_digest for all original images, manifest_list_digest is used
        # in pullspecs in bundle's related images
        original_digests_by_nvr = {}
        original_nvrs_by_digest = {}
        for nvr in original_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                original_digests_by_nvr[nvr] = digest
                original_nvrs_by_digest[digest] = nvr
            else:
                log.warning(
                    f"Image manifest_list_digest not found for original image {nvr} in Pyxis, "
                    "skip this image"
                )

        if not original_digests_by_nvr:
            msg = f"None of the original images have digests in Pyxis: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Get image manifest_list_digest for all rebuilt images, manifest_list_digest is used
        # in pullspecs of bundle's related images
        rebuilt_digests_by_nvr = {}
        rebuilt_nvrs = nvrs_mapping.values()
        for nvr in rebuilt_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                rebuilt_digests_by_nvr[nvr] = digest
            else:
                log.warning(
                    f"Image manifest_list_digest not found for rebuilt image {nvr} in Pyxis, "
                    "skip this image"
                )

        if not rebuilt_digests_by_nvr:
            msg = f"None of the rebuilt images have digests in Pyxis: {','.join(rebuilt_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)

        # A set of unique bundle digests
        bundle_digests = set()

        # get bundle digests for original images
        bundle_digests_by_related_nvr = {}
        for image_nvr, image_digest in original_digests_by_nvr.items():
            bundles = self._pyxis.get_bundles_by_related_image_digest(
                image_digest, all_bundles
            )
            if not bundles:
                log.info(f"No latest bundle image with the related image of {image_nvr}")
                continue

            for bundle in bundles:
                bundle_digest = bundle['bundle_path_digest']
                bundle_digests.add(bundle_digest)
                bundle_digests_by_related_nvr.setdefault(image_nvr, []).append(bundle_digest)

        if not bundle_digests_by_related_nvr:
            msg = "None of the original images have related bundles, skip."
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Mapping of bundle digest to bundle data
        # {
        #     digest: {
        #         "images": [image_amd64, image_aarch64],
        #         "nvr": NVR,
        #         "auto_rebuild": True/False,
        #         "osbs_pinning": True/False,
        #         "pullspecs": [...],
        #     }
        # }
        bundles_by_digest = {}
        default_bundle_data = {
            'images': [],
            'nvr': None,
            'auto_rebuild': False,
            'osbs_pinning': False,
            'pullspecs': [],
        }

        # Get images for each bundle digest, a bundle digest can have multiple images
        # with different arches.
        for digest in bundle_digests:
            bundles = self._pyxis.get_images_by_digest(digest)
            # If no bundle image found, just skip this bundle digest
            if not bundles:
                continue

            bundles_by_digest.setdefault(digest, copy.deepcopy(default_bundle_data))
            bundles_by_digest[digest]['nvr'] = bundles[0]['brew']['build']
            bundles_by_digest[digest]['images'] = bundles

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)

        # For each bundle, check whether it should be rebuilt by comparing the
        # auto_rebuild_tags of repository and bundle's tags
        for digest, bundle_data in bundles_by_digest.items():
            bundle_nvr = bundle_data['nvr']

            # Images are for different arches, just check against the first image
            image = bundle_data['images'][0]
            if self.image_has_auto_rebuild_tag(image):
                bundle_data['auto_rebuild'] = True

            # Fetch buildinfo
            buildinfo = koji_api.get_build(bundle_nvr)
            related_images = (
                buildinfo.get("extra", {})
                .get("image", {})
                .get("operator_manifests", {})
                .get("related_images", {})
            )
            bundle_data['osbs_pinning'] = related_images.get('created_by_osbs', False)
            # Save the original pullspecs
            bundle_data['pullspecs'] = related_images.get('pullspecs', [])

        # Digests of bundles to be rebuilt
        to_rebuild_digests = set()

        # Now for each bundle, replace the original digest with rebuilt
        # digest (override pullspecs)
        for digest, bundle_data in bundles_by_digest.items():
            # Override pullspecs only when auto_rebuild is enabled and OSBS-pinning
            # mechanism is used.
            if not (bundle_data['auto_rebuild'] and bundle_data['osbs_pinning']):
                continue

            for pullspec in bundle_data['pullspecs']:
                # A pullspec item example:
                # {
                #   'new': 'registry.example.io/repo/example-operator@sha256:<sha256-value>',
                #   'original': 'registry.example.io/repo/example-operator:v2.2.0',
                #   'pinned': True
                # }

                # If related image is not pinned by OSBS, skip
                if not pullspec.get('pinned', False):
                    continue

                # A pullspec path is in format of "registry/repository@digest"
                pullspec_elems = pullspec.get('new').split('@')
                old_digest = pullspec_elems[1]

                if old_digest not in original_nvrs_by_digest:
                    # This related image is not one of the original images
                    continue

                # This related image is one of our original images
                old_nvr = original_nvrs_by_digest[old_digest]
                new_nvr = nvrs_mapping[old_nvr]
                new_digest = rebuilt_digests_by_nvr[new_nvr]

                # Replace the old digest with new digest
                pullspec_elems[1] = new_digest
                new_pullspec = '@'.join(pullspec_elems)
                pullspec['new'] = new_pullspec

                # Once a pullspec in this bundle has been overridden, add this
                # bundle to the rebuild list
                to_rebuild_digests.add(digest)

        # Skip this event because we can't proceed with processing it yet.
        # TODO
        # Now that we have the bundle images' NVRs, we can proceed with rebuilding them
        msg = f"Skipping the rebuild of {len(to_rebuild_digests)} bundle images " \
              "due to being blocked on further implementation for now."
        db_event.transition(EventState.SKIPPED, msg)
        return []
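
A worked illustration of the digest swap performed above, with made-up values:

# nvrs_mapping            = {'operator-1.0-1': 'operator-1.0-2'}
# original_nvrs_by_digest = {'sha256:aaa': 'operator-1.0-1'}
# rebuilt_digests_by_nvr  = {'operator-1.0-2': 'sha256:bbb'}
# pullspec['new'] 'registry.example.io/repo/operator@sha256:aaa'
#   is rewritten to 'registry.example.io/repo/operator@sha256:bbb'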
Example #15
    def _handle_auto_rebuild(self, db_event):
        """
        Handle auto rebuild for an advisory created by Botas

        :param db_event: database event that represents a rebuild event
        :rtype: list
        :return: list of bundles that should be rebuilt
        """
        # Mapping of original build nvrs to rebuilt nvrs in advisory
        nvrs_mapping = self._create_original_to_rebuilt_nvrs_map()

        original_nvrs = nvrs_mapping.keys()
        self.log_info(
            "Original NVRs of builds in the advisory #{0} are: {1}".format(
                self.event.advisory.errata_id, " ".join(original_nvrs)))

        # Get image manifest_list_digest for all original images, manifest_list_digest is used
        # in pullspecs in bundle's related images
        original_digests_by_nvr = {}
        original_nvrs_by_digest = {}
        for nvr in original_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                original_digests_by_nvr[nvr] = digest
                original_nvrs_by_digest[digest] = nvr
            else:
                log.warning(
                    f"Image manifest_list_digest not found for original image {nvr} in Pyxis, "
                    "skip this image")

        if not original_digests_by_nvr:
            msg = f"None of the original images have digests in Pyxis: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Get image manifest_list_digest for all rebuilt images, manifest_list_digest is used
        # in pullspecs of bundle's related images
        rebuilt_digests_by_nvr = {}
        rebuilt_nvrs = nvrs_mapping.values()
        for nvr in rebuilt_nvrs:
            # Don't require the manifest list digest to be published in this case
            # because there's a delay between when an advisory is shipped and when
            # the published repositories entry is populated
            digest = self._pyxis.get_manifest_list_digest_by_nvr(
                nvr, must_be_published=False)
            if digest:
                rebuilt_digests_by_nvr[nvr] = digest
            else:
                log.warning(
                    f"Image manifest_list_digest not found for rebuilt image {nvr} in Pyxis, "
                    "skip this image")

        if not rebuilt_digests_by_nvr:
            msg = f"None of the rebuilt images have digests in Pyxis: {','.join(rebuilt_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)
        self.log_debug(
            "There are %d bundles that are latest in a channel in the found index images",
            len(all_bundles),
        )

        # A mapping of digests to bundle metadata. This metadata is used
        # for the CSV metadata updates.
        bundle_mds_by_digest = {}

        # get bundle digests for original images
        bundle_digests_by_related_nvr = {}
        for image_nvr, image_digest in original_digests_by_nvr.items():
            bundles = self._pyxis.get_bundles_by_related_image_digest(
                image_digest, all_bundles)
            if not bundles:
                log.info(
                    f"No latest bundle image with the related image of {image_nvr}"
                )
                continue

            for bundle in bundles:
                bundle_digest = bundle['bundle_path_digest']
                bundle_mds_by_digest[bundle_digest] = bundle
                bundle_digests_by_related_nvr.setdefault(
                    image_nvr, []).append(bundle_digest)

        if not bundle_digests_by_related_nvr:
            msg = "None of the original images have related bundles, skip."
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []
        self.log_info("Found %d bundles with relevant related images",
                      len(bundle_digests_by_related_nvr))

        # Mapping of bundle digest to bundle data
        # {
        #     digest: {
        #         "images": [image_amd64, image_aarch64],
        #         "nvr": NVR,
        #         "auto_rebuild": True/False,
        #         "osbs_pinning": True/False,
        #         "pullspec_replacements": [...],
        #         "update": {...},
        #     }
        # }
        bundles_by_digest = {}
        default_bundle_data = {
            'images': [],
            'nvr': None,
            'auto_rebuild': False,
            'osbs_pinning': False,
            # CSV modifications for the rebuilt bundle image
            'pullspec_replacements': [],
            'update': {},
        }

        # Get images for each bundle digest, a bundle digest can have multiple images
        # with different arches.
        for digest in bundle_mds_by_digest:
            bundles = self._pyxis.get_images_by_digest(digest)
            # If no bundle image found, just skip this bundle digest
            if not bundles:
                self.log_warn(
                    'The bundle digest %r was not found in Pyxis. Skipping.',
                    digest)
                continue

            bundles_by_digest.setdefault(digest,
                                         copy.deepcopy(default_bundle_data))
            bundles_by_digest[digest]['nvr'] = bundles[0]['brew']['build']
            bundles_by_digest[digest]['images'] = bundles

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)

        # For each bundle, check whether it should be rebuilt by comparing the
        # auto_rebuild_tags of repository and bundle's tags
        for digest, bundle_data in bundles_by_digest.items():
            bundle_nvr = bundle_data['nvr']

            # Images are for different arches, just check against the first image
            image = bundle_data['images'][0]
            if self.image_has_auto_rebuild_tag(image):
                bundle_data['auto_rebuild'] = True

            # Fetch buildinfo
            buildinfo = koji_api.get_build(bundle_nvr)
            related_images = (buildinfo.get('extra', {}).get('image', {}).get(
                'operator_manifests', {}).get('related_images', {}))
            bundle_data['osbs_pinning'] = related_images.get(
                'created_by_osbs', False)
            # Save the original pullspecs
            bundle_data['pullspec_replacements'] = related_images.get(
                'pullspecs', [])

        # Digests of bundles to be rebuilt
        to_rebuild_digests = set()

        # Now for each bundle, replace the original digest with rebuilt
        # digest (override pullspecs)
        for digest, bundle_data in bundles_by_digest.items():
            # Override pullspecs only when auto_rebuild is enabled and OSBS-pinning
            # mechanism is used.
            if not (bundle_data['auto_rebuild']
                    and bundle_data['osbs_pinning']):
                self.log_info(
                    'The bundle %r does not have auto-rebuild tags (%r) and/or OSBS pinning (%r)',
                    bundle_data['nvr'],
                    bundle_data['auto_rebuild'],
                    bundle_data['osbs_pinning'],
                )
                continue

            csv_name = bundle_mds_by_digest[digest]['csv_name']
            version = bundle_mds_by_digest[digest]['version']
            bundle_data.update(self._get_csv_updates(csv_name, version))

            for pullspec in bundle_data['pullspec_replacements']:
                # A pullspec item example:
                # {
                #   'new': 'registry.example.io/repo/example-operator@sha256:<sha256-value>',
                #   'original': 'registry.example.io/repo/example-operator:v2.2.0',
                #   'pinned': True,
                #   # value used for internal purposes during manual rebuilds;
                #   # it's the old pullspec that was replaced
                #   '_old': 'registry.example.io/repo/example-operator@sha256:<previous-sha256-value>',
                # }

                # A pullspec path is in format of "registry/repository@digest"
                pullspec_elems = pullspec.get('new').split('@')
                old_digest = pullspec_elems[1]

                if old_digest not in original_nvrs_by_digest:
                    # This related image is not one of the original images
                    continue

                # This related image is one of our original images
                old_nvr = original_nvrs_by_digest[old_digest]
                new_nvr = nvrs_mapping[old_nvr]
                new_digest = rebuilt_digests_by_nvr[new_nvr]

                # save pullspec that image had before rebuild
                pullspec['_old'] = pullspec.get('new')

                # Replace the old digest with new digest
                pullspec_elems[1] = new_digest
                new_pullspec = '@'.join(pullspec_elems)
                pullspec['new'] = new_pullspec
                # Always set pinned to True when it was replaced by Freshmaker
                # since it indicates that the pullspec was modified from the
                # original pullspec
                pullspec['pinned'] = True

                # Once a pullspec in this bundle has been overridden, add this
                # bundle to the rebuild list
                self.log_info(
                    'Changing pullspec %r to %r in the bundle %r',
                    pullspec['_old'],
                    pullspec['new'],
                    bundle_data['nvr'],
                )
                to_rebuild_digests.add(digest)

        if not to_rebuild_digests:
            msg = self._no_bundle_prefix + "No bundle images to rebuild for " \
                                           f"advisory {self.event.advisory.name}"
            self.log_info(msg)
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            return []

        bundles_to_rebuild = [
            bundles_by_digest[digest] for digest in to_rebuild_digests]
        return bundles_to_rebuild
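
The shape of each entry in the returned list, with illustrative values:

# {'nvr': 'example-operator-bundle-1.0-2',
#  'auto_rebuild': True,
#  'osbs_pinning': True,
#  'pullspec_replacements': [{'new': ..., 'original': ..., 'pinned': True, '_old': ...}],
#  'update': {...},  # CSV metadata updates from _get_csv_updates
#  'images': [...]}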
Example #16
    def handle(self, event):
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Mapping of original build nvrs to rebuilt nvrs in advisory
        nvrs_mapping = self._create_original_to_rebuilt_nvrs_map()

        original_nvrs = nvrs_mapping.keys()
        self.log_info(
            "Original NVRs of builds in the advisory #{0} are: {1}".format(
                event.advisory.errata_id, " ".join(original_nvrs)))

        # Get image manifest_list_digest for all original images, manifest_list_digest is used
        # in pullspecs in bundle's related images
        original_digests_by_nvr = {}
        original_nvrs_by_digest = {}
        for nvr in original_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                original_digests_by_nvr[nvr] = digest
                original_nvrs_by_digest[digest] = nvr
            else:
                log.warning(
                    f"Image manifest_list_digest not found for original image {nvr} in Pyxis, "
                    "skip this image")

        if not original_digests_by_nvr:
            msg = f"None of the original images have digests in Pyxis: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Get image manifest_list_digest for all rebuilt images, manifest_list_digest is used
        # in pullspecs of bundle's related images
        rebuilt_digests_by_nvr = {}
        rebuilt_nvrs = nvrs_mapping.values()
        for nvr in rebuilt_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                rebuilt_digests_by_nvr[nvr] = digest
            else:
                log.warning(
                    f"Image manifest_list_digest not found for rebuilt image {nvr} in Pyxis, "
                    "skip this image")

        if not rebuilt_digests_by_nvr:
            msg = f"None of the rebuilt images have digests in Pyxis: {','.join(rebuilt_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)

        # A mapping of digests to bundle metadata. This metadata is used
        # for the CSV metadata updates.
        bundle_mds_by_digest = {}

        # get bundle digests for original images
        bundle_digests_by_related_nvr = {}
        for image_nvr, image_digest in original_digests_by_nvr.items():
            bundles = self._pyxis.get_bundles_by_related_image_digest(
                image_digest, all_bundles)
            if not bundles:
                log.info(
                    f"No latest bundle image with the related image of {image_nvr}"
                )
                continue

            for bundle in bundles:
                bundle_digest = bundle['bundle_path_digest']
                bundle_mds_by_digest[bundle_digest] = bundle
                bundle_digests_by_related_nvr.setdefault(
                    image_nvr, []).append(bundle_digest)

        if not bundle_digests_by_related_nvr:
            msg = "None of the original images have related bundles, skip."
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Mapping of bundle digest to bundle data
        # {
        #     digest: {
        #         "images": [image_amd64, image_aarch64],
        #         "nvr": NVR,
        #         "auto_rebuild": True/False,
        #         "osbs_pinning": True/False,
        #         "pullspecs": [...],
        #         "append": {...},
        #         "update": {...},
        #     }
        # }
        bundles_by_digest = {}
        default_bundle_data = {
            'images': [],
            'nvr': None,
            'auto_rebuild': False,
            'osbs_pinning': False,
            # CSV modifications for the rebuilt bundle image
            'pullspecs': [],
            'append': {},
            'update': {},
        }

        # Get images for each bundle digest, a bundle digest can have multiple images
        # with different arches.
        for digest in bundle_mds_by_digest:
            bundles = self._pyxis.get_images_by_digest(digest)
            # If no bundle image found, just skip this bundle digest
            if not bundles:
                continue

            bundles_by_digest.setdefault(digest,
                                         copy.deepcopy(default_bundle_data))
            bundles_by_digest[digest]['nvr'] = bundles[0]['brew']['build']
            bundles_by_digest[digest]['images'] = bundles

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)

        # For each bundle, check whether it should be rebuilt by comparing the
        # auto_rebuild_tags of repository and bundle's tags
        for digest, bundle_data in bundles_by_digest.items():
            bundle_nvr = bundle_data['nvr']

            # Images are for different arches, just check against the first image
            image = bundle_data['images'][0]
            if self.image_has_auto_rebuild_tag(image):
                bundle_data['auto_rebuild'] = True

            # Fetch buildinfo
            buildinfo = koji_api.get_build(bundle_nvr)
            related_images = (buildinfo.get('extra', {}).get('image', {}).get(
                'operator_manifests', {}).get('related_images', {}))
            bundle_data['osbs_pinning'] = related_images.get(
                'created_by_osbs', False)
            # Save the original pullspecs
            bundle_data['pullspecs'] = related_images.get('pullspecs', [])

        # Digests of bundles to be rebuilt
        to_rebuild_digests = set()

        # Now for each bundle, replace the original digest with rebuilt
        # digest (override pullspecs)
        for digest, bundle_data in bundles_by_digest.items():
            # Override pullspecs only when auto_rebuild is enabled and OSBS-pinning
            # mechanism is used.
            if not (bundle_data['auto_rebuild']
                    and bundle_data['osbs_pinning']):
                continue

            csv_name = bundle_mds_by_digest[digest]['csv_name']
            version = bundle_mds_by_digest[digest]['version']
            bundle_data.update(self._get_csv_updates(csv_name, version))

            for pullspec in bundle_data['pullspecs']:
                # A pullspec item example:
                # {
                #   'new': 'registry.example.io/repo/example-operator@sha256:<sha256-value>',
                #   'original': 'registry.example.io/repo/example-operator:v2.2.0',
                #   'pinned': True
                # }

                # A pullspec path is in format of "registry/repository@digest"
                pullspec_elems = pullspec.get('new').split('@')
                old_digest = pullspec_elems[1]

                if old_digest not in original_nvrs_by_digest:
                    # This related image is not one of the original images
                    continue

                # This related image is one of our original images
                old_nvr = original_nvrs_by_digest[old_digest]
                new_nvr = nvrs_mapping[old_nvr]
                new_digest = rebuilt_digests_by_nvr[new_nvr]

                # Replace the old digest with new digest
                pullspec_elems[1] = new_digest
                new_pullspec = '@'.join(pullspec_elems)
                pullspec['new'] = new_pullspec
                # Always set pinned to True when it was replaced by Freshmaker
                # since it indicates that the pullspec was modified from the
                # original pullspec
                pullspec['pinned'] = True

                # Once a pullspec in this bundle has been overridden, add this
                # bundle to the rebuild list
                to_rebuild_digests.add(digest)

        if not to_rebuild_digests:
            msg = f"No bundle images to rebuild for advisory {event.advisory.name}"
            self.log_info(msg)
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            return []

        builds = self._prepare_builds(db_event, bundles_by_digest,
                                      to_rebuild_digests)

        # Reset context to db_event.
        self.set_context(db_event)

        self.start_to_build_images(builds)
        msg = f"Advisory {db_event.search_key}: Rebuilding " \
              f"{len(db_event.builds.all())} bundle images."
        db_event.transition(EventState.BUILDING, msg)

        return []
Example #17
    def handle(self, event):
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Get builds NVRs from the advisory attached to the message/event and
        # then get original NVR for every build
        original_nvrs = set()
        for product_info in event.advisory.builds.values():
            for build in product_info['builds']:
                # Search for the first build that triggered the chain of rebuilds
                # for every shipped NVR to get original NVR from it
                original_nvr = self.get_published_original_nvr(build['nvr'])
                if original_nvr is None:
                    continue
                original_nvrs.add(original_nvr)

        self.log_info(
            "Original NVRs of builds in the advisory #{0} are: {1}".format(
                event.advisory.errata_id, " ".join(original_nvrs)))
        # Get images by nvrs and then get their digests
        original_images_digests = self._pyxis.get_digests_by_nvrs(
            original_nvrs)
        if not original_images_digests:
            msg = f"There are no digests for NVRs: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)

        bundles = self._pyxis.filter_bundles_by_related_image_digests(
            original_images_digests, all_bundles)
        bundle_digests = set()
        for bundle in bundles:
            if not bundle.get('bundle_path_digest'):
                log.warning("Bundle %s doesn't have 'bundle_path_digests' set",
                            bundle['bundle_path'])
                continue
            bundle_digests.add(bundle['bundle_path_digest'])
        bundle_images = self._pyxis.get_images_by_digests(bundle_digests)

        # Filter image nvrs that don't have or never had auto_rebuild tag
        # in repos, where image is published
        auto_rebuild_nvrs = self._pyxis.get_auto_rebuild_tagged_images(
            bundle_images)

        # get NVRs only of those bundles, which have OSBS pinning
        bundles_nvrs = self._filter_bundles_by_pinned_related_images(
            auto_rebuild_nvrs)

        # Skip this event because we can't proceed with processing it yet.
        # TODO
        # Now that we have the bundle images' NVRs, we can proceed with rebuilding them
        msg = f"Skipping the rebuild of {len(bundles_nvrs)} bundle images " \
              "due to being blocked on further implementation for now."
        db_event.transition(EventState.SKIPPED, msg)
        return []
Example #18
    def _handle_manual_rebuild(self, db_event):
        """
        Handle manual rebuild submitted by Release Driver for an advisory created by Botas

        :param db_event: database event that represents a rebuild event
        :rtype: list
        :return: list of bundles that should be rebuilt
        """
        old_to_new_pullspec_map = self._get_pullspecs_mapping()

        if not old_to_new_pullspec_map:
            msg = self._no_bundle_prefix + 'None of the bundle images have ' \
                                           'applicable pullspecs to replace'
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)
        rebuild_nvr_to_pullspecs_map = dict()
        # compare replaced pullspecs with pullspecs in 'container_images' and
        # create a map from NVRs of bundles that should be rebuilt to their pullspecs
        for container_image_nvr in self.event.container_images:
            artifact_build = db.session.query(ArtifactBuild).filter(
                ArtifactBuild.rebuilt_nvr == container_image_nvr,
                ArtifactBuild.type == ArtifactType.IMAGE.value,
            ).one_or_none()
            pullspecs = []
            # Try to find the build in the FM database; if it's not there, check Brew
            if artifact_build:
                pullspecs = artifact_build.bundle_pullspec_overrides[
                    "pullspec_replacements"]
            else:
                # Fetch buildinfo from Koji
                buildinfo = koji_api.get_build(container_image_nvr)
                # Get the original pullspecs
                pullspecs = (buildinfo.get('extra', {}).get('image', {}).get(
                    'operator_manifests', {}).get('related_images',
                                                  {}).get('pullspecs', []))

            for pullspec in pullspecs:
                if pullspec.get('new') not in old_to_new_pullspec_map:
                    continue
                # use newer pullspecs in the image
                pullspec['new'] = old_to_new_pullspec_map[pullspec['new']]
                rebuild_nvr_to_pullspecs_map[container_image_nvr] = pullspecs

        if not rebuild_nvr_to_pullspecs_map:
            msg = self._no_bundle_prefix + 'None of the container images have ' \
                                           'applicable pullspecs from the input bundle images'
            log.info(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # list with metadata about every bundle to rebuild
        to_rebuild_bundles = []
        # fill the 'update' and 'pullspec_replacements' fields for bundles to rebuild
        for nvr, pullspecs in rebuild_nvr_to_pullspecs_map.items():
            bundle_digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if bundle_digest is not None:
                bundles = self._pyxis.get_bundles_by_digest(bundle_digest)
                temp_bundle = bundles[0]
                csv_updates = (self._get_csv_updates(temp_bundle['csv_name'],
                                                     temp_bundle['version']))
                to_rebuild_bundles.append({
                    'nvr': nvr,
                    'update': csv_updates['update'],
                    'pullspec_replacements': pullspecs,
                })
            else:
                log.warning('Can\'t find manifest_list_digest for bundle '
                            f'"{nvr}" in Pyxis')

        if not to_rebuild_bundles:
            msg = 'Can\'t find digests for any of the bundles to rebuild'
            log.warning(msg)
            db_event.transition(EventState.FAILED, msg)
            return []

        return to_rebuild_bundles
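
The shape of each entry in the returned list, with illustrative values:

# {'nvr': 'example-operator-bundle-1.0-2',
#  'update': {...},  # CSV metadata updates from _get_csv_updates
#  'pullspec_replacements': [{'new': ..., 'original': ..., 'pinned': True}]}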